From 0029d00102d5e669bbbf71107e13f94ff2ea6d93 Mon Sep 17 00:00:00 2001
From: Peter Banda
Date: Fri, 20 Dec 2024 15:24:43 +0100
Subject: [PATCH] Anthropic bedrock chat completion examples

---
 ...tCompletionStreamedWithOpenAIAdapter.scala | 36 ++++++++++++++
 ...reateChatCompletionWithOpenAIAdapter.scala | 34 ++++++++++++++
 .../AnthropicBedrockCreateMessage.scala       | 47 +++++++++++++++++++
 ...nthropicBedrockCreateMessageStreamed.scala | 39 +++++++++++++++
 .../AnthropicCreateCachedMessage.scala        |  4 +-
 ...tCompletionStreamedWithOpenAIAdapter.scala |  3 +-
 6 files changed, 160 insertions(+), 3 deletions(-)
 create mode 100644 openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicBedrockCreateChatCompletionStreamedWithOpenAIAdapter.scala
 create mode 100644 openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicBedrockCreateChatCompletionWithOpenAIAdapter.scala
 create mode 100644 openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicBedrockCreateMessage.scala
 create mode 100644 openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicBedrockCreateMessageStreamed.scala

diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicBedrockCreateChatCompletionStreamedWithOpenAIAdapter.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicBedrockCreateChatCompletionStreamedWithOpenAIAdapter.scala
new file mode 100644
index 00000000..d9aa6e6e
--- /dev/null
+++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicBedrockCreateChatCompletionStreamedWithOpenAIAdapter.scala
@@ -0,0 +1,36 @@
+package io.cequence.openaiscala.examples.nonopenai
+
+import akka.stream.scaladsl.Sink
+import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
+import io.cequence.openaiscala.domain.{NonOpenAIModelId, SystemMessage, UserMessage}
+import io.cequence.openaiscala.examples.ExampleBase
+import io.cequence.openaiscala.service.StreamedServiceTypes.OpenAIChatCompletionStreamedService
+
+import scala.concurrent.Future
+
+// requires `openai-scala-anthropic-client` as a dependency and 'AWS_BEDROCK_ACCESS_KEY', 'AWS_BEDROCK_SECRET_KEY', and 'AWS_BEDROCK_REGION' environment variables to be set
+object AnthropicBedrockCreateChatCompletionStreamedWithOpenAIAdapter
+    extends ExampleBase[OpenAIChatCompletionStreamedService] {
+
+  override val service: OpenAIChatCompletionStreamedService =
+    ChatCompletionProvider.anthropicBedrock
+
+  private val messages = Seq(
+    SystemMessage("You are a helpful assistant."),
+    UserMessage("What is the weather like in Norway?")
+  )
+  override protected def run: Future[_] = {
+    service
+      .createChatCompletionStreamed(
+        messages = messages,
+        settings = CreateChatCompletionSettings(
+          model = NonOpenAIModelId.claude_3_5_sonnet_20240620
+        )
+      )
+      .runWith(
+        Sink.foreach { response =>
+          print(response.choices.headOption.flatMap(_.delta.content).getOrElse(""))
+        }
+      )
+  }
+}
diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicBedrockCreateChatCompletionWithOpenAIAdapter.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicBedrockCreateChatCompletionWithOpenAIAdapter.scala
new file mode 100644
index 00000000..b82f7b22
--- /dev/null
+++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicBedrockCreateChatCompletionWithOpenAIAdapter.scala
@@ -0,0 +1,34 @@
+package io.cequence.openaiscala.examples.nonopenai
+
+import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
+import io.cequence.openaiscala.domain.{NonOpenAIModelId, SystemMessage, UserMessage}
+import io.cequence.openaiscala.examples.ExampleBase
+import io.cequence.openaiscala.service.OpenAIChatCompletionService
+
+import scala.concurrent.Future
+
+// requires `openai-scala-anthropic-client` as a dependency and 'AWS_BEDROCK_ACCESS_KEY', 'AWS_BEDROCK_SECRET_KEY', and 'AWS_BEDROCK_REGION' environment variables to be set
+object AnthropicBedrockCreateChatCompletionWithOpenAIAdapter
+    extends ExampleBase[OpenAIChatCompletionService] {
+
+  override val service: OpenAIChatCompletionService = ChatCompletionProvider.anthropicBedrock
+
+  private val messages = Seq(
+    SystemMessage("You are a drunk assistant!"),
+    UserMessage("What is the weather like in Norway?")
+  )
+
+  private val modelId =
+    // using the 'us.' prefix because of cross-region inference (enabled only in the US)
+    "us." + NonOpenAIModelId.bedrock_claude_3_5_haiku_20241022_v1_0
+
+  override protected def run: Future[_] =
+    service
+      .createChatCompletion(
+        messages = messages,
+        settings = CreateChatCompletionSettings(modelId)
+      )
+      .map { content =>
+        println(content.choices.headOption.map(_.message.content).getOrElse("N/A"))
+      }
+}
diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicBedrockCreateMessage.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicBedrockCreateMessage.scala
new file mode 100644
index 00000000..948d8d6b
--- /dev/null
+++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicBedrockCreateMessage.scala
@@ -0,0 +1,47 @@
+package io.cequence.openaiscala.examples.nonopenai
+
+import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.TextBlock
+import io.cequence.openaiscala.anthropic.domain.Content.ContentBlockBase
+import io.cequence.openaiscala.anthropic.domain.Message
+import io.cequence.openaiscala.anthropic.domain.Message.{SystemMessage, UserMessage}
+import io.cequence.openaiscala.anthropic.domain.response.CreateMessageResponse
+import io.cequence.openaiscala.anthropic.domain.settings.AnthropicCreateMessageSettings
+import io.cequence.openaiscala.anthropic.service.{AnthropicService, AnthropicServiceFactory}
+import io.cequence.openaiscala.domain.NonOpenAIModelId
+import io.cequence.openaiscala.examples.ExampleBase
+
+import scala.concurrent.Future
+
+// requires `openai-scala-anthropic-client` as a dependency and 'AWS_BEDROCK_ACCESS_KEY', 'AWS_BEDROCK_SECRET_KEY', and 'AWS_BEDROCK_REGION' environment variables to be set
+object AnthropicBedrockCreateMessage extends ExampleBase[AnthropicService] {
+
+  override protected val service: AnthropicService = AnthropicServiceFactory.forBedrock()
+
+  private val messages: Seq[Message] = Seq(
+    SystemMessage("You are a drunk assistant!"),
+    UserMessage("What is the weather like in Norway?")
+  )
+
+  private val modelId =
+    // using the 'us.' prefix because of cross-region inference (enabled only in the US)
+    "us." + NonOpenAIModelId.bedrock_claude_3_5_sonnet_20241022_v2_0
+
+  override protected def run: Future[_] =
+    service
+      .createMessage(
+        messages,
+        settings = AnthropicCreateMessageSettings(
+          model = modelId,
+          max_tokens = 4096,
+          temperature = Some(1.0)
+        )
+      )
+      .map(printMessageContent)
+
+  private def printMessageContent(response: CreateMessageResponse) = {
+    val text =
+      response.content.blocks.collect { case ContentBlockBase(TextBlock(text), _) => text }
+        .mkString(" ")
+    println(text)
+  }
+}
diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicBedrockCreateMessageStreamed.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicBedrockCreateMessageStreamed.scala
new file mode 100644
index 00000000..7074479f
--- /dev/null
+++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicBedrockCreateMessageStreamed.scala
@@ -0,0 +1,39 @@
+package io.cequence.openaiscala.examples.nonopenai
+
+import akka.stream.scaladsl.Sink
+import io.cequence.openaiscala.anthropic.domain.Message
+import io.cequence.openaiscala.anthropic.domain.Message.{SystemMessage, UserMessage}
+import io.cequence.openaiscala.anthropic.domain.settings.AnthropicCreateMessageSettings
+import io.cequence.openaiscala.anthropic.service.{AnthropicService, AnthropicServiceFactory}
+import io.cequence.openaiscala.domain.NonOpenAIModelId
+import io.cequence.openaiscala.examples.ExampleBase
+
+import scala.concurrent.Future
+
+// requires `openai-scala-anthropic-client` as a dependency and 'AWS_BEDROCK_ACCESS_KEY', 'AWS_BEDROCK_SECRET_KEY', and 'AWS_BEDROCK_REGION' environment variables to be set
+object AnthropicBedrockCreateMessageStreamed extends ExampleBase[AnthropicService] {
+
+  override protected val service: AnthropicService = AnthropicServiceFactory.forBedrock()
+
+  val messages: Seq[Message] = Seq(
+    SystemMessage("You are a helpful assistant!"),
+    UserMessage("Start with the letter S followed by a quick story about Norway and finish with the letter E.")
+  )
+
+  private val modelId = "us." + NonOpenAIModelId.bedrock_claude_3_5_sonnet_20241022_v2_0
+
+  override protected def run: Future[_] =
+    service
+      .createMessageStreamed(
+        messages,
+        settings = AnthropicCreateMessageSettings(
+          model = modelId,
+          max_tokens = 4096
+        )
+      )
+      .runWith(
+        Sink.foreach { response =>
+          print(response.delta.text)
+        }
+      )
+}
diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateCachedMessage.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateCachedMessage.scala
index 2c0b939b..00d14e22 100644
--- a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateCachedMessage.scala
+++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/AnthropicCreateCachedMessage.scala
@@ -2,11 +2,11 @@ package io.cequence.openaiscala.examples.nonopenai
 
 import io.cequence.openaiscala.anthropic.domain.CacheControl.Ephemeral
 import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.TextBlock
-import io.cequence.openaiscala.anthropic.domain.Content.{ContentBlockBase, SingleString}
+import io.cequence.openaiscala.anthropic.domain.Content.ContentBlockBase
 import io.cequence.openaiscala.anthropic.domain.Message.{SystemMessage, UserMessage}
 import io.cequence.openaiscala.anthropic.domain.response.CreateMessageResponse
 import io.cequence.openaiscala.anthropic.domain.settings.AnthropicCreateMessageSettings
-import io.cequence.openaiscala.anthropic.domain.{Content, Message}
+import io.cequence.openaiscala.anthropic.domain.Message
 import io.cequence.openaiscala.anthropic.service.{AnthropicService, AnthropicServiceFactory}
 import io.cequence.openaiscala.domain.NonOpenAIModelId
 import io.cequence.openaiscala.examples.ExampleBase
diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/VertexAICreateChatCompletionStreamedWithOpenAIAdapter.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/VertexAICreateChatCompletionStreamedWithOpenAIAdapter.scala
index e8b0113b..9e9b6f71 100644
--- a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/VertexAICreateChatCompletionStreamedWithOpenAIAdapter.scala
+++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai/VertexAICreateChatCompletionStreamedWithOpenAIAdapter.scala
@@ -15,7 +15,8 @@ object VertexAICreateChatCompletionStreamedWithOpenAIAdapter
 
   override val service: OpenAIChatCompletionStreamedService = ChatCompletionProvider.vertexAI
 
-  private val model = NonOpenAIModelId.gemini_1_5_flash_001
+  // 2024-12-18: works only with us-central1
+  private val model = NonOpenAIModelId.gemini_2_0_flash_exp
 
   private val messages = Seq(
     SystemMessage("You are a helpful assistant who makes jokes about Google. Use markdown"),