Skip to content

Commit

Permalink
Anthropic bedrock chat completion examples
Browse files Browse the repository at this point in the history
  • Loading branch information
peterbanda committed Dec 20, 2024
1 parent bf9f759 commit 0029d00
Show file tree
Hide file tree
Showing 6 changed files with 160 additions and 3 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
package io.cequence.openaiscala.examples.nonopenai

import akka.stream.scaladsl.Sink
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
import io.cequence.openaiscala.domain.{NonOpenAIModelId, SystemMessage, UserMessage}
import io.cequence.openaiscala.examples.ExampleBase
import io.cequence.openaiscala.service.StreamedServiceTypes.OpenAIChatCompletionStreamedService

import scala.concurrent.Future

// Example: streamed chat completion against Anthropic Claude on AWS Bedrock, accessed
// through the OpenAI-compatible adapter (`OpenAIChatCompletionStreamedService`).
//
// requires `openai-scala-anthropic-client` as a dependency and `ANTHROPIC_API_KEY` environment variable to be set
// NOTE(review): the `ANTHROPIC_API_KEY` note above looks like a copy-paste leftover — this
// example uses `ChatCompletionProvider.anthropicBedrock`, and the sibling Bedrock examples
// in this package require 'AWS_BEDROCK_ACCESS_KEY', 'AWS_BEDROCK_SECRET_KEY' and
// 'AWS_BEDROCK_REGION' instead; confirm against ChatCompletionProvider.anthropicBedrock.
object AnthropicBedrockCreateChatCompletionStreamedWithOpenAIAdapter
    extends ExampleBase[OpenAIChatCompletionStreamedService] {

  // Anthropic-on-Bedrock service exposed behind the standard OpenAI chat-completion interface.
  override val service: OpenAIChatCompletionStreamedService =
    ChatCompletionProvider.anthropicBedrock

  // Minimal two-message conversation: a system prompt followed by one user question.
  private val messages = Seq(
    SystemMessage("You are a helpful assistant."),
    UserMessage("What is the weather like in Norway?")
  )

  // Starts the streamed completion and prints each incoming delta chunk as it arrives.
  override protected def run: Future[_] = {
    service
      .createChatCompletionStreamed(
        messages = messages,
        settings = CreateChatCompletionSettings(
          model = NonOpenAIModelId.claude_3_5_sonnet_20240620
        )
      )
      .runWith(
        Sink.foreach { response =>
          // A streamed chunk may or may not carry content; print whatever is present.
          print(response.choices.headOption.flatMap(_.delta.content).getOrElse(""))
        }
      )
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
package io.cequence.openaiscala.examples.nonopenai

import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
import io.cequence.openaiscala.domain.{NonOpenAIModelId, SystemMessage, UserMessage}
import io.cequence.openaiscala.examples.ExampleBase
import io.cequence.openaiscala.service.OpenAIChatCompletionService

import scala.concurrent.Future

// Example: non-streamed chat completion against Anthropic Claude on AWS Bedrock via the
// OpenAI-compatible adapter.
//
// requires `openai-scala-anthropic-client` as a dependency and 'AWS_BEDROCK_ACCESS_KEY', 'AWS_BEDROCK_SECRET_KEY', 'AWS_BEDROCK_REGION' environment variables to be set
object AnthropicBedrockCreateChatCompletionWithOpenAIAdapter
    extends ExampleBase[OpenAIChatCompletionService] {

  override val service: OpenAIChatCompletionService = ChatCompletionProvider.anthropicBedrock

  // System prompt followed by a single user question.
  private val conversation = Seq(
    SystemMessage("You are a drunk assistant!"),
    UserMessage("What is the weather like in Norway?")
  )

  // 'us.' prefix: cross-region inference is enabled only in the US.
  private val bedrockModelId =
    s"us.${NonOpenAIModelId.bedrock_claude_3_5_haiku_20241022_v1_0}"

  // Fires one chat completion and prints the first choice's message content (or "N/A").
  override protected def run: Future[_] = {
    val settings = CreateChatCompletionSettings(bedrockModelId)
    service
      .createChatCompletion(messages = conversation, settings = settings)
      .map { response =>
        val text = response.choices.headOption.fold("N/A")(_.message.content)
        println(text)
      }
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
package io.cequence.openaiscala.examples.nonopenai

import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.TextBlock
import io.cequence.openaiscala.anthropic.domain.Content.ContentBlockBase
import io.cequence.openaiscala.anthropic.domain.Message
import io.cequence.openaiscala.anthropic.domain.Message.{SystemMessage, UserMessage}
import io.cequence.openaiscala.anthropic.domain.response.CreateMessageResponse
import io.cequence.openaiscala.anthropic.domain.settings.AnthropicCreateMessageSettings
import io.cequence.openaiscala.anthropic.service.{AnthropicService, AnthropicServiceFactory}
import io.cequence.openaiscala.domain.NonOpenAIModelId
import io.cequence.openaiscala.examples.ExampleBase

import scala.concurrent.Future

// Example: native Anthropic `createMessage` call against Claude on AWS Bedrock.
//
// requires `openai-scala-anthropic-client` as a dependency and 'AWS_BEDROCK_ACCESS_KEY', 'AWS_BEDROCK_SECRET_KEY', 'AWS_BEDROCK_REGION' environment variables to be set
object AnthropicBedrockCreateMessage extends ExampleBase[AnthropicService] {

  // Native Anthropic service wired for AWS Bedrock.
  override protected val service: AnthropicService = AnthropicServiceFactory.forBedrock()

  // System prompt followed by a single user question.
  private val messages: Seq[Message] = Seq(
    SystemMessage("You are a drunk assistant!"),
    UserMessage("What is the weather like in Norway?")
  )

  // 'us.' prefix: cross-region inference is enabled only in the US.
  private val modelId =
    s"us.${NonOpenAIModelId.bedrock_claude_3_5_sonnet_20241022_v2_0}"

  // Sends the conversation and prints the textual content of the response.
  override protected def run: Future[_] = {
    val settings = AnthropicCreateMessageSettings(
      model = modelId,
      max_tokens = 4096,
      temperature = Some(1.0)
    )
    service.createMessage(messages, settings = settings).map(printMessageContent)
  }

  // Collects every text block of the response and prints them space-separated.
  private def printMessageContent(response: CreateMessageResponse) = {
    val texts = response.content.blocks.collect {
      case ContentBlockBase(TextBlock(text), _) => text
    }
    println(texts.mkString(" "))
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
package io.cequence.openaiscala.examples.nonopenai

import akka.stream.scaladsl.Sink
import io.cequence.openaiscala.anthropic.domain.Message
import io.cequence.openaiscala.anthropic.domain.Message.{SystemMessage, UserMessage}
import io.cequence.openaiscala.anthropic.domain.settings.AnthropicCreateMessageSettings
import io.cequence.openaiscala.anthropic.service.{AnthropicService, AnthropicServiceFactory}
import io.cequence.openaiscala.domain.NonOpenAIModelId
import io.cequence.openaiscala.examples.ExampleBase

import scala.concurrent.Future

// Example: streamed native Anthropic `createMessageStreamed` call against Claude on AWS Bedrock.
//
// requires `openai-scala-anthropic-client` as a dependency and 'AWS_BEDROCK_ACCESS_KEY', 'AWS_BEDROCK_SECRET_KEY', 'AWS_BEDROCK_REGION' environment variables to be set
object AnthropicBedrockCreateMessageStreamed extends ExampleBase[AnthropicService] {

  // Native Anthropic service wired for AWS Bedrock.
  override protected val service: AnthropicService = AnthropicServiceFactory.forBedrock()

  val messages: Seq[Message] = Seq(
    SystemMessage("You are a helpful assistant!"),
    UserMessage("Start with the letter S followed by a quick story about Norway and finish with the letter E.")
  )

  // 'us.' prefix: cross-region inference is enabled only in the US.
  private val modelId = s"us.${NonOpenAIModelId.bedrock_claude_3_5_sonnet_20241022_v2_0}"

  // Streams the message and prints each text delta as it arrives.
  override protected def run: Future[_] = {
    val settings = AnthropicCreateMessageSettings(
      model = modelId,
      max_tokens = 4096
    )
    service
      .createMessageStreamed(messages, settings = settings)
      .runWith(Sink.foreach(chunk => print(chunk.delta.text)))
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,11 @@ package io.cequence.openaiscala.examples.nonopenai

import io.cequence.openaiscala.anthropic.domain.CacheControl.Ephemeral
import io.cequence.openaiscala.anthropic.domain.Content.ContentBlock.TextBlock
import io.cequence.openaiscala.anthropic.domain.Content.{ContentBlockBase, SingleString}
import io.cequence.openaiscala.anthropic.domain.Content.ContentBlockBase
import io.cequence.openaiscala.anthropic.domain.Message.{SystemMessage, UserMessage}
import io.cequence.openaiscala.anthropic.domain.response.CreateMessageResponse
import io.cequence.openaiscala.anthropic.domain.settings.AnthropicCreateMessageSettings
import io.cequence.openaiscala.anthropic.domain.{Content, Message}
import io.cequence.openaiscala.anthropic.domain.Message
import io.cequence.openaiscala.anthropic.service.{AnthropicService, AnthropicServiceFactory}
import io.cequence.openaiscala.domain.NonOpenAIModelId
import io.cequence.openaiscala.examples.ExampleBase
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,8 @@ object VertexAICreateChatCompletionStreamedWithOpenAIAdapter

override val service: OpenAIChatCompletionStreamedService = ChatCompletionProvider.vertexAI

private val model = NonOpenAIModelId.gemini_1_5_flash_001
// 2024-12-18: works only with us-central1
private val model = NonOpenAIModelId.gemini_2_0_flash_exp

private val messages = Seq(
SystemMessage("You are a helpful assistant who makes jokes about Google. Use markdown"),
Expand Down

0 comments on commit 0029d00

Please sign in to comment.