Fireworks - document inlining

peterbanda committed Jan 13, 2025
1 parent 8f160cc commit 8936454
Showing 7 changed files with 205 additions and 47 deletions.
@@ -0,0 +1,28 @@
package io.cequence.openaiscala.examples

import java.awt.image.RenderedImage
import java.io.ByteArrayOutputStream
import java.util.Base64
import javax.imageio.ImageIO

trait BufferedImageHelper {

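// Reads an image file and returns it as a Base64-encoded JPEG string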
protected def imageBase64Source(
file: java.io.File
): String = {
val bufferedImage = ImageIO.read(file)
Base64.getEncoder.encodeToString(imageToBytes(bufferedImage, "jpeg"))
}

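// Serializes the image into a byte array in the given format (e.g. "jpeg")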
protected def imageToBytes(
image: RenderedImage,
format: String
): Array[Byte] = {
val baos = new ByteArrayOutputStream()
ImageIO.write(image, format, baos)
baos.flush()
val imageInByte = baos.toByteArray
baos.close()
imageInByte
}
}
@@ -3,26 +3,20 @@ package io.cequence.openaiscala.examples
import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings

- import java.awt.image.RenderedImage
- import java.io.ByteArrayOutputStream
- import java.util.Base64
- import javax.imageio.ImageIO
import scala.concurrent.Future

- object CreateChatCompletionVisionWithLocalFile extends Example {
+ object CreateChatCompletionVisionWithLocalFile extends Example with BufferedImageHelper {

// provide a local jpeg here
- private val localImagePath = sys.env("EXAMPLE_IMAGE_PATH")
- private val bufferedImage = ImageIO.read(new java.io.File(localImagePath))
- private val imageBase64Source =
- Base64.getEncoder.encodeToString(imageToBytes(bufferedImage, "jpeg"))
+ private lazy val localImagePath = sys.env("EXAMPLE_IMAGE_PATH")
+ private val imageSource = imageBase64Source(new java.io.File(localImagePath))

val messages: Seq[BaseMessage] = Seq(
SystemMessage("You are a helpful assistant."),
UserSeqMessage(
Seq(
TextContent("What is in this picture?"),
ImageURLContent(s"data:image/jpeg;base64,${imageBase64Source}")
ImageURLContent(s"data:image/jpeg;base64,${imageSource}")
)
)
)
@@ -32,22 +26,10 @@ object CreateChatCompletionVisionWithLocalFile extends Example {
.createChatCompletion(
messages,
settings = CreateChatCompletionSettings(
- model = ModelId.gpt_4_vision_preview,
+ model = ModelId.gpt_4o,
temperature = Some(0),
max_tokens = Some(300)
)
)
.map(printMessageContent)

- private def imageToBytes(
- image: RenderedImage,
- format: String
- ): Array[Byte] = {
- val baos = new ByteArrayOutputStream()
- ImageIO.write(image, format, baos)
- baos.flush()
- val imageInByte = baos.toByteArray
- baos.close()
- imageInByte
- }
}
@@ -8,29 +8,25 @@ import io.cequence.openaiscala.anthropic.domain.response.CreateMessageResponse
import io.cequence.openaiscala.anthropic.domain.settings.AnthropicCreateMessageSettings
import io.cequence.openaiscala.anthropic.service.{AnthropicService, AnthropicServiceFactory}
import io.cequence.openaiscala.domain.NonOpenAIModelId
- import io.cequence.openaiscala.examples.ExampleBase
+ import io.cequence.openaiscala.examples.{BufferedImageHelper, ExampleBase}

- import java.awt.image.RenderedImage
- import java.io.ByteArrayOutputStream
- import java.util.Base64
- import javax.imageio.ImageIO
import scala.concurrent.Future

// requires `openai-scala-anthropic-client` as a dependency
- object AnthropicCreateMessageWithImage extends ExampleBase[AnthropicService] {
+ object AnthropicCreateMessageWithImage
+ extends ExampleBase[AnthropicService]
+ with BufferedImageHelper {

- private val localImagePath = sys.env("EXAMPLE_IMAGE_PATH")
- private val bufferedImage = ImageIO.read(new java.io.File(localImagePath))
- private val imageBase64Source =
- Base64.getEncoder.encodeToString(imageToBytes(bufferedImage, "jpeg"))
+ private lazy val localImagePath = sys.env("EXAMPLE_IMAGE_PATH")
+ private val imageSource = imageBase64Source(new java.io.File(localImagePath))

override protected val service: AnthropicService = AnthropicServiceFactory()

private val messages: Seq[Message] = Seq(
UserMessageContent(
Seq(
ContentBlockBase(TextBlock("Describe to me what is in the picture!")),
- MediaBlock.jpeg(data = imageBase64Source)
+ MediaBlock.jpeg(data = imageSource)
)
)
)
@@ -46,18 +42,6 @@ object AnthropicCreateMessageWithImage extends ExampleBase[AnthropicService] {
)
.map(printMessageContent)

- private def imageToBytes(
- image: RenderedImage,
- format: String
- ): Array[Byte] = {
- val baos = new ByteArrayOutputStream()
- ImageIO.write(image, format, baos)
- baos.flush()
- val imageInByte = baos.toByteArray
- baos.close()
- imageInByte
- }

private def printMessageContent(response: CreateMessageResponse) = {
val text =
response.content.blocks.collect { case ContentBlockBase(TextBlock(text), _) => text }
@@ -24,7 +24,7 @@ object CerebrasCreateChatCompletion extends ExampleBase[OpenAIChatCompletionServ
UserMessage("What is the weather like in Norway?")
)

- private val modelId = NonOpenAIModelId.llama3_1_8b
+ private val modelId = NonOpenAIModelId.llama_3_3_70b

override protected def run: Future[_] =
service
@@ -0,0 +1,44 @@
package io.cequence.openaiscala.examples.nonopenai

import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
import io.cequence.openaiscala.examples.ExampleBase
import io.cequence.openaiscala.service.OpenAIChatCompletionService

import scala.concurrent.Future

/**
* Requires `FIREWORKS_API_KEY` environment variable to be set
*
* Check out the website for more information:
* https://fireworks.ai/blog/document-inlining-launch
*/
object FireworksAIDocumentInlining extends ExampleBase[OpenAIChatCompletionService] {

private val fireworksModelPrefix = "accounts/fireworks/models/"
override val service: OpenAIChatCompletionService = ChatCompletionProvider.fireworks

val messages: Seq[BaseMessage] = Seq(
SystemMessage("You are a helpful assistant."),
UserSeqMessage(
Seq(
TextContent("What are the candidate's BA and MBA GPAs?"),
ImageURLContent(
"https://storage.googleapis.com/fireworks-public/test/sample_resume.pdf#transform=inline"
)
)
)
)

override protected def run: Future[_] =
service
.createChatCompletion(
messages,
settings = CreateChatCompletionSettings(
model = fireworksModelPrefix + NonOpenAIModelId.llama_v3p3_70b_instruct,
temperature = Some(0),
max_tokens = Some(1000)
)
)
.map(printMessageContent)
}
@@ -0,0 +1,66 @@
package io.cequence.openaiscala.examples.nonopenai

import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.{
ChatCompletionResponseFormatType,
CreateChatCompletionSettings
}
import io.cequence.openaiscala.examples.ExampleBase
import io.cequence.openaiscala.service.OpenAIChatCompletionService
import play.api.libs.json.Json
import io.cequence.openaiscala.JsonFormats.jsonSchemaFormat

import scala.concurrent.Future

/**
* Requires `FIREWORKS_API_KEY` environment variable to be set
*
* Check out the website for more information:
* https://fireworks.ai/blog/document-inlining-launch
*/
object FireworksAIDocumentInliningJson extends ExampleBase[OpenAIChatCompletionService] {

private val fireworksModelPrefix = "accounts/fireworks/models/"
override val service: OpenAIChatCompletionService = ChatCompletionProvider.fireworks

val messages: Seq[BaseMessage] = Seq(
SystemMessage("You are a helpful assistant."),
UserSeqMessage(
Seq(
TextContent(
"Extract the list of professional associations and accomplishments into JSON"
),
ImageURLContent(
"https://storage.googleapis.com/fireworks-public/test/sample_resume.pdf#transform=inline"
)
)
)
)

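// target JSON schema for the extracted fields, passed to the model via the response_format extra param below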
private val schema: JsonSchema = JsonSchema.Object(
properties = Seq(
"professional_associations" -> JsonSchema.Array(JsonSchema.String()),
"accomplishment" -> JsonSchema.Array(JsonSchema.String())
),
required = Seq("professional_associations", "accomplishment")
)

override protected def run: Future[_] =
service
.createChatCompletion(
messages,
settings = CreateChatCompletionSettings(
model = fireworksModelPrefix + NonOpenAIModelId.llama_v3p3_70b_instruct,
temperature = Some(0),
max_tokens = Some(1000),
// response_format_type = Some(ChatCompletionResponseFormatType.json_object),
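// the schema has to travel inside the response_format object itself, so it is sent through extra_params rather than the typed setting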
extra_params = Map(
"response_format" -> Json.obj(
"type" -> ChatCompletionResponseFormatType.json_object.toString,
"schema" -> Json.toJson(schema)
)
)
)
)
.map(printMessageContent)
}
@@ -0,0 +1,54 @@
package io.cequence.openaiscala.examples.nonopenai

import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
import io.cequence.openaiscala.examples.ExampleBase
import io.cequence.openaiscala.service.OpenAIChatCompletionService

import java.nio.file.{Files, Paths}
import java.util.Base64
import scala.concurrent.Future

/**
* Requires `FIREWORKS_API_KEY` and `EXAMPLE_PDF_PATH` environment variables to be set
*
* Check out the website for more information:
* https://fireworks.ai/blog/document-inlining-launch
*/
object FireworksAIDocumentInliningLocal extends ExampleBase[OpenAIChatCompletionService] {

private lazy val localPdfPath = sys.env("EXAMPLE_PDF_PATH")

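// read the local PDF and Base64-encode it so it can be embedded as a data URL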
private val base64Pdf = {
val pdfBytes = Files.readAllBytes(Paths.get(localPdfPath))
Base64.getEncoder.encodeToString(pdfBytes)
}

private val fireworksModelPrefix = "accounts/fireworks/models/"
override val service: OpenAIChatCompletionService = ChatCompletionProvider.fireworks

val messages: Seq[BaseMessage] = Seq(
SystemMessage("You are a helpful assistant."),
UserSeqMessage(
Seq(
TextContent("What are the candidate's BA and MBA GPAs?"),
ImageURLContent(
s"data:application/pdf;base64,${base64Pdf}#transform=inline"
)
)
)
)

override protected def run: Future[_] =
service
.createChatCompletion(
messages,
settings = CreateChatCompletionSettings(
model =
fireworksModelPrefix + NonOpenAIModelId.llama_v3p3_70b_instruct, // phi_3_vision_128k_instruct
temperature = Some(0),
max_tokens = Some(1000)
)
)
.map(printMessageContent)
}
