From d2db659dfc0b6dc8feddcb5056c8f7ddd5b83ee7 Mon Sep 17 00:00:00 2001
From: peterbanda
Date: Mon, 27 Nov 2023 11:18:36 +0100
Subject: [PATCH] New examples: CreateChatCompletion, GPTVisionWithURL, and GPTVisionWithLocalFile

---
 .../domain/response/FileInfo.scala        |  4 +-
 .../examples/CreateChatCompletion.scala   | 26 +++++++++
 .../examples/GPTVisionWithLocalFile.scala | 53 +++++++++++++++++++
 .../examples/GPTVisionWithURL.scala       | 33 ++++++++++++
 4 files changed, 115 insertions(+), 1 deletion(-)
 create mode 100644 openai-examples/src/main/scala/io/cequence/openaiscala/examples/CreateChatCompletion.scala
 create mode 100644 openai-examples/src/main/scala/io/cequence/openaiscala/examples/GPTVisionWithLocalFile.scala
 create mode 100644 openai-examples/src/main/scala/io/cequence/openaiscala/examples/GPTVisionWithURL.scala

diff --git a/openai-core/src/main/scala/io/cequence/openaiscala/domain/response/FileInfo.scala b/openai-core/src/main/scala/io/cequence/openaiscala/domain/response/FileInfo.scala
index c02a1fcf..c260c2a1 100644
--- a/openai-core/src/main/scala/io/cequence/openaiscala/domain/response/FileInfo.scala
+++ b/openai-core/src/main/scala/io/cequence/openaiscala/domain/response/FileInfo.scala
@@ -8,10 +8,12 @@ case class FileInfo(
   created_at: ju.Date,
   updated_at: Option[ju.Date],
   filename: String,
+  // The intended purpose of the file.
+  // Supported values are fine-tune, fine-tune-results, assistants, and assistants_output.
   purpose: String,
   status: String, // uploaded, processed, pending, error, deleting or deleted
   status_details: Option[String],
-  statistics: Option[FileStatistics]
+  statistics: Option[FileStatistics] // provided by Azure
 )
 
 case class FileStatistics(
diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/CreateChatCompletion.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/CreateChatCompletion.scala
new file mode 100644
index 00000000..46b3a5d3
--- /dev/null
+++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/CreateChatCompletion.scala
@@ -0,0 +1,26 @@
+package io.cequence.openaiscala.examples
+
+import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
+import io.cequence.openaiscala.domain._
+
+import scala.concurrent.Future
+
+object CreateChatCompletion extends Example {
+
+  val messages = Seq(
+    SystemMessage("You are a helpful assistant."),
+    UserMessage("What is the weather like in Norway?")
+  )
+
+  override protected def run: Future[_] =
+    service
+      .createChatCompletion(
+        messages = messages,
+        settings = CreateChatCompletionSettings(
+          model = ModelId.gpt_4_turbo_preview,
+          temperature = Some(0),
+          max_tokens = Some(100)
+        )
+      )
+      .map(printMessageContent)
+}
diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/GPTVisionWithLocalFile.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/GPTVisionWithLocalFile.scala
new file mode 100644
index 00000000..4d1070f1
--- /dev/null
+++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/GPTVisionWithLocalFile.scala
@@ -0,0 +1,53 @@
+package io.cequence.openaiscala.examples
+
+import io.cequence.openaiscala.domain._
+import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
+
+import java.awt.image.RenderedImage
+import java.io.ByteArrayOutputStream
+import java.util.Base64
+import javax.imageio.ImageIO
+import scala.concurrent.Future
+
+object GPTVisionWithLocalFile extends Example {
+
+  // provide a local jpeg here
+  private val localImagePath = sys.env("EXAMPLE_IMAGE_PATH")
+  private val bufferedImage = ImageIO.read(new java.io.File(localImagePath))
+  private val imageBase64Source =
+    Base64.getEncoder.encodeToString(imageToBytes(bufferedImage, "jpeg"))
+
+  val messages = Seq(
+    SystemMessage("You are a helpful assistant."),
+    UserSeqMessage(
+      Seq(
+        TextContent("What is in this picture?"),
+        ImageURLContent(s"data:image/jpeg;base64,${imageBase64Source}")
+      )
+    )
+  )
+
+  override protected def run: Future[_] =
+    service
+      .createChatCompletion(
+        messages = messages,
+        settings = CreateChatCompletionSettings(
+          model = ModelId.gpt_4_vision_preview,
+          temperature = Some(0),
+          max_tokens = Some(300)
+        )
+      )
+      .map(printMessageContent)
+
+  private def imageToBytes(
+    image: RenderedImage,
+    format: String
+  ): Array[Byte] = {
+    val baos = new ByteArrayOutputStream()
+    ImageIO.write(image, format, baos)
+    baos.flush()
+    val imageInByte = baos.toByteArray
+    baos.close()
+    imageInByte
+  }
+}
diff --git a/openai-examples/src/main/scala/io/cequence/openaiscala/examples/GPTVisionWithURL.scala b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/GPTVisionWithURL.scala
new file mode 100644
index 00000000..029f1392
--- /dev/null
+++ b/openai-examples/src/main/scala/io/cequence/openaiscala/examples/GPTVisionWithURL.scala
@@ -0,0 +1,33 @@
+package io.cequence.openaiscala.examples
+
+import io.cequence.openaiscala.domain._
+import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
+
+import scala.concurrent.Future
+
+object GPTVisionWithURL extends Example {
+
+  val messages = Seq(
+    SystemMessage("You are a helpful assistant."),
+    UserSeqMessage(
+      Seq(
+        TextContent("What is in this picture?"),
+        ImageURLContent(
+          "https://upload.wikimedia.org/wikipedia/commons/d/df/Hefeweizen_Glass.jpg"
+        )
+      )
+    )
+  )
+
+  override protected def run: Future[_] =
+    service
+      .createChatCompletion(
+        messages = messages,
+        settings = CreateChatCompletionSettings(
+          model = ModelId.gpt_4_vision_preview,
+          temperature = Some(0),
+          max_tokens = Some(300)
+        )
+      )
+      .map(printMessageContent)
+}
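
Note for readers: the objects added in this patch extend an Example base trait from the openai-examples module (not shown here), which supplies service, printMessageContent, and the run entry point. As a rough orientation only, the sketch below shows how the URL-based vision call could be wired up as a standalone program. It assumes the service is created via OpenAIServiceFactory with an implicit ExecutionContext and Akka Materializer in scope and the API key supplied through the environment, as described in the project's README, and that the reply is available as the first choice's message content; the object name GPTVisionWithURLStandalone and the one-minute timeout are illustrative, not part of the library or of this patch.

import akka.actor.ActorSystem
import akka.stream.Materializer
import io.cequence.openaiscala.domain._
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
import io.cequence.openaiscala.service.OpenAIServiceFactory

import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

// Standalone sketch (assumption-based, see note above); not part of this patch.
object GPTVisionWithURLStandalone extends App {
  implicit val system: ActorSystem = ActorSystem("gpt-vision-example")
  implicit val materializer: Materializer = Materializer(system)

  // Assumes the factory picks up the API key from the environment/config.
  private val service = OpenAIServiceFactory()

  private val messages = Seq(
    SystemMessage("You are a helpful assistant."),
    UserSeqMessage(
      Seq(
        TextContent("What is in this picture?"),
        ImageURLContent(
          "https://upload.wikimedia.org/wikipedia/commons/d/df/Hefeweizen_Glass.jpg"
        )
      )
    )
  )

  // Block on the future here only to keep the sketch linear and easy to follow.
  private val response = Await.result(
    service.createChatCompletion(
      messages = messages,
      settings = CreateChatCompletionSettings(
        model = ModelId.gpt_4_vision_preview,
        temperature = Some(0),
        max_tokens = Some(300)
      )
    ),
    1.minute
  )

  // Print the content of the first returned choice (what the examples above
  // presumably do via printMessageContent).
  println(response.choices.head.message.content)

  // Release the underlying HTTP client and shut down the actor system.
  service.close()
  system.terminate()
}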