
Commit 2aae8d9

Added response format to the chat completions request body
1 parent 0464ca2

3 files changed (+21, -0 lines)

Sources/OpenAI/Chat/ChatCompletionsBody.swift

Lines changed: 3 additions & 0 deletions
@@ -64,6 +64,9 @@ public extension ChatCompletions {
     ///
     /// [See more information about frequency and presence penalties.](/docs/api-reference/parameter-details)
     public var frequencyPenalty: Double?
+    /// An object specifying the format that the model must output. Compatible with GPT-4 Turbo and gpt-3.5-turbo-1106.
+    /// Setting `type` to `.jsonObject` enables JSON mode, which guarantees the message the model generates is valid JSON.
+    public var responseFormat: ResponseFormat?
     /// Modify the likelihood of specified tokens appearing in the completion.
     ///
     /// Accepts a json object that maps tokens (specified by their token ID in the tokenizer) to an associated bias value from -100 to 100. Mathematically, the bias is added to the logits generated by the model prior to sampling. The exact effect will vary per model, but values between -1 and 1 should decrease or increase likelihood of selection; values like -100 or 100 should result in a ban or exclusive selection of the relevant token.
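For context, the new property corresponds to the response_format object in the Chat Completions request body. Below is a minimal sketch of that mapping: RequestBodySketch is a hypothetical stand-in for the real body type, the local copy of ResponseFormat is taken from this commit, and the snake_case key strategy is an assumption about how the body is encoded.

import Foundation

// Copied from ResponseFormat.swift in this commit so the sketch is self-contained.
struct ResponseFormat: Encodable {
    enum `Type`: String, Encodable { case jsonObject = "json_object" }
    let type: `Type`
}

// Hypothetical stand-in for the request body type; only `responseFormat` mirrors this commit.
struct RequestBodySketch: Encodable {
    let model: String
    let responseFormat: ResponseFormat?
}

let encoder = JSONEncoder()
encoder.keyEncodingStrategy = .convertToSnakeCase  // assumption: the real body is sent with snake_case keys

let body = RequestBodySketch(
    model: "gpt-3.5-turbo-1106",
    responseFormat: ResponseFormat(type: .jsonObject)
)
print(String(data: try! encoder.encode(body), encoding: .utf8)!)
// contains "response_format":{"type":"json_object"} (top-level key order is not guaranteed)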

Sources/OpenAI/Chat/ChatWrapper.swift

Lines changed: 4 additions & 0 deletions
@@ -21,6 +21,7 @@ public struct ChatWrapper {
         maxTokens: Int? = nil,
         presencePenalty: Double? = nil,
         frequencyPenalty: Double? = nil,
+        responseFormat: ResponseFormat? = nil,
         logitBias: [Int: Int]? = nil,
         user: String? = nil
     ) async throws -> ChatCompletions.Response {
@@ -35,6 +36,7 @@ public struct ChatWrapper {
             maxTokens: maxTokens,
             presencePenalty: presencePenalty,
             frequencyPenalty: frequencyPenalty,
+            responseFormat: responseFormat,
             logitBias: logitBias,
             user: user
         )
@@ -58,6 +60,7 @@ public struct ChatWrapper {
         maxTokens: Int? = nil,
         presencePenalty: Double? = nil,
         frequencyPenalty: Double? = nil,
+        responseFormat: ResponseFormat? = nil,
         logitBias: [Int: Int]? = nil,
         user: String? = nil
     ) -> AsyncThrowingStream<ChatCompletions.Response.Chunk, Error> {
@@ -72,6 +75,7 @@ public struct ChatWrapper {
             maxTokens: maxTokens,
             presencePenalty: presencePenalty,
             frequencyPenalty: frequencyPenalty,
+            responseFormat: responseFormat,
             logitBias: logitBias,
             user: user
         )
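Once responseFormat: ResponseFormat(type: .jsonObject) is passed through either wrapper call, the assistant message content is guaranteed to be valid JSON and can be decoded straight into a Codable type. A minimal sketch: the Weather type is hypothetical, and the content string stands in for the message text you would pull out of the returned ChatCompletions.Response.

import Foundation

// Hypothetical payload shape that the prompt asked the model to produce.
struct Weather: Decodable {
    let city: String
    let temperatureC: Double
}

// Stand-in for the assistant message content taken from the returned response;
// in JSON mode it is guaranteed to be valid JSON.
let content = #"{"city":"Paris","temperatureC":11.5}"#

do {
    let weather = try JSONDecoder().decode(Weather.self, from: Data(content.utf8))
    print(weather.city, weather.temperatureC)  // Paris 11.5
} catch {
    print("Content did not match the expected shape:", error)
}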
ResponseFormat.swift

Lines changed: 14 additions & 0 deletions

@@ -0,0 +1,14 @@
+//
+// ResponseFormat.swift
+// OpenAI
+//
+// Created by Firdavs Khaydarov on 27/01/2024.
+//
+
+public struct ResponseFormat: Encodable {
+    public enum `Type`: String, Encodable {
+        case jsonObject = "json_object"
+    }
+
+    public let type: `Type`
+}
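A quick check of how the new type serializes. This sketch assumes it is compiled in the same target as the struct above, since the synthesized memberwise initializer is internal.

import Foundation

// `.jsonObject` is inferred from the parameter's declared type, so the
// backtick-escaped `Type` name never has to be spelled out at the call site.
let format = ResponseFormat(type: .jsonObject)

let data = try! JSONEncoder().encode(format)
print(String(data: data, encoding: .utf8)!)  // {"type":"json_object"}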
