Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,144 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#if canImport(FoundationModels)
import Foundation
import FoundationModels

@available(iOS 26.0, macOS 26.0, *)
@available(tvOS, unavailable)
@available(watchOS, unavailable)
extension FoundationModels.LanguageModelSession: ModelSession {
  /// Sends `prompt` to the on-device system language model and adapts its reply into the
  /// SDK's `GenerativeModelSession.Response` envelope.
  ///
  /// - Parameters:
  ///   - prompt: Prompt parts; only plain text parts are supported by the on-device model and
  ///     any other part kind traps.
  ///   - schema: Currently unused by this implementation; guided generation derives its schema
  ///     from `type` instead.
  ///   - type: Either `String.self` for free-form text or a `FoundationModels.Generable` type
  ///     for guided generation; any other type traps.
  ///   - includeSchemaInPrompt: Whether the generated schema is embedded in the prompt for
  ///     guided generation.
  ///   - options: Currently unused by this implementation.
  /// - Returns: The generated content alongside raw-content and raw-response metadata.
  func respond<Content>(to prompt: [any PartsRepresentable], schema: FirebaseAI.GenerationSchema?,
                        generating type: Content.Type, includeSchemaInPrompt: Bool,
                        options: GenerationConfig?) async throws
    -> GenerativeModelSession.Response<Content> {
    let parts = ModelContent(parts: prompt)
    let promptParts = parts.internalParts.map { part in
      // Only plain text input is supported on-device; anything else is a programmer error at
      // this layer, so trap with a descriptive message rather than a bare `fatalError()`.
      guard !(part.isThought ?? false) else {
        fatalError("Thought parts are not supported in on-device prompts.")
      }
      guard let data = part.data else {
        fatalError("Prompt part has no data; only text parts are supported on-device.")
      }
      guard case let .text(string) = data else {
        fatalError("Unsupported prompt part kind; only text parts are supported on-device.")
      }

      return Prompt(string)
    }
    let prompt = Prompt {
      for part in promptParts {
        part
      }
    }

    if type == String.self {
      let response = try await respond(to: prompt)

      // TODO: Reconcile with the `Generable` branch below, which synthesizes a `responseID`
      // via `UUID().uuidString` while this branch passes `nil`.
      let rawContent = FirebaseAI.GeneratedContent(
        kind: response.rawContent.kind,
        id: FirebaseAI.GenerationID(responseID: nil, generationID: response.rawContent.id),
        isComplete: response.rawContent.isComplete
      )

      let modelContent = ModelContent(
        role: "model",
        parts: [InternalPart(.text(response.content), isThought: false, thoughtSignature: nil)]
      )
      let candidate = Candidate(
        content: modelContent,
        safetyRatings: [],
        finishReason: nil,
        citationMetadata: nil
      )
      let rawResponse = GenerateContentResponse(
        candidates: [candidate],
        modelVersion: SystemLanguageModel.modelName
      )

      // `type == String.self` was checked above, so this cast cannot fail in practice.
      guard let content = response.content as? Content else {
        fatalError("Expected content of type \(Content.self) for a String response.")
      }

      return GenerativeModelSession.Response(
        content: content,
        rawContent: rawContent,
        rawResponse: rawResponse
      )
    } else if let contentMetatype = type as? (any FoundationModels.Generable.Type) {
      // Generic helper to explicitly bind the opened existential type to `T`.
      func fetchResponse<T: FoundationModels.Generable>(_ generableType: T
        .Type) async throws -> GenerativeModelSession.Response<Content> {
        let response = try await respond(
          to: prompt,
          generating: generableType,
          includeSchemaInPrompt: includeSchemaInPrompt
        )

        let rawContent = FirebaseAI.GeneratedContent(
          kind: response.rawContent.kind,
          id: FirebaseAI.GenerationID(
            responseID: UUID().uuidString,
            generationID: response.rawContent.id
          ),
          isComplete: response.rawContent.isComplete
        )
        let modelContent = ModelContent(
          role: "model",
          parts: [
            InternalPart(
              .text(response.rawContent.jsonString),
              isThought: false,
              thoughtSignature: nil
            ),
          ]
        )
        let candidate = Candidate(
          content: modelContent,
          safetyRatings: [],
          finishReason: nil,
          citationMetadata: nil
        )
        // Populate `modelVersion` for consistency with the `String` branch above, which also
        // reports the on-device model's name.
        let rawResponse = GenerateContentResponse(
          candidates: [candidate],
          modelVersion: SystemLanguageModel.modelName
        )

        // Cast the generated content back to the outer `Content` type.
        guard let finalContent = response.content as? Content else {
          fatalError("Expected \(Content.self) but received \(T.self)")
        }

        return GenerativeModelSession.Response(
          content: finalContent,
          rawContent: rawContent,
          rawResponse: rawResponse
        )
      }

      // Call the helper, which opens `contentMetatype` and passes it as `T`.
      return try await fetchResponse(contentMetatype)

    } else {
      fatalError("Unsupported type for generation: \(type)")
    }
  }

  /// Streaming is not yet implemented for on-device sessions; always throws.
  func streamResponse<Content, PartialContent>(to prompt: [any PartsRepresentable],
                                               schema: FirebaseAI.GenerationSchema?,
                                               generating type: Content.Type,
                                               includeSchemaInPrompt: Bool,
                                               options: GenerationConfig?)
    throws -> sending GenerativeModelSession.ResponseStream<Content, PartialContent> {
    // TODO: Create a new error type
    throw NSError(
      domain: Constants.baseErrorDomain,
      code: 0,
      userInfo: [NSLocalizedDescriptionKey: "Hybrid streaming support is not yet implemented."]
    )
  }
}
#endif // canImport(FoundationModels)
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#if canImport(FoundationModels)
import FoundationModels

// TODO: Wrap `FoundationModels.SystemLanguageModel` in type-erased box to simplify iOS 15 hybrid.
@available(iOS 26.0, macOS 26.0, *)
@available(tvOS, unavailable)
@available(watchOS, unavailable)
extension FoundationModels.SystemLanguageModel: LanguageModel {
  /// Stable identifier reported for the on-device system language model.
  static let modelName = "apple-foundation-models-system-language-model"

  var modelName: String { FoundationModels.SystemLanguageModel.modelName }

  /// Starts an on-device session, translating any SDK tool wrappers into their underlying
  /// `FoundationModels.Tool` instances; function declarations of other kinds are skipped.
  func startSession(tools: [any ToolRepresentable]?, instructions: String?) -> any ModelSession {
    let afmTools: [any FoundationModels.Tool] = (tools ?? []).flatMap { tool in
      let declarations = tool.toolRepresentation.functionDeclarations ?? []
      return declarations.compactMap { declaration -> (any FoundationModels.Tool)? in
        // Only `.foundationModels` declarations carry an on-device tool; ignore the rest.
        guard case let .foundationModels(wrapped) = declaration.kind else { return nil }
        guard let afmTool = wrapped as? (any FoundationModels.Tool) else {
          fatalError("Unexpected tool type: \(tool)")
        }
        return afmTool
      }
    }
    return LanguageModelSession(tools: afmTools, instructions: instructions)
  }
}
#endif // canImport(FoundationModels)
29 changes: 23 additions & 6 deletions FirebaseAI/Sources/FirebaseAI.swift
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,9 @@ public final class FirebaseAI: Sendable {

// TODO: Remove the `#if compiler(>=6.2.3)` when Xcode 26.2 is the minimum supported version.
#if compiler(>=6.2.3)

// TODO: Add public API for instantiating models to use with hybrid GenerativeModelSession.

/// Creates a new `GenerativeModelSession` with the given model.
///
/// - Important: **Public Preview** - This API is a public preview and may be subject to change.
Expand All @@ -122,14 +125,28 @@ public final class FirebaseAI: Sendable {
/// - instructions: System instructions that direct the model's behavior.
public func generativeModelSession(model: String, tools: [any ToolRepresentable]? = nil,
instructions: String? = nil) -> GenerativeModelSession {
let tools = tools?.map { $0.toolRepresentation }
let model = generativeModel(
modelName: model,
tools: tools,
systemInstruction: instructions.map { ModelContent(role: "system", parts: $0) }
let geminiModel = geminiModel(modelName: model)

return generativeModelSession(models: [geminiModel], tools: tools, instructions: instructions)
}

// TODO: Update this testing API for hybrid GenerativeModelSession.
func geminiModel(modelName: String, safetySettings: [SafetySetting]? = nil,
toolConfig: ToolConfig? = nil) -> any LanguageModel {
return GeminiModel(
modelName: modelName,
modelResourceName: modelResourceName(modelName: modelName),
firebaseInfo: firebaseInfo,
apiConfig: apiConfig,
safetySettings: safetySettings,
toolConfig: toolConfig
)
}

return GenerativeModelSession(model: model)
// TODO: Update this testing API for hybrid GenerativeModelSession.
func generativeModelSession(models: [any LanguageModel], tools: [any ToolRepresentable]? = nil,
instructions: String? = nil) -> GenerativeModelSession {
return GenerativeModelSession(models: models, tools: tools, instructions: instructions)
}

#if canImport(FoundationModels)
Expand Down
15 changes: 15 additions & 0 deletions FirebaseAI/Sources/GenerateContentResponse.swift
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,8 @@ public struct GenerateContentResponse: Sendable {

let responseID: String?

let modelVersion: String?

/// The response's content as text, if it exists.
///
/// - Note: This does not include thought summaries; see ``thoughtSummary`` for more details.
Expand Down Expand Up @@ -124,6 +126,17 @@ public struct GenerateContentResponse: Sendable {
self.promptFeedback = promptFeedback
self.usageMetadata = usageMetadata
responseID = nil
modelVersion = nil
}

/// Memberwise initializer that, unlike the convenience form above, also accepts the
/// response-level `responseID` and `modelVersion` metadata (both default to `nil`).
init(candidates: [Candidate], promptFeedback: PromptFeedback? = nil,
usageMetadata: UsageMetadata? = nil, responseID: String? = nil,
modelVersion: String? = nil) {
self.candidates = candidates
self.promptFeedback = promptFeedback
self.usageMetadata = usageMetadata
self.responseID = responseID
self.modelVersion = modelVersion
}

func text(isThought: Bool) -> String? {
Expand Down Expand Up @@ -448,6 +461,7 @@ extension GenerateContentResponse: Decodable {
case promptFeedback
case usageMetadata
case responseID = "responseId"
case modelVersion
}

public init(from decoder: Decoder) throws {
Expand All @@ -474,6 +488,7 @@ extension GenerateContentResponse: Decodable {
promptFeedback = try container.decodeIfPresent(PromptFeedback.self, forKey: .promptFeedback)
usageMetadata = try container.decodeIfPresent(UsageMetadata.self, forKey: .usageMetadata)
responseID = try container.decodeIfPresent(String.self, forKey: .responseID)
modelVersion = try container.decodeIfPresent(String.self, forKey: .modelVersion)
}
}

Expand Down
Loading
Loading