Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 18 additions & 3 deletions src/google/adk/agents/llm_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -285,7 +285,7 @@ class LlmAgent(BaseAgent):
"""The additional content generation configurations.

NOTE: not all fields are usable, e.g. tools must be configured via `tools`,
thinking_config must be configured via `planner` in LlmAgent.
thinking_config can be configured here or via the `planner`. If both are set, the planner's configuration takes precedence.

For example: use this config to adjust model temperature, configure safety
settings, etc.
Expand Down Expand Up @@ -849,8 +849,6 @@ def validate_generate_content_config(
) -> types.GenerateContentConfig:
if not generate_content_config:
return types.GenerateContentConfig()
if generate_content_config.thinking_config:
raise ValueError('Thinking config should be set via LlmAgent.planner.')
if generate_content_config.tools:
raise ValueError('All tools must be set via LlmAgent.tools.')
if generate_content_config.system_instruction:
Expand All @@ -863,6 +861,23 @@ def validate_generate_content_config(
)
return generate_content_config

  @override
  def model_post_init(self, __context: Any) -> None:
    """Warns when two competing thinking configurations are supplied.

    Runs after pydantic field validation. If `thinking_config` is set both on
    `generate_content_config` and on the agent's planner, a `UserWarning` is
    emitted so the user knows the planner's configuration wins (the planner
    overwrites the request config at request-build time).

    Args:
      __context: The pydantic post-init context, forwarded to the base class.
    """
    super().model_post_init(__context)

    # Note: Using getattr to check both locations for thinking_config.
    # getattr with a None default tolerates `generate_content_config` or
    # `planner` being None, and planner implementations that have no
    # `thinking_config` attribute at all.
    if getattr(
        self.generate_content_config, 'thinking_config', None
    ) and getattr(self.planner, 'thinking_config', None):
      warnings.warn(
          'Both `thinking_config` in `generate_content_config` and a '
          'planner with `thinking_config` are provided. The '
          "planner's configuration will take precedence.",
          UserWarning,
          # stacklevel=3 points the warning at the user's agent-construction
          # call site rather than at pydantic internals.
          stacklevel=3,
      )

@classmethod
@experimental
def _resolve_tools(
Expand Down
10 changes: 10 additions & 0 deletions src/google/adk/planners/built_in_planner.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations

import logging
from typing import List
from typing import Optional

Expand All @@ -23,6 +26,8 @@
from ..models.llm_request import LlmRequest
from .base_planner import BasePlanner

logger = logging.getLogger('google_adk.' + __name__)


class BuiltInPlanner(BasePlanner):
"""The built-in planner that uses model's built-in thinking features.
Expand Down Expand Up @@ -57,6 +62,11 @@ def apply_thinking_config(self, llm_request: LlmRequest) -> None:
"""
if self.thinking_config:
llm_request.config = llm_request.config or types.GenerateContentConfig()
if llm_request.config.thinking_config:
logger.debug(
'Overwriting `thinking_config` from `generate_content_config` with '
'the one provided by the `BuiltInPlanner`.'
)
llm_request.config.thinking_config = self.thinking_config

@override
Expand Down
72 changes: 46 additions & 26 deletions src/google/adk/sessions/migration/README.md
Original file line number Diff line number Diff line change
@@ -1,31 +1,31 @@
# Process for a New Schema Version
# Process for Adding a New Schema Version

This document outlines the steps required to introduce a new database schema
version for `DatabaseSessionService`. Let's assume you are introducing schema
version `2.0`, migrating from `1.0`.

## 1. Update SQLAlchemy Models

Modify the SQLAlchemy model classes (`StorageSession`, `StorageEvent`,
`StorageAppState`, `StorageUserState`, `StorageMetadata`) in
`database_session_service.py` to reflect the new `2.0` schema. This could
involve adding new `mapped_column` definitions, changing types, or adding new
classes for new tables.
Fork from the latest schema version in `google/adk/sessions/schemas/` folder and
modify the SQLAlchemy model classes (`StorageSession`, `StorageEvent`,
`StorageAppState`, `StorageUserState`, `StorageMetadata`) to reflect the new
`2.0` schema, call it `v2.py`. Changes might be adding new `mapped_column`
definitions, changing types, or adding new classes for new tables.

## 2. Create a New Migration Script

You need to create a script that migrates data from schema `1.0` to `2.0`.

* Create a new file, for example:
`google/adk/sessions/migration/migrate_1_0_to_2_0.py`.
`google/adk/sessions/migration/migrate_from_1_0_to_2_0.py`.
* This script must contain a `migrate(source_db_url: str, dest_db_url: str)`
function, similar to `migrate_from_sqlalchemy_pickle.py`.
* Inside this function:
* Connect to the `source_db_url` (which has schema 1.0) and `dest_db_url`
engines using SQLAlchemy.
* **Important**: Create the tables in the destination database using the
new 2.0 schema definition by calling
`dss.Base.metadata.create_all(dest_engine)`.
`v2.Base.metadata.create_all(dest_engine)`.
* Read data from the source tables (schema 1.0). The recommended way to do
this without relying on outdated models is to use `sqlalchemy.text`,
like:
Expand All @@ -38,19 +38,19 @@ You need to create a script that migrates data from schema `1.0` to `2.0`.

* For each row read from the source, transform the data as necessary to
fit the `2.0` schema, and create an instance of the corresponding new
SQLAlchemy model (e.g., `dss.StorageSession(...)`).
SQLAlchemy model (e.g., `v2.StorageSession(...)`).
* Add these new `2.0` objects to the destination session, ideally using
`dest_session.merge()` to upsert.
* After migrating data for all tables, ensure the destination database is
marked with the new schema version:
marked with the new schema version using the `adk_internal_metadata`
table:

```python
from google.adk.sessions import database_session_service as dss
from google.adk.sessions.migration import _schema_check
from google.adk.sessions.migration import _schema_check_utils
...
dest_session.merge(
dss.StorageMetadata(
key=_schema_check.SCHEMA_VERSION_KEY,
v2.StorageMetadata(
key=_schema_check_utils.SCHEMA_VERSION_KEY,
value="2.0",
)
)
Expand All @@ -59,35 +59,38 @@ You need to create a script that migrates data from schema `1.0` to `2.0`.

## 3. Update Schema Version Constant

You need to update `CURRENT_SCHEMA_VERSION` in
`google/adk/sessions/migration/_schema_check.py` to reflect the new version:
Add a constant for the new version and point `LATEST_SCHEMA_VERSION` at it in
`google/adk/sessions/migration/_schema_check_utils.py`:

```python
CURRENT_SCHEMA_VERSION = "2.0"
SCHEMA_VERSION_2_0 = "2.0"
LATEST_SCHEMA_VERSION = SCHEMA_VERSION_2_0
```

This will also update `LATEST_VERSION` in `migration_runner.py`, as it uses this
constant.

## 4. Register the New Migration in Migration Runner
## 4. Register the New Migration Script in Migration Runner

In `google/adk/sessions/migration/migration_runner.py`, import your new
migration script and add it to the `MIGRATIONS` dictionary. This tells the
runner how to get from version `1.0` to `2.0`. For example:

```python
from google.adk.sessions.migration import _schema_check
from google.adk.sessions.migration import _schema_check_utils
from google.adk.sessions.migration import migrate_from_sqlalchemy_pickle
from google.adk.sessions.migration import migrate_1_0_to_2_0
from google.adk.sessions.migration import migrate_from_1_0_to_2_0
...
MIGRATIONS = {
_schema_check.SCHEMA_VERSION_0_1_PICKLE: (
_schema_check.SCHEMA_VERSION_1_0_JSON,
# Previous migrations
_schema_check_utils.SCHEMA_VERSION_0_PICKLE: (
_schema_check_utils.SCHEMA_VERSION_1_JSON,
migrate_from_sqlalchemy_pickle.migrate,
),
_schema_check.SCHEMA_VERSION_1_0_JSON: (
"2.0",
migrate_1_0_to_2_0.migrate,
# Your new migration
_schema_check_utils.SCHEMA_VERSION_1_JSON: (
_schema_check_utils.SCHEMA_VERSION_2_0,
migrate_from_1_0_to_2_0.migrate,
),
}
```
Expand All @@ -100,10 +103,27 @@ creation), update the methods within `DatabaseSessionService` (`create_session`,
`get_session`, `append_event`, etc.) in `database_session_service.py`
accordingly.

The `DatabaseSessionService` is designed to stay backward-compatible with the
previous schema for a few releases (at least 2). It detects the schema version
of the current database and continues to function correctly if that database
still uses the previous schema; for new databases, it creates tables using the
latest schema. Therefore, you should modify the `_prepare_tables` method and
the `DatabaseSessionService` methods (`create_session`, `get_session`,
`append_event`, etc.) to branch on the `_db_schema_version` variable
accordingly.

## 6. CLI Command Changes

No changes are needed for the Click command definition in `cli_tools_click.py`.
The `adk migrate session` command calls `migration_runner.upgrade()`, which will
now automatically detect the source database version and apply the necessary
migration steps (e.g., `0.1 -> 1.0 -> 2.0`, or `1.0 -> 2.0`) to reach
`LATEST_VERSION`.
`LATEST_VERSION`.

## 7. Deprecate the Previous Schema

After a few releases (at least 2), remove the logic for the previous schema.
Use only the latest schema in `DatabaseSessionService`, and raise an exception
when a legacy schema version is detected. Keep the schema files such as
`schemas/v1.py`, along with the migration scripts, for documentation purposes
and for users who have not yet migrated.
64 changes: 56 additions & 8 deletions tests/unittests/agents/test_llm_agent_fields.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@

"""Unit tests for canonical_xxx fields in LlmAgent."""

import logging
from typing import Any
from typing import Optional
from unittest import mock
Expand All @@ -27,6 +28,7 @@
from google.adk.models.lite_llm import LiteLlm
from google.adk.models.llm_request import LlmRequest
from google.adk.models.registry import LLMRegistry
from google.adk.planners.built_in_planner import BuiltInPlanner
from google.adk.sessions.in_memory_session_service import InMemorySessionService
from google.adk.tools.google_search_tool import google_search
from google.adk.tools.google_search_tool import GoogleSearchTool
Expand Down Expand Up @@ -234,17 +236,35 @@ def _before_model_callback(
assert agent.before_model_callback is not None


def test_validate_generate_content_config_thinking_config_throw():
with pytest.raises(ValueError):
_ = LlmAgent(
name='test_agent',
generate_content_config=types.GenerateContentConfig(
thinking_config=types.ThinkingConfig()
),
)
def test_validate_generate_content_config_thinking_config_allow():
  """Verifies a thinking_config set via generate_content_config is accepted."""
  thinking = types.ThinkingConfig(include_thoughts=True)
  content_config = types.GenerateContentConfig(thinking_config=thinking)

  agent = LlmAgent(
      name='test_agent',
      generate_content_config=content_config,
  )

  assert agent.generate_content_config.thinking_config.include_thoughts is True


def test_thinking_config_precedence_warning():
  """Verifies a UserWarning fires when both config and planner set thinking."""
  with pytest.warns(
      UserWarning, match="planner's configuration will take precedence"
  ):
    LlmAgent(
        name='test_agent',
        generate_content_config=types.GenerateContentConfig(
            thinking_config=types.ThinkingConfig(include_thoughts=True)
        ),
        planner=BuiltInPlanner(
            thinking_config=types.ThinkingConfig(include_thoughts=True)
        ),
    )


def test_validate_generate_content_config_tools_throw():
"""Tests that tools cannot be set directly in config."""
with pytest.raises(ValueError):
_ = LlmAgent(
name='test_agent',
Expand All @@ -255,6 +275,7 @@ def test_validate_generate_content_config_tools_throw():


def test_validate_generate_content_config_system_instruction_throw():
"""Tests that system instructions cannot be set directly in config."""
with pytest.raises(ValueError):
_ = LlmAgent(
name='test_agent',
Expand All @@ -265,6 +286,8 @@ def test_validate_generate_content_config_system_instruction_throw():


def test_validate_generate_content_config_response_schema_throw():
"""Tests that response schema cannot be set directly in config."""

class Schema(BaseModel):
pass

Expand Down Expand Up @@ -471,3 +494,28 @@ def test_agent_with_litellm_string_model(model_name):
agent = LlmAgent(name='test_agent', model=model_name)
assert isinstance(agent.canonical_model, LiteLlm)
assert agent.canonical_model.model == model_name


def test_builtin_planner_overwrite_logging(caplog):
  """Tests that the planner logs a DEBUG message when overwriting a config."""
  planner = BuiltInPlanner(
      thinking_config=types.ThinkingConfig(include_thoughts=True)
  )

  # The request is pre-populated with its own thinking_config, which the
  # planner is expected to overwrite (and log about).
  request = LlmRequest(
      contents=[],
      config=types.GenerateContentConfig(
          thinking_config=types.ThinkingConfig(include_thoughts=True)
      ),
  )

  planner_logger = 'google_adk.google.adk.planners.built_in_planner'
  with caplog.at_level(logging.DEBUG, logger=planner_logger):
    planner.apply_thinking_config(request)

  assert (
      'Overwriting `thinking_config` from `generate_content_config`'
      in caplog.text
  )
Loading