Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion ollama/_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -551,7 +551,7 @@ class ShowResponse(SubscriptableBaseModel):

details: Optional[ModelDetails] = None

modelinfo: Optional[Mapping[str, Any]] = Field(alias='model_info')
modelinfo: Optional[Mapping[str, Any]] = Field(default=None, alias='model_info')

parameters: Optional[str] = None

Expand Down
58 changes: 57 additions & 1 deletion tests/test_type_serialization.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

import pytest

from ollama._types import CreateRequest, Image
from ollama._types import CreateRequest, Image, ShowResponse


def test_image_serialization_bytes():
Expand Down Expand Up @@ -92,3 +92,59 @@ def test_create_request_serialization_license_list():
request = CreateRequest(model='test-model', license=['MIT', 'Apache-2.0'])
serialized = request.model_dump()
assert serialized['license'] == ['MIT', 'Apache-2.0']


def test_show_response_without_model_info():
    """
    Test that ShowResponse can be created without model_info field.

    This is a regression test for issue #607 where certain cloud models
    (e.g., glm-4.7:cloud, qwen3-next:80b-cloud, deepseek-v3.2:cloud) return
    responses without the model_info field, causing a ValidationError.
    """
    # Payload mirrors what some cloud models return: every usual field
    # except 'model_info' is present.
    payload = {
        'modelfile': '# Modelfile generated by "ollama show"',
        'template': '{{ .Prompt }}',
        'details': {
            'parent_model': '',
            'format': 'gguf',
            'family': 'glm',
            'families': ['glm'],
            'parameter_size': '9.4B',
            'quantization_level': 'Q4_K_M',
        },
        'capabilities': ['completion'],
        'modified_at': '2025-01-01T00:00:00Z',
    }

    # Validation must succeed even though 'model_info' is absent.
    parsed = ShowResponse.model_validate(payload)

    # The remaining fields round-trip unchanged, and the missing
    # alias-mapped field defaults to None.
    assert parsed.modelfile == '# Modelfile generated by "ollama show"'
    assert parsed.template == '{{ .Prompt }}'
    assert parsed.modelinfo is None  # model_info was not provided
    assert parsed.capabilities == ['completion']

def test_show_response_with_model_info():
    """
    Test that ShowResponse still works correctly when model_info is provided.
    """
    # Companion to the regression test above: when 'model_info' IS present,
    # it must populate the 'modelinfo' attribute via the alias.
    payload = {
        'modelfile': '# Modelfile',
        'template': '{{ .Prompt }}',
        'model_info': {
            'general.architecture': 'llama',
            'general.parameter_count': 7000000000,
        },
        'capabilities': ['completion'],
        'modified_at': '2025-01-01T00:00:00Z',
    }

    parsed = ShowResponse.model_validate(payload)

    info = parsed.modelinfo
    assert info is not None
    assert info['general.architecture'] == 'llama'
    assert info['general.parameter_count'] == 7000000000
    assert parsed.modelfile == '# Modelfile'