From 40758c4c6c456b3104e51b3a3c55ac54064a0750 Mon Sep 17 00:00:00 2001
From: Leonid Kuligin
Date: Tue, 7 Jan 2025 15:02:05 +0100
Subject: [PATCH] added google_search_tool for gemini-2 (#677)

---
 .../langchain_google_vertexai/chat_models.py    | 17 +++++++++++------
 .../functions_utils.py                          |  2 ++
 libs/vertexai/poetry.lock                       |  2 +-
 libs/vertexai/pyproject.toml                    |  2 +-
 .../integration_tests/test_anthropic_cache.py   |  4 ++++
 .../tests/integration_tests/test_chat_models.py |  1 +
 6 files changed, 20 insertions(+), 8 deletions(-)

diff --git a/libs/vertexai/langchain_google_vertexai/chat_models.py b/libs/vertexai/langchain_google_vertexai/chat_models.py
index 4d61d91d..9f341a3b 100644
--- a/libs/vertexai/langchain_google_vertexai/chat_models.py
+++ b/libs/vertexai/langchain_google_vertexai/chat_models.py
@@ -652,12 +652,7 @@ class ChatVertexAI(_VertexAICommon, BaseChatModel):
     """Google Cloud Vertex AI chat model integration.

     Setup:
-        You must have the langchain-google-vertexai Python package installed
-        .. code-block:: bash
-
-            pip install -U langchain-google-vertexai
-
-        And either:
+        You must either:
             - Have credentials configured for your environment (gcloud, workload identity, etc...)
             - Store the path to a service account JSON file as the GOOGLE_APPLICATION_CREDENTIALS environment variable

@@ -803,6 +798,16 @@ class GetPopulation(BaseModel):

     See ``ChatVertexAI.bind_tools()`` method for more.

+    Use Search with Gemini 2:
+        .. code-block:: python
+
+            from google.cloud.aiplatform_v1beta1.types import Tool as VertexTool
+            llm = ChatVertexAI(model="gemini-2.0-flash-exp")
+            resp = llm.invoke(
+                "When is the next total solar eclipse in US?",
+                tools=[VertexTool(google_search={})],
+            )
+
     Structured output:
         .. code-block:: python

diff --git a/libs/vertexai/langchain_google_vertexai/functions_utils.py b/libs/vertexai/langchain_google_vertexai/functions_utils.py
index f5422b40..3966573e 100644
--- a/libs/vertexai/langchain_google_vertexai/functions_utils.py
+++ b/libs/vertexai/langchain_google_vertexai/functions_utils.py
@@ -283,6 +283,8 @@ def _format_to_gapic_tool(tools: _ToolsType) -> gapic.Tool:
                 gapic_tool.google_search_retrieval = rt.google_search_retrieval
             if "function_declarations" in rt:
                 gapic_tool.function_declarations.extend(rt.function_declarations)
+            if "google_search" in rt:
+                gapic_tool.google_search = rt.google_search
         elif isinstance(tool, dict):
             # not _ToolDictLike
             if not any(
diff --git a/libs/vertexai/poetry.lock b/libs/vertexai/poetry.lock
index 169081fa..f1c4c3a5 100644
--- a/libs/vertexai/poetry.lock
+++ b/libs/vertexai/poetry.lock
@@ -2935,4 +2935,4 @@ mistral = ["langchain-mistralai"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.9,<4.0"
-content-hash = "c6e2732767b069713395406bd4bd1ddc145e5a51001f82ac9aecee218a8c02ac"
+content-hash = "2f8e8db77aa720f190a8e2b7189eb3dfc1990b3d14cf04c5961f8c39b8f5a0d8"
diff --git a/libs/vertexai/pyproject.toml b/libs/vertexai/pyproject.toml
index 1f0b47bd..83b2295b 100644
--- a/libs/vertexai/pyproject.toml
+++ b/libs/vertexai/pyproject.toml
@@ -13,7 +13,7 @@ license = "MIT"
 [tool.poetry.dependencies]
 python = ">=3.9,<4.0"
 langchain-core = ">=0.3.27,<0.4"
-google-cloud-aiplatform = "^1.73.0"
+google-cloud-aiplatform = "^1.75.0"
 google-cloud-storage = "^2.18.0"
 # optional dependencies
 anthropic = { extras = ["vertexai"], version = ">=0.35.0,<1", optional = true }
diff --git a/libs/vertexai/tests/integration_tests/test_anthropic_cache.py b/libs/vertexai/tests/integration_tests/test_anthropic_cache.py
index 9399331b..fc11e134 100644
--- a/libs/vertexai/tests/integration_tests/test_anthropic_cache.py
+++ b/libs/vertexai/tests/integration_tests/test_anthropic_cache.py
@@ -10,6 +10,7 @@


 @pytest.mark.extended
+@pytest.mark.skip(reason="claude-3-5-v2 not enabled")
 def test_anthropic_system_cache() -> None:
     """Test chat with system message having cache control."""
     project = os.environ["PROJECT_ID"]
@@ -35,6 +36,7 @@ def test_anthropic_system_cache() -> None:


 @pytest.mark.extended
+@pytest.mark.skip(reason="claude-3-5-v2 not enabled")
 def test_anthropic_mixed_cache() -> None:
     """Test chat with different cache control types."""
     project = os.environ["PROJECT_ID"]
@@ -72,6 +74,7 @@ def test_anthropic_mixed_cache() -> None:


 @pytest.mark.extended
+@pytest.mark.skip(reason="claude-3-5-v2 not enabled")
 def test_anthropic_conversation_cache() -> None:
     """Test chat conversation with cache control."""
     project = os.environ["PROJECT_ID"]
@@ -115,6 +118,7 @@ def test_anthropic_conversation_cache() -> None:


 @pytest.mark.extended
+@pytest.mark.skip(reason="claude-3-5-v2 not enabled")
 def test_anthropic_chat_template_cache() -> None:
     """Test chat template with structured content and cache control."""
     project = os.environ["PROJECT_ID"]
diff --git a/libs/vertexai/tests/integration_tests/test_chat_models.py b/libs/vertexai/tests/integration_tests/test_chat_models.py
index f9c860a7..b943f6b1 100644
--- a/libs/vertexai/tests/integration_tests/test_chat_models.py
+++ b/libs/vertexai/tests/integration_tests/test_chat_models.py
@@ -1175,6 +1175,7 @@ def test_multimodal_pdf_input_b64(multimodal_pdf_chain: RunnableSerializable) ->
     assert isinstance(response, AIMessage)


+@pytest.mark.xfail(reason="logprobs are subject to daily quotas")
@pytest.mark.release
 def test_logprobs() -> None:
     llm = ChatVertexAI(model="gemini-1.5-flash", logprobs=2)
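
Usage sketch for the code path this patch adds, mirroring the docstring example
in the chat_models.py hunk above. It assumes the patched langchain-google-vertexai
is installed and Google Cloud credentials are configured; the model name and
prompt are illustrative, not part of the patch:

    # Sketch only: requires this patch and valid Vertex AI credentials.
    from google.cloud.aiplatform_v1beta1.types import Tool as VertexTool
    from langchain_google_vertexai import ChatVertexAI

    llm = ChatVertexAI(model="gemini-2.0-flash-exp")

    # Tools passed at invoke time flow through _format_to_gapic_tool, which
    # now forwards the `google_search` field in addition to
    # `google_search_retrieval` and `function_declarations`.
    resp = llm.invoke(
        "When is the next total solar eclipse in US?",
        tools=[VertexTool(google_search={})],
    )
    print(resp.content)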