From 7ea7b6cfce1a16edcce7ac45ae8f6bc1688fba61 Mon Sep 17 00:00:00 2001
From: c0sogi
Date: Tue, 22 Aug 2023 21:20:59 +0900
Subject: [PATCH] ci fail resolve - 2

---
 llama_api/mixins/completion.py | 2 +-
 llama_api/shared/config.py     | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/llama_api/mixins/completion.py b/llama_api/mixins/completion.py
index 26f03f9..f5404bc 100644
--- a/llama_api/mixins/completion.py
+++ b/llama_api/mixins/completion.py
@@ -24,7 +24,7 @@ class CompletionStatus:
 
 class CompletionMixin:
     """A mixin for modules that support completion generation."""
-    _completion_status: Optional[defaultdict[str, CompletionStatus]] = None
+    _completion_status: Optional["defaultdict[str, CompletionStatus]"] = None
 
     @property
     def completion_status(self) -> Dict[str, CompletionStatus]:
diff --git a/llama_api/shared/config.py b/llama_api/shared/config.py
index 7b2e791..3a9c2e2 100644
--- a/llama_api/shared/config.py
+++ b/llama_api/shared/config.py
@@ -295,11 +295,11 @@ class Config:
         "exllama": GitCloneArgs(
             git_path="https://github.com/turboderp/exllama",
             disk_path="repositories/exllama",
-            options=["recurse-submodules"],
+            options=None,
         ),
         "llama_cpp": GitCloneArgs(
            git_path="https://github.com/abetlen/llama-cpp-python",
            disk_path="repositories/llama_cpp",
-            options=None,
+            options=["--recurse-submodules"],
        ),
    }