When I run the code in the notebook linked below, the following errors occur.
https://github.com/tabtoyou/KoLLaVA/blob/main/KoLLaVA-v1-Kovicuna-7b_inference.ipynb
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch
import os
from llava.conversation import conv_templates, SeparatorStyle
from llava.utils import disable_torch_init
from transformers import CLIPVisionModel, CLIPImageProcessor, StoppingCriteria
from llava.model import *
from llava.model.utils import KeywordsStoppingCriteria
Running it produces the following error:
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
Cell In[11], line 4
2 import torch
3 import os
----> 4 from llava.conversation import conv_templates, SeparatorStyle
5 from llava.utils import disable_torch_init
6 from transformers import CLIPVisionModel, CLIPImageProcessor, StoppingCriteria
File /home/share/LSE/hde-intg/KoLLaVA/llava/__init__.py:1
----> 1 from .model import LlavaLlamaForCausalLM
File /home/share/LSE/hde-intg/KoLLaVA/llava/model/__init__.py:1
----> 1 from .language_model.llava_llama import LlavaLlamaForCausalLM, LlavaConfig
2 from .language_model.llava_mpt import LlavaMPTForCausalLM, LlavaMPTConfig
File /home/share/LSE/hde-intg/KoLLaVA/llava/model/language_model/llava_llama.py:40
36 def __init__(self, config: LlamaConfig):
37 super(LlavaLlamaModel, self).__init__(config)
---> 40 class LlavaLlamaForCausalLM(LlamaForCausalLM, LlavaMetaForCausalLM):
41 config_class = LlavaConfig
43 def __init__(self, config):
TypeError: metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases
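For context on what this TypeError means: Python raises a metaclass conflict whenever a class inherits from two bases whose metaclasses are unrelated, i.e. neither is a subclass of the other. The snippet below is a minimal, generic reproduction with made-up class names, not the KoLLaVA code itself; in LLaVA-derived repos this error is commonly reported as a symptom of running against a transformers release other than the one the repo pins, so matching the pinned version is the first thing to try.

# Minimal reproduction of the metaclass conflict (illustrative names only).
class MetaA(type):
    pass

class MetaB(type):
    pass

class A(metaclass=MetaA):
    pass

class B(metaclass=MetaB):
    pass

try:
    # Raises: TypeError: metaclass conflict: the metaclass of a derived
    # class must be a (non-strict) subclass of the metaclasses of all its bases
    class C(A, B):
        pass
except TypeError as err:
    print(err)

# One generic resolution is a combined metaclass that subclasses both:
class MetaAB(MetaA, MetaB):
    pass

class FixedC(A, B, metaclass=MetaAB):
    pass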
A second attempt with the same imports fails with a different error:
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
Cell In[3], line 4
2 import torch
3 import os
----> 4 from llava.conversation import conv_templates, SeparatorStyle
5 from llava.utils import disable_torch_init
6 from transformers import CLIPVisionModel, CLIPImageProcessor, StoppingCriteria
File /home/share/LSE/hde-intg/KoLLaVA/llava/__init__.py:1
----> 1 from .model import LlavaLlamaForCausalLM
File /home/share/LSE/hde-intg/KoLLaVA/llava/model/__init__.py:1
----> 1 from .language_model.llava_llama import LlavaLlamaForCausalLM, LlavaConfig
2 from .language_model.llava_mpt import LlavaMPTForCausalLM, LlavaMPTConfig
File /home/share/LSE/hde-intg/KoLLaVA/llava/model/language_model/llava_llama.py:110
107 _inputs['images'] = images
108 return _inputs
--> 110 AutoConfig.register("llava", LlavaConfig)
111 AutoModelForCausalLM.register(LlavaConfig, LlavaLlamaForCausalLM)
File ~/envs/hde-intg-dev-3.10/lib/python3.10/site-packages/transformers/models/auto/configuration_auto.py:1074, in AutoConfig.register(model_type, config, exist_ok)
1068 if issubclass(config, PretrainedConfig) and config.model_type != model_type:
1069 raise ValueError(
1070 "The config you are passing has a `model_type` attribute that is not consistent with the model type "
1071 f"you passed (config has {config.model_type} and you passed {model_type}. Fix one of those so they "
1072 "match!"
1073 )
-> 1074 CONFIG_MAPPING.register(model_type, config, exist_ok=exist_ok)
File ~/envs/hde-intg-dev-3.10/lib/python3.10/site-packages/transformers/models/auto/configuration_auto.py:773, in _LazyConfigMapping.register(self, key, value, exist_ok)
769 """
770 Register a new configuration in this mapping.
771 """
772 if key in self._mapping.keys() and not exist_ok:
--> 773 raise ValueError(f"'{key}' is already used by a Transformers config, pick another name.")
774 self._extra_content[key] = value
ValueError: 'llava' is already used by a Transformers config, pick another name.
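The message itself points at the cause: the installed transformers release already ships a built-in "llava" config (the exist_ok check in the traceback tests the built-in mapping), so the repository's AutoConfig.register("llava", ...) call collides with it. Below is a small, self-contained demonstration of the collision and of the exist_ok escape hatch that is visible in the AutoConfig.register signature above; MyLlavaConfig is a hypothetical stand-in for the repo's LlavaConfig.

from transformers import AutoConfig, PretrainedConfig

# Hypothetical stand-in for KoLLaVA's LlavaConfig.
class MyLlavaConfig(PretrainedConfig):
    model_type = "llava"

try:
    # On a transformers release that bundles its own "llava" config this
    # raises exactly the ValueError from the traceback above.
    AutoConfig.register("llava", MyLlavaConfig)
except ValueError as err:
    print(err)  # 'llava' is already used by a Transformers config, ...

# exist_ok=True lets the custom config shadow the bundled entry instead:
AutoConfig.register("llava", MyLlavaConfig, exist_ok=True)

Patching the two register calls at the end of llava_llama.py the same way would silence the name clash, assuming the installed AutoModelForCausalLM.register also accepts exist_ok; but since a bundled "llava" implies a fairly new transformers, the more reliable fix is usually to install the exact transformers version pinned in the KoLLaVA requirements.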