Convert tokenizers with openvino_tokenizers #500
```python
@@ -19,6 +19,7 @@

from requests.exceptions import ConnectionError as RequestsConnectionError
from transformers import AutoConfig, AutoTokenizer
from openvino import save_model

from optimum.exporters import TasksManager
from optimum.exporters.onnx import __main__ as optimum_main
```
```python
@@ -46,6 +47,24 @@
logger = logging.getLogger(__name__)


def tokenizer_export(
    tokenizer,
    output: Union[str, Path],
    suffix: Optional[str] = ""
):
    try:
        from openvino_tokenizers import convert_tokenizer

        ov_tokenizer, ov_detokenizer = convert_tokenizer(tokenizer, with_detokenizer=True)
        if isinstance(output, str):
            output = Path(output)
        tokenizer_path = output.joinpath("openvino_tokenizer" + suffix + ".xml")
        detokenizer_path = output.joinpath("openvino_detokenizer" + suffix + ".xml")
        save_model(ov_tokenizer, tokenizer_path)
        save_model(ov_detokenizer, detokenizer_path)
    except Exception as exception:
        print("[ WARNING ] OpenVINO tokenizer/detokenizer models couldn't be exported because of exception:", exception)
```
A reviewer left a suggested change on the warning line above; the author replied: Fixed.
```python
def main_export(
    model_name_or_path: str,
    output: Union[str, Path],
```
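For context, the two models saved by `tokenizer_export` are ordinary OpenVINO IR files. Below is a minimal sketch of how they could be consumed after export; the output directory name is hypothetical, and importing `openvino_tokenizers` is assumed to be what registers the custom tokenizer operations before the models are compiled:

```python
# Sketch: load and run the exported tokenizer/detokenizer IR files.
from pathlib import Path

import openvino_tokenizers  # noqa: F401 -- registers the tokenizer custom operations
from openvino import Core

core = Core()
output = Path("exported_model")  # hypothetical export directory
tokenizer = core.compile_model(output / "openvino_tokenizer.xml", "CPU")
detokenizer = core.compile_model(output / "openvino_detokenizer.xml", "CPU")

encoded = tokenizer(["Hello, OpenVINO!"])    # string input -> token ids
print(encoded["input_ids"])

decoded = detokenizer(encoded["input_ids"])  # token ids -> text, first output accessed by index
print(decoded[0])
```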
```python
@@ -328,6 +347,12 @@ class StoreAttr(object):
    if generation_config is not None:
        generation_config.save_pretrained(output)
    maybe_save_preprocessors(model_name_or_path, output)
    try:
        # TODO: Avoid loading the tokenizer again if it was already loaded before
        tokenizer = AutoTokenizer.from_pretrained(model_name_or_path)
        tokenizer_export(tokenizer, output)
    except Exception:
        print("[ WARNING ] Could not load tokenizer using the specified model ID or path. OpenVINO tokenizer/detokenizer models won't be generated.")
```
Comment on lines +350 to +355:

Reviewer: The tokenizer is already loaded in the `maybe_load_preprocessors` function; I recommend checking that function's result instead. Also, the `trust_remote_code` parameter should probably be taken into account if the tokenizer is loaded explicitly.

Reply: Reuse the tokenizer from `maybe_load_preprocessors`.

Reply: Here is the PR: slyalin#2
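As a rough illustration of that suggestion (a sketch only, not the code from this PR or from slyalin#2): the already-loaded preprocessors could be scanned for a tokenizer instead of calling `AutoTokenizer.from_pretrained` a second time. The surrounding variable names (`model_name_or_path`, `output`) come from the diff; everything else is an assumption.

```python
# Sketch: reuse the preprocessors that main_export already loads instead of
# reloading the tokenizer. Names other than maybe_load_preprocessors and
# tokenizer_export are illustrative assumptions.
from transformers import PreTrainedTokenizerBase
from optimum.utils.save_utils import maybe_load_preprocessors

# trust_remote_code may also need to be forwarded here, per the review comment.
preprocessors = maybe_load_preprocessors(model_name_or_path)
tokenizer = next((p for p in preprocessors if isinstance(p, PreTrainedTokenizerBase)), None)

if tokenizer is not None:
    tokenizer_export(tokenizer, output)
else:
    print("[ WARNING ] No tokenizer found; OpenVINO tokenizer/detokenizer models won't be generated.")
```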
```python
    if model.config.is_encoder_decoder and task.startswith("text-generation"):
        raise ValueError(
```
```python
@@ -358,10 +383,12 @@ class StoreAttr(object):
    tokenizer = getattr(model, "tokenizer", None)
    if tokenizer is not None:
        tokenizer.save_pretrained(output.joinpath("tokenizer"))
        tokenizer_export(tokenizer, output)

    tokenizer_2 = getattr(model, "tokenizer_2", None)
    if tokenizer_2 is not None:
        tokenizer_2.save_pretrained(output.joinpath("tokenizer_2"))
        tokenizer_export(tokenizer_2, output, "_2")

    model.save_config(output)
```
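The `suffix` argument only affects the output file names, so a pipeline with two tokenizers would be expected to produce the names shown below. This is a quick illustration of the naming convention used in `tokenizer_export`; the output directory is hypothetical.

```python
from pathlib import Path

output = Path("sd_export")  # hypothetical output directory
for suffix in ("", "_2"):
    # Mirrors the path construction in tokenizer_export above.
    print(output.joinpath("openvino_tokenizer" + suffix + ".xml"))
    print(output.joinpath("openvino_detokenizer" + suffix + ".xml"))
# sd_export/openvino_tokenizer.xml
# sd_export/openvino_detokenizer.xml
# sd_export/openvino_tokenizer_2.xml
# sd_export/openvino_detokenizer_2.xml
```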
Reviewer: I think it should be in the convert.py file, together with the other conversion functions.

Author: Moved.
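End to end, the intended effect of this change is that a regular OpenVINO export also writes `openvino_tokenizer.xml` and `openvino_detokenizer.xml` next to the converted model whenever `openvino_tokenizers` is installed. A minimal sketch of driving that through `main_export` follows; the import path is the usual optimum-intel entry point, and the model ID, output directory, and task value are illustrative.

```python
# Sketch: trigger the export path touched by this PR. Assumes optimum-intel
# with this change and the openvino_tokenizers package are installed.
from optimum.exporters.openvino import main_export

main_export(
    model_name_or_path="gpt2",          # any Hugging Face model ID or local path (illustrative)
    output="gpt2_openvino",             # openvino_tokenizer.xml / openvino_detokenizer.xml land here
    task="text-generation-with-past",   # illustrative task name
)
```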