Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Deprecate use_auth_token #684

Merged
merged 7 commits into from
Apr 29, 2024
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 18 additions & 4 deletions optimum/exporters/openvino/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@ def main_export(
force_download: bool = False,
local_files_only: bool = False,
use_auth_token: Optional[Union[bool, str]] = None,
token: Optional[Union[bool, str]] = None,
model_kwargs: Optional[Dict[str, Any]] = None,
custom_export_configs: Optional[Dict[str, "OnnxConfig"]] = None,
fn_get_submodels: Optional[Callable] = None,
Expand Down Expand Up @@ -107,9 +108,11 @@ def main_export(
cached versions if they exist.
local_files_only (`Optional[bool]`, defaults to `False`):
Whether or not to only look at local files (i.e., do not try to download the model).
use_auth_token (`Optional[str]`, defaults to `None`):
use_auth_token (`Optional[Union[bool, str]]`, defaults to `None`):
Deprecated. Please use `token` instead.
token (`Optional[Union[bool, str]]`, defaults to `None`):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `transformers-cli login` (stored in `~/.huggingface`).
when running `huggingface-cli login` (stored in `~/.huggingface`).
model_kwargs (`Optional[Dict[str, Any]]`, defaults to `None`):
Experimental usage: keyword arguments to pass to the model during
the export. This argument should be used along the `custom_export_configs` argument
Expand Down Expand Up @@ -138,6 +141,17 @@ def main_export(
```
"""

if use_auth_token is not None:
logger.warning(
"The `use_auth_token` argument is deprecated and will be removed soon. "
"Please use the `token` argument instead."
)
if token is not None:
raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")

token = use_auth_token
use_auth_token = None

if compression_option is not None:
logger.warning(
"The `compression_option` argument is deprecated and will be removed in optimum-intel v1.17.0. "
Expand Down Expand Up @@ -196,7 +210,7 @@ def main_export(
subfolder=subfolder,
revision=revision,
cache_dir=cache_dir,
use_auth_token=use_auth_token,
token=token,
local_files_only=local_files_only,
force_download=force_download,
trust_remote_code=trust_remote_code,
Expand Down Expand Up @@ -268,7 +282,7 @@ class StoreAttr(object):
subfolder=subfolder,
revision=revision,
cache_dir=cache_dir,
use_auth_token=use_auth_token,
token=token,
local_files_only=local_files_only,
force_download=force_download,
trust_remote_code=trust_remote_code,
Expand Down
32 changes: 28 additions & 4 deletions optimum/intel/generation/modeling.py
Original file line number Diff line number Diff line change
Expand Up @@ -354,7 +354,8 @@ def _from_pretrained(
cls,
model_id: Union[str, Path],
config: PretrainedConfig,
use_auth_token: Optional[Union[bool, str, None]] = None,
use_auth_token: Optional[Union[bool, str]] = None,
token: Optional[Union[bool, str]] = None,
revision: Optional[Union[str, None]] = None,
force_download: bool = False,
cache_dir: str = HUGGINGFACE_HUB_CACHE,
Expand All @@ -363,6 +364,17 @@ def _from_pretrained(
use_cache: bool = True,
**kwargs,
):
if use_auth_token is not None:
logger.warning(
"The `use_auth_token` argument is deprecated and will be removed soon. "
"Please use the `token` argument instead."
)
if token is not None:
raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")

token = use_auth_token
use_auth_token = None

if not getattr(config, "torchscript", False):
raise ValueError("`torchscript` should be set to True to load TorchScript model")

Expand All @@ -376,7 +388,7 @@ def _from_pretrained(
model_cache_path = hf_hub_download(
repo_id=model_id,
filename=file_name,
use_auth_token=use_auth_token,
token=token,
revision=revision,
cache_dir=cache_dir,
force_download=force_download,
Expand All @@ -399,6 +411,7 @@ def _from_transformers(
model_id: str,
config: PretrainedConfig,
use_auth_token: Optional[Union[bool, str]] = None,
token: Optional[Union[bool, str]] = None,
revision: Optional[str] = None,
force_download: bool = False,
cache_dir: str = HUGGINGFACE_HUB_CACHE,
Expand All @@ -408,13 +421,24 @@ def _from_transformers(
torch_dtype: Optional[Union[str, "torch.dtype"]] = None,
**kwargs,
):
if use_auth_token is not None:
logger.warning(
"The `use_auth_token` argument is deprecated and will be removed soon. "
"Please use the `token` argument instead."
)
if token is not None:
raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")

token = use_auth_token
use_auth_token = None

if is_torch_version("<", "2.1.0"):
raise ImportError("`torch>=2.1.0` is needed to trace your model")

task = cls.export_feature
model_kwargs = {
"revision": revision,
"use_auth_token": use_auth_token,
"token": token,
"cache_dir": cache_dir,
"subfolder": subfolder,
"local_files_only": local_files_only,
Expand All @@ -436,7 +460,7 @@ def _from_transformers(
model_id=save_dir_path,
config=config,
use_cache=use_cache,
use_auth_token=use_auth_token,
token=token,
revision=revision,
force_download=force_download,
cache_dir=cache_dir,
Expand Down
30 changes: 27 additions & 3 deletions optimum/intel/ipex/modeling_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,6 +152,7 @@ def _from_transformers(
config: PretrainedConfig,
use_cache: bool = True,
use_auth_token: Optional[Union[bool, str]] = None,
token: Optional[Union[bool, str]] = None,
revision: Optional[str] = None,
force_download: bool = False,
cache_dir: str = HUGGINGFACE_HUB_CACHE,
Expand All @@ -160,13 +161,24 @@ def _from_transformers(
torch_dtype: Optional[Union[str, "torch.dtype"]] = None,
trust_remote_code: bool = False,
):
if use_auth_token is not None:
logger.warning(
"The `use_auth_token` argument is deprecated and will be removed soon. "
"Please use the `token` argument instead."
)
if token is not None:
raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")

token = use_auth_token
use_auth_token = None

if is_torch_version("<", "2.1.0"):
raise ImportError("`torch>=2.1.0` is needed to trace your model")

task = cls.export_feature
model_kwargs = {
"revision": revision,
"use_auth_token": use_auth_token,
"token": token,
"cache_dir": cache_dir,
"subfolder": subfolder,
"local_files_only": local_files_only,
Expand All @@ -188,7 +200,8 @@ def _from_pretrained(
cls,
model_id: Union[str, Path],
config: PretrainedConfig,
use_auth_token: Optional[Union[bool, str, None]] = None,
use_auth_token: Optional[Union[bool, str]] = None,
token: Optional[Union[bool, str]] = None,
revision: Optional[Union[str, None]] = None,
force_download: bool = False,
cache_dir: str = HUGGINGFACE_HUB_CACHE,
Expand All @@ -197,6 +210,17 @@ def _from_pretrained(
subfolder: str = "",
**kwargs,
):
if use_auth_token is not None:
logger.warning(
"The `use_auth_token` argument is deprecated and will be removed soon. "
"Please use the `token` argument instead."
)
if token is not None:
raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")

token = use_auth_token
use_auth_token = None

if not getattr(config, "torchscript", False):
raise ValueError(
"`config.torchscript` should be set to `True`, if your model is not a TorchScript model and needs to be traced please set `export=True` when loading it with `.from_pretrained()`"
Expand All @@ -211,7 +235,7 @@ def _from_pretrained(
model_cache_path = hf_hub_download(
repo_id=model_id,
filename=file_name,
use_auth_token=use_auth_token,
token=token,
revision=revision,
cache_dir=cache_dir,
force_download=force_download,
Expand Down
18 changes: 15 additions & 3 deletions optimum/intel/neural_compressor/modeling_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,8 @@ def _from_pretrained(
cls,
model_id: Union[str, Path],
config: PretrainedConfig,
use_auth_token: Optional[Union[bool, str, None]] = None,
use_auth_token: Optional[Union[bool, str]] = None,
token: Optional[Union[bool, str]] = None,
revision: Optional[Union[str, None]] = None,
force_download: bool = False,
cache_dir: str = HUGGINGFACE_HUB_CACHE,
Expand All @@ -108,6 +109,17 @@ def _from_pretrained(
trust_remote_code: bool = False,
**kwargs,
):
if use_auth_token is not None:
logger.warning(
"The `use_auth_token` argument is deprecated and will be removed soon. "
"Please use the `token` argument instead."
)
if token is not None:
raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")

token = use_auth_token
use_auth_token = None

model_name_or_path = kwargs.pop("model_name_or_path", None)
if model_name_or_path is not None:
logger.warning("`model_name_or_path` is deprecated please use `model_id`")
Expand All @@ -122,7 +134,7 @@ def _from_pretrained(
repo_id=model_id,
filename=file_name,
subfolder=subfolder,
use_auth_token=use_auth_token,
token=token,
revision=revision,
cache_dir=cache_dir,
force_download=force_download,
Expand All @@ -145,7 +157,7 @@ def _from_pretrained(

return _BaseQBitsAutoModelClass.from_pretrained(
pretrained_model_name_or_path=model_id,
use_auth_token=use_auth_token,
token=token,
revision=revision,
force_download=force_download,
cache_dir=cache_dir,
Expand Down
23 changes: 19 additions & 4 deletions optimum/intel/neural_compressor/quantization.py
Original file line number Diff line number Diff line change
Expand Up @@ -446,7 +446,8 @@ def get_calibration_dataset(
dataset_split: str = "train",
preprocess_function: Optional[Callable] = None,
preprocess_batch: bool = True,
use_auth_token: bool = False,
use_auth_token: Optional[Union[bool, str]] = None,
token: Optional[Union[bool, str]] = None,
) -> Dataset:
"""
Create the calibration `datasets.Dataset` to use for the post-training static quantization calibration step.
Expand All @@ -465,16 +466,30 @@ def get_calibration_dataset(
Processing function to apply to each example after loading dataset.
preprocess_batch (`bool`, defaults to `True`):
Whether the `preprocess_function` should be batched.
use_auth_token (`bool`, defaults to `False`):
Whether to use the token generated when running `transformers-cli login`.
use_auth_token (`Optional[Union[bool, str]]`, defaults to `None`):
Deprecated. Please use `token` instead.
token (`Optional[Union[bool, str]]`, defaults to `None`):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `huggingface-cli login` (stored in `~/.huggingface`).
Returns:
The calibration `datasets.Dataset` to use for the post-training static quantization calibration step.
"""
if use_auth_token is not None:
logger.warning(
"The `use_auth_token` argument is deprecated and will be removed soon. "
"Please use the `token` argument instead."
)
if token is not None:
raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")

token = use_auth_token
use_auth_token = None

calibration_dataset = load_dataset(
dataset_name,
name=dataset_config_name,
split=dataset_split,
use_auth_token=use_auth_token,
token=token,
)

if num_samples is not None:
Expand Down
24 changes: 19 additions & 5 deletions optimum/intel/openvino/loaders.py
Original file line number Diff line number Diff line change
Expand Up @@ -188,9 +188,11 @@ def load_textual_inversion(
local_files_only (`bool`, *optional*, defaults to `False`):
Whether to only load local model weights and configuration files or not. If set to `True`, the model
won't be downloaded from the Hub.
use_auth_token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, the token generated from
`diffusers-cli login` (stored in `~/.huggingface`) is used.
use_auth_token (`Optional[Union[bool, str]]`, *optional*, defaults to `None`):
Deprecated. Please use `token` instead.
token (`Optional[Union[bool, str]]`, *optional*, defaults to `None`):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`):
The specific model version to use. It can be a branch name, a tag name, a commit id, or any identifier
allowed by Git.
Expand Down Expand Up @@ -258,11 +260,23 @@ def load_textual_inversion(
proxies = kwargs.pop("proxies", None)
local_files_only = kwargs.pop("local_files_only", HF_HUB_OFFLINE)
use_auth_token = kwargs.pop("use_auth_token", None)
token = kwargs.pop("token", None)
revision = kwargs.pop("revision", None)
subfolder = kwargs.pop("subfolder", None)
weight_name = kwargs.pop("weight_name", None)
use_safetensors = kwargs.pop("use_safetensors", None)

if use_auth_token is not None:
logger.warning(
"The `use_auth_token` argument is deprecated and will be removed soon. "
"Please use the `token` argument instead."
)
if token is not None:
raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")

token = use_auth_token
use_auth_token = None

if use_safetensors and not is_safetensors_available():
raise ValueError(
"`use_safetensors`=True but safetensors is not installed. Please install safetensors with `pip install safetensors"
Expand Down Expand Up @@ -319,7 +333,7 @@ def load_textual_inversion(
resume_download=resume_download,
proxies=proxies,
local_files_only=local_files_only,
use_auth_token=use_auth_token,
use_auth_token=token, # still uses use_auth_token
revision=revision,
subfolder=subfolder,
user_agent=user_agent,
Expand All @@ -340,7 +354,7 @@ def load_textual_inversion(
resume_download=resume_download,
proxies=proxies,
local_files_only=local_files_only,
use_auth_token=use_auth_token,
use_auth_token=token, # still uses use_auth_token
revision=revision,
subfolder=subfolder,
user_agent=user_agent,
Expand Down
Loading
Loading