Skip to content

Commit

Permalink
Deprecate use_auth_token (#684)
Browse files Browse the repository at this point in the history
* added token argument

* updated docstrings

* added deprecation warning

* added more warnings and swapped `token` with `use_auth_token`

* use FutureWarning instead

* added a test to read from private repo
  • Loading branch information
IlyasMoutawwakil authored Apr 29, 2024
1 parent 920b237 commit c287fba
Show file tree
Hide file tree
Showing 13 changed files with 315 additions and 59 deletions.
21 changes: 17 additions & 4 deletions optimum/exporters/openvino/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
# limitations under the License.

import logging
import warnings
from pathlib import Path
from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Union

Expand Down Expand Up @@ -57,6 +58,7 @@ def main_export(
force_download: bool = False,
local_files_only: bool = False,
use_auth_token: Optional[Union[bool, str]] = None,
token: Optional[Union[bool, str]] = None,
model_kwargs: Optional[Dict[str, Any]] = None,
custom_export_configs: Optional[Dict[str, "OnnxConfig"]] = None,
fn_get_submodels: Optional[Callable] = None,
Expand Down Expand Up @@ -107,9 +109,11 @@ def main_export(
cached versions if they exist.
local_files_only (`Optional[bool]`, defaults to `False`):
Whether or not to only look at local files (i.e., do not try to download the model).
use_auth_token (`Optional[str]`, defaults to `None`):
use_auth_token (Optional[Union[bool, str]], defaults to `None`):
Deprecated. Please use `token` instead.
token (Optional[Union[bool, str]], defaults to `None`):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `transformers-cli login` (stored in `~/.huggingface`).
when running `huggingface-cli login` (stored in `~/.huggingface`).
model_kwargs (`Optional[Dict[str, Any]]`, defaults to `None`):
Experimental usage: keyword arguments to pass to the model during
the export. This argument should be used along the `custom_export_configs` argument
Expand Down Expand Up @@ -138,6 +142,15 @@ def main_export(
```
"""

if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
FutureWarning,
)
if token is not None:
raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")
token = use_auth_token

if compression_option is not None:
logger.warning(
"The `compression_option` argument is deprecated and will be removed in optimum-intel v1.17.0. "
Expand Down Expand Up @@ -196,7 +209,7 @@ def main_export(
subfolder=subfolder,
revision=revision,
cache_dir=cache_dir,
use_auth_token=use_auth_token,
token=token,
local_files_only=local_files_only,
force_download=force_download,
trust_remote_code=trust_remote_code,
Expand Down Expand Up @@ -268,7 +281,7 @@ class StoreAttr(object):
subfolder=subfolder,
revision=revision,
cache_dir=cache_dir,
use_auth_token=use_auth_token,
token=token,
local_files_only=local_files_only,
force_download=force_download,
trust_remote_code=trust_remote_code,
Expand Down
31 changes: 26 additions & 5 deletions optimum/intel/generation/modeling.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
import inspect
import logging
import os
import warnings
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import Optional, Tuple, Union
Expand Down Expand Up @@ -363,15 +364,25 @@ def _from_pretrained(
cls,
model_id: Union[str, Path],
config: PretrainedConfig,
use_auth_token: Optional[Union[bool, str, None]] = None,
revision: Optional[Union[str, None]] = None,
use_auth_token: Optional[Union[bool, str]] = None,
token: Optional[Union[bool, str]] = None,
revision: Optional[str] = None,
force_download: bool = False,
cache_dir: str = HUGGINGFACE_HUB_CACHE,
file_name: Optional[str] = WEIGHTS_NAME,
local_files_only: bool = False,
use_cache: bool = True,
**kwargs,
):
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
FutureWarning,
)
if token is not None:
raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")
token = use_auth_token

if not getattr(config, "torchscript", False):
raise ValueError("`torchscript` should be set to True to load TorchScript model")

Expand All @@ -385,7 +396,7 @@ def _from_pretrained(
model_cache_path = hf_hub_download(
repo_id=model_id,
filename=file_name,
use_auth_token=use_auth_token,
token=token,
revision=revision,
cache_dir=cache_dir,
force_download=force_download,
Expand All @@ -408,6 +419,7 @@ def _from_transformers(
model_id: str,
config: PretrainedConfig,
use_auth_token: Optional[Union[bool, str]] = None,
token: Optional[Union[bool, str]] = None,
revision: Optional[str] = None,
force_download: bool = False,
cache_dir: str = HUGGINGFACE_HUB_CACHE,
Expand All @@ -417,13 +429,22 @@ def _from_transformers(
torch_dtype: Optional[Union[str, "torch.dtype"]] = None,
**kwargs,
):
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
FutureWarning,
)
if token is not None:
raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")
token = use_auth_token

if is_torch_version("<", "2.1.0"):
raise ImportError("`torch>=2.0.0` is needed to trace your model")

task = cls.export_feature
model_kwargs = {
"revision": revision,
"use_auth_token": use_auth_token,
"token": token,
"cache_dir": cache_dir,
"subfolder": subfolder,
"local_files_only": local_files_only,
Expand All @@ -445,7 +466,7 @@ def _from_transformers(
model_id=save_dir_path,
config=config,
use_cache=use_cache,
use_auth_token=use_auth_token,
token=token,
revision=revision,
force_download=force_download,
cache_dir=cache_dir,
Expand Down
33 changes: 29 additions & 4 deletions optimum/intel/ipex/modeling_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@

import logging
import os
import warnings
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import Optional, Tuple, Union
Expand Down Expand Up @@ -152,6 +153,7 @@ def _from_transformers(
config: PretrainedConfig,
use_cache: bool = True,
use_auth_token: Optional[Union[bool, str]] = None,
token: Optional[Union[bool, str]] = None,
revision: Optional[str] = None,
force_download: bool = False,
cache_dir: str = HUGGINGFACE_HUB_CACHE,
Expand All @@ -160,13 +162,24 @@ def _from_transformers(
torch_dtype: Optional[Union[str, "torch.dtype"]] = None,
trust_remote_code: bool = False,
):
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers. Please use `token` instead.",
FutureWarning,
)
if token is not None:
raise ValueError(
"Both the arguments `use_auth_token` and `token` were specified, which is not supported. Please specify only `token`."
)
token = use_auth_token

if is_torch_version("<", "2.1.0"):
raise ImportError("`torch>=2.0.0` is needed to trace your model")

task = cls.export_feature
model_kwargs = {
"revision": revision,
"use_auth_token": use_auth_token,
"token": token,
"cache_dir": cache_dir,
"subfolder": subfolder,
"local_files_only": local_files_only,
Expand All @@ -188,15 +201,27 @@ def _from_pretrained(
cls,
model_id: Union[str, Path],
config: PretrainedConfig,
use_auth_token: Optional[Union[bool, str, None]] = None,
revision: Optional[Union[str, None]] = None,
use_auth_token: Optional[Union[bool, str]] = None,
token: Optional[Union[bool, str]] = None,
revision: Optional[str] = None,
force_download: bool = False,
cache_dir: str = HUGGINGFACE_HUB_CACHE,
file_name: Optional[str] = WEIGHTS_NAME,
local_files_only: bool = False,
subfolder: str = "",
**kwargs,
):
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers. Please use `token` instead.",
FutureWarning,
)
if token is not None:
raise ValueError(
"Both the arguments `use_auth_token` and `token` were specified, which is not supported. Please specify only `token`."
)
token = use_auth_token

if not getattr(config, "torchscript", False):
raise ValueError(
"`config.torchscript` should be set to `True`, if your model is not a TorchScript model and needs to be traced please set `export=True` when loading it with `.from_pretrained()`"
Expand All @@ -211,7 +236,7 @@ def _from_pretrained(
model_cache_path = hf_hub_download(
repo_id=model_id,
filename=file_name,
use_auth_token=use_auth_token,
token=token,
revision=revision,
cache_dir=cache_dir,
force_download=force_download,
Expand Down
19 changes: 15 additions & 4 deletions optimum/intel/neural_compressor/modeling_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@

import logging
import os
import warnings
from pathlib import Path
from tempfile import TemporaryDirectory
from typing import Dict, Optional, Union
Expand Down Expand Up @@ -98,8 +99,9 @@ def _from_pretrained(
cls,
model_id: Union[str, Path],
config: PretrainedConfig,
use_auth_token: Optional[Union[bool, str, None]] = None,
revision: Optional[Union[str, None]] = None,
use_auth_token: Optional[Union[bool, str]] = None,
token: Optional[Union[bool, str]] = None,
revision: Optional[str] = None,
force_download: bool = False,
cache_dir: str = HUGGINGFACE_HUB_CACHE,
file_name: str = WEIGHTS_NAME,
Expand All @@ -108,6 +110,15 @@ def _from_pretrained(
trust_remote_code: bool = False,
**kwargs,
):
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
FutureWarning,
)
if token is not None:
raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")
token = use_auth_token

model_name_or_path = kwargs.pop("model_name_or_path", None)
if model_name_or_path is not None:
logger.warning("`model_name_or_path` is deprecated please use `model_id`")
Expand All @@ -122,7 +133,7 @@ def _from_pretrained(
repo_id=model_id,
filename=file_name,
subfolder=subfolder,
use_auth_token=use_auth_token,
token=token,
revision=revision,
cache_dir=cache_dir,
force_download=force_download,
Expand All @@ -145,7 +156,7 @@ def _from_pretrained(

return _BaseQBitsAutoModelClass.from_pretrained(
pretrained_model_name_or_path=model_id,
use_auth_token=use_auth_token,
token=token,
revision=revision,
force_download=force_download,
cache_dir=cache_dir,
Expand Down
22 changes: 18 additions & 4 deletions optimum/intel/neural_compressor/quantization.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
import inspect
import logging
import types
import warnings
from enum import Enum
from itertools import chain
from pathlib import Path
Expand Down Expand Up @@ -446,7 +447,8 @@ def get_calibration_dataset(
dataset_split: str = "train",
preprocess_function: Optional[Callable] = None,
preprocess_batch: bool = True,
use_auth_token: bool = False,
use_auth_token: Optional[Union[bool, str]] = None,
token: Optional[Union[bool, str]] = None,
) -> Dataset:
"""
Create the calibration `datasets.Dataset` to use for the post-training static quantization calibration step.
Expand All @@ -465,16 +467,28 @@ def get_calibration_dataset(
Processing function to apply to each example after loading dataset.
preprocess_batch (`bool`, defaults to `True`):
Whether the `preprocess_function` should be batched.
use_auth_token (`bool`, defaults to `False`):
Whether to use the token generated when running `transformers-cli login`.
use_auth_token (Optional[Union[bool, str]], defaults to `None`):
Deprecated. Please use `token` instead.
token (Optional[Union[bool, str]], defaults to `None`):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `huggingface-cli login` (stored in `~/.huggingface`).
Returns:
The calibration `datasets.Dataset` to use for the post-training static quantization calibration step.
"""
if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
FutureWarning,
)
if token is not None:
raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")
token = use_auth_token

calibration_dataset = load_dataset(
dataset_name,
name=dataset_config_name,
split=dataset_split,
use_auth_token=use_auth_token,
token=token,
)

if num_samples is not None:
Expand Down
23 changes: 18 additions & 5 deletions optimum/intel/openvino/loaders.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
# limitations under the License.

import logging
import warnings
from typing import Dict, List, Optional, Union

import torch
Expand Down Expand Up @@ -188,9 +189,11 @@ def load_textual_inversion(
local_files_only (`bool`, *optional*, defaults to `False`):
Whether to only load local model weights and configuration files or not. If set to `True`, the model
won't be downloaded from the Hub.
use_auth_token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, the token generated from
`diffusers-cli login` (stored in `~/.huggingface`) is used.
use_auth_token (Optional[Union[bool, str]], defaults to `None`):
Deprecated. Please use `token` instead.
token (Optional[Union[bool, str]], defaults to `None`):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `huggingface-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`):
The specific model version to use. It can be a branch name, a tag name, a commit id, or any identifier
allowed by Git.
Expand Down Expand Up @@ -258,11 +261,21 @@ def load_textual_inversion(
proxies = kwargs.pop("proxies", None)
local_files_only = kwargs.pop("local_files_only", HF_HUB_OFFLINE)
use_auth_token = kwargs.pop("use_auth_token", None)
token = kwargs.pop("token", None)
revision = kwargs.pop("revision", None)
subfolder = kwargs.pop("subfolder", None)
weight_name = kwargs.pop("weight_name", None)
use_safetensors = kwargs.pop("use_safetensors", None)

if use_auth_token is not None:
warnings.warn(
"The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
FutureWarning,
)
if token is not None:
raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")
token = use_auth_token

if use_safetensors and not is_safetensors_available():
raise ValueError(
"`use_safetensors`=True but safetensors is not installed. Please install safetensors with `pip install safetensors"
Expand Down Expand Up @@ -319,7 +332,7 @@ def load_textual_inversion(
resume_download=resume_download,
proxies=proxies,
local_files_only=local_files_only,
use_auth_token=use_auth_token,
use_auth_token=token, # still uses use_auth_token
revision=revision,
subfolder=subfolder,
user_agent=user_agent,
Expand All @@ -340,7 +353,7 @@ def load_textual_inversion(
resume_download=resume_download,
proxies=proxies,
local_files_only=local_files_only,
use_auth_token=use_auth_token,
use_auth_token=token, # still uses use_auth_token
revision=revision,
subfolder=subfolder,
user_agent=user_agent,
Expand Down
Loading

0 comments on commit c287fba

Please sign in to comment.