Skip to content

Commit

Permalink
add
Browse files Browse the repository at this point in the history
  • Loading branch information
echarlaix committed Nov 3, 2023
1 parent 3735c69 commit 3a706f8
Showing 1 changed file with 20 additions and 0 deletions.
20 changes: 20 additions & 0 deletions optimum/intel/neural_compressor/trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@
from transformers.training_args import ParallelMode, TrainingArguments
from transformers.utils import (
WEIGHTS_NAME,
is_accelerate_available,
is_apex_available,
is_sagemaker_mp_enabled,
is_torch_tpu_available,
Expand All @@ -69,12 +70,31 @@
from .configuration import INCConfig


# Optional third-party integrations. Each import is guarded by an availability
# check so this module stays importable when the dependency is not installed.
if is_accelerate_available():
    from accelerate import __version__ as accelerate_version
    from accelerate import skip_first_batches

    # Sampler classes whose state accelerate must handle when resuming training
    # from a checkpoint mid-epoch (used together with `skip_first_batches`).
    # NOTE(review): `RandomSampler` is presumably imported earlier in this file
    # (e.g. from torch.utils.data) — confirm against the full module.
    DATA_SAMPLERS = [RandomSampler]
    # accelerate >= 0.24 re-seeds its own sampler; track it as well.
    if version.parse(accelerate_version) > version.parse("0.23.0"):
        from accelerate.data_loader import SeedableRandomSampler

        DATA_SAMPLERS += [SeedableRandomSampler]

    # Removed two dead guards that contained only `pass`
    # (`accelerate_version > 0.20.3` and `is_deepspeed_available()`):
    # leftovers from deleted conditional imports, with no runtime effect.

# NVIDIA apex provides the legacy `amp` mixed-precision API.
if is_apex_available():
    from apex import amp

# SageMaker model-parallel runtime.
if is_sagemaker_mp_enabled():
    import smdistributed.modelparallel.torch as smp

# XLA device utilities for TPU training.
if is_torch_tpu_available(check_device=False):
    import torch_xla.core.xla_model as xm


if TYPE_CHECKING:
from optimum.exporters.onnx import OnnxConfig
Expand Down

0 comments on commit 3a706f8

Please sign in to comment.