( model: typing.Union[transformers.modeling_utils.PreTrainedModel, torch.nn.modules.module.Module] = None, args: TrainingArguments = None, data_collator: typing.Optional[DataCollator] = None, train_dataset: typing.Optional[torch.utils.data.dataset.Dataset] = None, eval_dataset: typing.Optional[torch.utils.data.dataset.Dataset] = None, tokenizer: typing.Optional[transformers.tokenization_utils_base.PreTrainedTokenizerBase] = None, model_init: typing.Callable[[], transformers.modeling_utils.PreTrainedModel] = None, compute_metrics: typing.Union[typing.Callable[[transformers.trainer_utils.EvalPrediction], typing.Dict], NoneType] = None, callbacks: typing.Optional[typing.List[transformers.trainer_callback.TrainerCallback]] = None, optimizers: typing.Tuple[torch.optim.optimizer.Optimizer, torch.optim.lr_scheduler.LambdaLR] = (None, None) )
( pruner: typing.Optional[neural_compressor.experimental.pruning.Pruning] = None, resume_from_checkpoint: typing.Union[str, bool, NoneType] = None, trial: typing.Union[ForwardRef('optuna.Trial'), typing.Dict[str, typing.Any]] = None, ignore_keys_for_eval: typing.Optional[typing.List[str]] = None, **kwargs )
Parameters
pruner (Pruning, optional) —
Pruning object handling the pruning process.
resume_from_checkpoint (str or bool, optional) —
If a str, local path to a saved checkpoint as saved by a previous instance of
Trainer. If a bool and equals True, load the last checkpoint in
args.output_dir as saved by a previous instance of Trainer. If present,
training will resume from the model/optimizer/scheduler states loaded here.
trial (optuna.Trial or Dict[str, Any], optional) —
The trial run or the hyperparameter dictionary for hyperparameter search.
ignore_keys_for_eval (List[str], optional) —
A list of keys in the output of your model (if it is a dictionary) that should be ignored when
gathering predictions for evaluation during the training.
kwargs (Dict[str, Any], optional) —
Additional keyword arguments used to hide deprecated arguments.
Main training entry point.