fix post_init (#2855)
Jintao-Huang authored Jan 4, 2025
1 parent 07f10d2 commit ab22d5c
Showing 4 changed files with 5 additions and 13 deletions.
1 change: 0 additions & 1 deletion swift/llm/argument/export_args.py
@@ -82,7 +82,6 @@ def _init_output_dir(self):
         assert not os.path.exists(self.output_dir), f'args.output_dir: {self.output_dir} already exists.'
 
     def __post_init__(self):
-        MergeArguments.__post_init__(self)
         BaseArguments.__post_init__(self)
         self._init_output_dir()
         if self.quant_bits:
1 change: 0 additions & 1 deletion swift/llm/argument/infer_args.py
@@ -157,7 +157,6 @@ def _init_pt_ddp(self):
 
     def __post_init__(self) -> None:
         BaseArguments.__post_init__(self)
-        MergeArguments.__post_init__(self)
         VllmArguments.__post_init__(self)
         self._init_result_path('infer_result')
         self._init_eval_human()
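For context on the two hunks above: Python dataclasses only run the most-derived __post_init__ automatically, so parent __post_init__ hooks fire only when chained by explicit calls, which is the pattern these argument classes follow. The sketch below illustrates that mechanic with invented class bodies (the real ms-swift classes carry many more fields); it is not the repository's actual hierarchy:

from dataclasses import dataclass

# Only the most-derived __post_init__ is invoked by the generated
# __init__; parent hooks run only if chained explicitly.

@dataclass
class BaseArguments:
    model: str = ''

    def __post_init__(self):
        print('BaseArguments ready')

@dataclass
class MergeArguments:
    merge_lora: bool = False

    def __post_init__(self):
        print('MergeArguments ready')

@dataclass
class ExportArguments(MergeArguments, BaseArguments):
    def __post_init__(self):
        # The commit drops the explicit MergeArguments.__post_init__(self)
        # call here, leaving BaseArguments as the single chained parent.
        BaseArguments.__post_init__(self)

ExportArguments()  # prints only 'BaseArguments ready'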
11 changes: 5 additions & 6 deletions swift/llm/argument/train_args.py
@@ -136,12 +136,11 @@ def __post_init__(self) -> None:
         TunerArguments.__post_init__(self)
         TorchAccArguments.__post_init__(self)
 
-        if self.lorap_lr_ratio:
-            self.optimizer = 'lorap'
-        elif self.use_galore:
-            self.optimizer = 'galore'
-        elif self.optimizer is None:
-            self.optimizer = 'default'
+        if self.optimizer is None:
+            if self.lorap_lr_ratio:
+                self.optimizer = 'lorap'
+            elif self.use_galore:
+                self.optimizer = 'galore'
 
         if len(self.dataset) == 0:
             raise ValueError(f'self.dataset: {self.dataset}, Please input the training dataset.')
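The train_args.py hunk inverts the precedence between an explicitly supplied --optimizer and the lorap/galore auto-selection: previously, setting lorap_lr_ratio or use_galore overwrote whatever optimizer the user passed, and a missing optimizer was coerced to 'default'; now an explicit value always wins, auto-selection applies only when optimizer is None, and otherwise it stays None. A standalone sketch of the new resolution logic (the resolve_optimizer wrapper and the 'adamw' value are invented for illustration):

def resolve_optimizer(optimizer, lorap_lr_ratio=None, use_galore=False):
    # Auto-select only when the user gave no explicit optimizer.
    if optimizer is None:
        if lorap_lr_ratio:
            optimizer = 'lorap'
        elif use_galore:
            optimizer = 'galore'
    return optimizer

assert resolve_optimizer('adamw', use_galore=True) == 'adamw'  # explicit wins now
assert resolve_optimizer(None, lorap_lr_ratio=16.0) == 'lorap'
assert resolve_optimizer(None) is None  # no more 'default' fallback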
5 changes: 0 additions & 5 deletions swift/plugin/optimizer.py
@@ -53,13 +53,8 @@ def create_lorap_optimizers(args, model, dataset):
     return optimizer_cls(optimizer_grouped_parameters, **optimizer_kwargs), None
 
 
-def default_create_optimizers(args, model, dataset):
-    return None, None
-
-
 # Add your own optimizers here, use --optimizer xxx to train
 optimizers_map = {
     'galore': create_galore_optimizers,
     'lorap': create_lorap_optimizers,
-    'default': default_create_optimizers
 }
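With optimizer now left as None when nothing is requested, the 'default' entry in optimizers_map (and its default_create_optimizers stub, which just returned (None, None)) has no remaining caller, so both are deleted. A hypothetical dispatch sketch showing how a call site can cope without the stub (get_optimizers is invented; the actual lookup in ms-swift may differ):

def get_optimizers(args, model, dataset):
    # None means "no custom optimizer": fall back to the trainer's
    # built-in optimizer creation instead of a 'default' stub.
    if args.optimizer is None:
        return None, None
    return optimizers_map[args.optimizer](args, model, dataset)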
