"""Prepare and train a model on a dataset. Can also infer from a model or merge lora"""
import logging
from pathlib import Path

import fire
import transformers

from axolotl.cli import (
    check_accelerate_default_config,
    do_inference,
    do_merge_lora,
    load_cfg,
    load_datasets,
    print_axolotl_text_art,
)
from axolotl.cli.shard import shard
from axolotl.common.cli import TrainerCliArgs
from axolotl.train import train

LOG = logging.getLogger("axolotl.scripts.finetune")


def do_cli(config: Path = Path("examples/"), **kwargs):
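    """Parse the config and CLI flags, then run training, inference, LoRA merging, or sharding."""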
    print_axolotl_text_art()
    LOG.warning(
        str(
            PendingDeprecationWarning(
                "scripts/finetune.py will be replaced with calling axolotl.cli.train"
            )
        )
    )
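    # Load the YAML config, applying any keyword overrides passed on the command line.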
    parsed_cfg = load_cfg(config, **kwargs)
    check_accelerate_default_config()
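    # Collect axolotl-specific CLI flags (e.g. --inference, --merge_lora) into a dataclass.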
    parser = transformers.HfArgumentParser(TrainerCliArgs)
    parsed_cli_args, _ = parser.parse_args_into_dataclasses(
        return_remaining_strings=True
    )
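    # Dispatch on the requested mode; plain training is the default path.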
    if parsed_cli_args.inference:
        do_inference(cfg=parsed_cfg, cli_args=parsed_cli_args)
    elif parsed_cli_args.merge_lora:
        do_merge_lora(cfg=parsed_cfg, cli_args=parsed_cli_args)
    elif parsed_cli_args.shard:
        shard(cfg=parsed_cfg, cli_args=parsed_cli_args)
    else:
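        # Prepare/tokenize the datasets first; bail out early if only dataset prep was requested.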
        dataset_meta = load_datasets(cfg=parsed_cfg, cli_args=parsed_cli_args)
        if parsed_cli_args.prepare_ds_only:
            return
        train(cfg=parsed_cfg, cli_args=parsed_cli_args, dataset_meta=dataset_meta)


if __name__ == "__main__":
    fire.Fire(do_cli)
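
# Example invocation (the config path below is illustrative; point it at your own YAML):
#   python scripts/finetune.py examples/llama-2/lora.yml
#   python scripts/finetune.py examples/llama-2/lora.yml --inference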