File tree 2 files changed +8
-2
lines changed
2 files changed +8
-2
lines changed Original file line number Diff line number Diff line change 416
416
" logging_steps = 20 # 定义训练过程中每隔多少步骤输出一次日志\n " ,
417
417
" save_steps = 65 # 定义训练过程中每隔多少步骤保存一次模型\n " ,
418
418
" save_total_limit = 3 # 控制最多保留多少个模型 checkpoint\n " ,
419
- " report_to = None # 设置上报实验指标的目标,默认值为无 \n " ,
419
+ " report_to = \" none \" # 设置不上报实验指标,也可以设置为 \" wandb \" ,此时需要获取对应的 API,见:https://github.com/Hoper-J/AI-Guide-and-Demos-zh_CN/pull/5 \n " ,
420
420
" MICRO_BATCH_SIZE = 4 # 定义微批次大小\n " ,
421
421
" BATCH_SIZE = 16 # 定义一个批次的大小\n " ,
422
422
" GRADIENT_ACCUMULATION_STEPS = BATCH_SIZE // MICRO_BATCH_SIZE # 计算每个微批次累积的梯度步骤\n " ,
Original file line number Diff line number Diff line change @@ -361,7 +361,7 @@ dataset_dir = "./GenAI-Hw5/Tang_training_data.json" # 设置数据集目录或
361
361
logging_steps = 20 # 定义训练过程中每隔多少步骤输出一次日志
362
362
save_steps = 65 # 定义训练过程中每隔多少步骤保存一次模型
363
363
save_total_limit = 3 # 控制最多保留多少个模型 checkpoint
364
- report_to = None # 设置上报实验指标的目标,默认值为无
364
+ report_to = " none " # 设置不上报实验指标,也可以设置为 "wandb",此时需要获取对应的 API,见:https://github.com/Hoper-J/AI-Guide-and-Demos-zh_CN/pull/5
365
365
MICRO_BATCH_SIZE = 4 # 定义微批次大小
366
366
BATCH_SIZE = 16 # 定义一个批次的大小
367
367
GRADIENT_ACCUMULATION_STEPS = BATCH_SIZE // MICRO_BATCH_SIZE # 计算每个微批次累积的梯度步骤
@@ -379,6 +379,12 @@ if ddp:
379
379
GRADIENT_ACCUMULATION_STEPS = GRADIENT_ACCUMULATION_STEPS // world_size
380
380
```
381
381
382
+ > [!note]
383
+ >
384
+ > 参数 `report_to` 设置为 `None` 是无效的,正确的做法是设置为 `"none"`[^1]。
385
+
386
+ [^1]: [How to turn WandB off in trainer?](https://discuss.huggingface.co/t/how-to-turn-wandb-off-in-trainer/6237/4).
387
+
382
388
# 开始微调
383
389
384
390
``` python
You can’t perform that action at this time.
0 commit comments