Skip to content

Commit 717feb0

Browse files
committed
Missing distillation cfg in main
1 parent aa163bc commit 717feb0

File tree

1 file changed

+3
-3
lines changed

1 file changed

+3
-3
lines changed

verl/trainer/main_ppo.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -236,8 +236,8 @@ def init_resource_pool_mgr(self, config):
236236
config.reward.reward_model.nnodes = config.trainer.nnodes
237237
config.reward.reward_model.n_gpus_per_node = config.trainer.n_gpus_per_node
238238

239-
distillation_config = config.distillation
240-
if distillation_config.enabled and distillation_config.teacher_model.enable_resource_pool:
239+
distillation_config = config.get("distillation")
240+
if is_distillation_enabled(distillation_config) and distillation_config.teacher_model.enable_resource_pool:
241241
if distillation_config.teacher_model.n_gpus_per_node <= 0:
242242
raise ValueError("config.distillation.teacher_model.n_gpus_per_node must be greater than 0")
243243
if distillation_config.teacher_model.nnodes <= 0:
@@ -269,7 +269,7 @@ def add_teacher_model_resource_pool(self, config):
269269
"""Add teacher model worker if enabled."""
270270
from verl.trainer.ppo.ray_trainer import Role
271271

272-
if is_distillation_enabled(config.distillation):
272+
if is_distillation_enabled(config.get("distillation")):
273273
# we do not use teacher model workers, so we only register teacher model in resource pool
274274
# without registering a teacher model worker in role-worker mapping
275275
if config.distillation.teacher_model.enable_resource_pool:

0 commit comments

Comments (0)