fix: raise NotImplementedError for the PPO trainer builder
This commit is contained in:
@@ -247,4 +247,4 @@ class HFPPOTrainerBuilder(TrainerBuilderBase):
|
||||
|
||||
def build(self, total_num_steps):
    """Build the PPO trainer and its PPOConfig.

    Args:
        total_num_steps: Total number of training steps (currently unused
            because PPO support is not implemented).

    Raises:
        NotImplementedError: Always — PPO trainer building is a stub.
    """
    # build PPOConfig
    # NOTE: the dead `pass` that preceded the raise has been removed;
    # the method unconditionally signals that PPO is unsupported.
    raise NotImplementedError("PPO trainer builder is not implemented yet.")
|
||||
|
||||
Reference in New Issue
Block a user