From 9f0a1da3a8d02229afcdafd1dfb4832cdeffeaa1 Mon Sep 17 00:00:00 2001 From: Yizhen Date: Sun, 23 Jun 2024 12:56:45 +0800 Subject: [PATCH] [Usability] Better to set overwrite_cache for RM inference in online RLHF --- scripts/run_rm_inference.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/run_rm_inference.sh b/scripts/run_rm_inference.sh index f6446986d..701daf120 100644 --- a/scripts/run_rm_inference.sh +++ b/scripts/run_rm_inference.sh @@ -65,6 +65,7 @@ accelerate launch --config_file configs/accelerator_multigpu_config.yaml \ --block_size 4096 \ --inference_batch_size 16 \ --dataset_path ${dataset_path} \ + --overwrite_cache True \ --conversation_template ${conversation_template} \ --preprocessing_num_workers 16 \ --save_results True \