diff --git a/src/lighteval/metrics/llm_as_judge.py b/src/lighteval/metrics/llm_as_judge.py
index 741c0863..54f1ed49 100644
--- a/src/lighteval/metrics/llm_as_judge.py
+++ b/src/lighteval/metrics/llm_as_judge.py
@@ -119,7 +119,7 @@ def __lazy_load_client(self):
             from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

             transformers_model = AutoModelForCausalLM.from_pretrained(
-                self.model, torch_dtype=torch.bfloat16, trust_remote_code=False, device_map="cuda"
+                self.model, torch_dtype=torch.float16, trust_remote_code=False, device_map="cuda"
             )
             tokenizer = AutoTokenizer.from_pretrained(self.model)
             self.pipe = pipeline(
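
For context, here is a minimal standalone sketch of what this code path does after the change: load the judge model in float16 and wrap it in a `text-generation` pipeline, as `__lazy_load_client` does. The model id and generation settings below are illustrative assumptions, not values taken from the patch.

```python
# Minimal sketch of the judge pipeline after this change (outside of lighteval).
# The model id and max_new_tokens are hypothetical, chosen only for illustration.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

model_id = "meta-llama/Meta-Llama-3-8B-Instruct"  # hypothetical judge model

# Load the judge model in float16, mirroring the dtype chosen in the diff.
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype=torch.float16, trust_remote_code=False, device_map="cuda"
)
tokenizer = AutoTokenizer.from_pretrained(model_id)

# Wrap the model in a text-generation pipeline, as the patched method does.
judge_pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)

out = judge_pipe("Rate this answer from 1 to 10:", max_new_tokens=32)
print(out[0]["generated_text"])
```

One practical note on the dtype choice: float16 runs on pre-Ampere GPUs (e.g. T4, V100) that lack bfloat16 support, at the cost of a narrower exponent range than bfloat16.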