
Commit

Don't compute confidence if the LLM does not return logprobs in the response
nihit committed Dec 12, 2024
1 parent f43a21d commit 65ca5b2
Showing 1 changed file with 2 additions and 29 deletions.
src/autolabel/confidence.py (31 changes: 2 additions & 29 deletions)
@@ -276,36 +276,9 @@ async def calculate(
            if model_generation.raw_response == "":
                model_generation.confidence_score = 0
                return model_generation.confidence_score
            if self.cache:
                cache_entry = ConfidenceCacheEntry(
                    prompt=model_generation.prompt,
                    raw_response=model_generation.raw_response,
                    score_type=self.score_type,
                )
                logprobs = self.cache.lookup(cache_entry)

                # On cache miss, compute logprobs using API call and update cache
                if logprobs == None:
                    logprobs = await self.compute_confidence(
                        model_generation.prompt,
                        model_generation.raw_response,
                    )
                    if not logprobs:
                        return self.return_empty_logprob(model_generation)
                    cache_entry = ConfidenceCacheEntry(
                        prompt=model_generation.prompt,
                        raw_response=model_generation.raw_response,
                        logprobs=logprobs,
                        score_type=self.score_type,
                        ttl_ms=self.TTL_MS,
                    )
                    self.cache.update(cache_entry)

            else:
                logprobs = await self.compute_confidence(
                    model_generation.prompt, model_generation.raw_response,
                )
                if not logprobs:
                    return self.return_empty_logprob(model_generation)
            return self.return_empty_logprob(model_generation)
        else:
            if model_generation.generation_info is None:
                logger.debug("No generation info found")
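For orientation, below is a minimal, self-contained sketch of the behaviour this commit appears to leave in place: when the LLM response carries no logprobs, the calculator skips the extra confidence-computation call and simply returns an empty score. This is an illustration, not the repository's actual code. The ModelGeneration dataclass, the ConfidenceCalculatorSketch class, and the averaging fallback are hypothetical stand-ins; only the names raw_response, generation_info, confidence_score, return_empty_logprob, and compute_confidence come from the diff above, and the real calculate method in src/autolabel/confidence.py has more branches than shown here.

# Minimal sketch, assuming a simplified container object and calculator class.
import asyncio
from dataclasses import dataclass
from typing import Optional


@dataclass
class ModelGeneration:
    # Hypothetical stand-in for the model_generation object seen in the diff.
    prompt: str
    raw_response: str
    generation_info: Optional[dict] = None
    confidence_score: float = 0.0


class ConfidenceCalculatorSketch:
    def return_empty_logprob(self, model_generation: ModelGeneration) -> float:
        # No logprobs available -> no confidence score.
        model_generation.confidence_score = 0
        return model_generation.confidence_score

    async def calculate(self, model_generation: ModelGeneration) -> float:
        if model_generation.raw_response == "":
            model_generation.confidence_score = 0
            return model_generation.confidence_score
        info = model_generation.generation_info or {}
        if not info.get("logprobs"):
            # Post-commit behaviour: the response did not include logprobs,
            # so skip the extra round trip (pre-commit, this path called
            # self.compute_confidence, with optional caching) and bail out.
            return self.return_empty_logprob(model_generation)
        # Logprobs were returned in the response; average them as a toy score.
        logprobs = info["logprobs"]
        model_generation.confidence_score = sum(logprobs) / len(logprobs)
        return model_generation.confidence_score


generation = ModelGeneration(prompt="Label this review", raw_response="positive")
print(asyncio.run(ConfidenceCalculatorSketch().calculate(generation)))  # prints 0

With logprobs present, for example generation_info={"logprobs": [-0.1, -0.2]}, the same call in this sketch returns their average (-0.15) instead of bailing out early.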
