fix perplexity scores

This commit is contained in:
Wing Lian
2025-04-17 07:58:53 -07:00
parent d2637fb01d
commit b4c7d9c29d

View File

@@ -36,7 +36,7 @@ One day, a little fish named Fin was swimming near the shore. He saw a big crab
"""
result = metric.compute(model, [sample_text])
ppl = result["score"]
-    assert round(ppl, 2) == 7.41
+    assert round(ppl, 2) == 75.14
def test_perplexity_short(model, metric):
@@ -44,4 +44,4 @@ def test_perplexity_short(model, metric):
sample_text = "Once upon a time, there was a little car named Beep. Beep loved to go fast and play in the sun."
result = metric.compute(model, [sample_text])
ppl = result["score"]
-    assert round(ppl, 2) == 10.33
+    assert round(ppl, 2) == 70.54