Commit

fix: use get_temperature in base LLM generate (#1520)
Matches the behavior in agenerate, avoiding some API call failures
sapphire-arches authored Oct 17, 2024
1 parent efdda3f commit 87e5fa5
Showing 1 changed file with 2 additions and 2 deletions.
src/ragas/llms/base.py (2 additions, 2 deletions)
@@ -88,7 +88,7 @@ async def generate(
         """Generate text using the given event loop."""
 
         if temperature is None:
-            temperature = 1e-8
+            temperature = self.get_temperature(n)
 
         if is_async:
             agenerate_text_with_retry = add_async_retry(
@@ -280,7 +280,7 @@ async def agenerate_text(
         callbacks: Callbacks = None,
     ) -> LLMResult:
         if temperature is None:
-            temperature = 1e-8
+            temperature = self.get_temperature(n)
 
         kwargs = self.check_args(n, temperature, stop, callbacks)
         li_response = await self.llm.acomplete(prompt.to_string(), **kwargs)
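
For reference, a minimal sketch of the helper this commit switches to. Only the call self.get_temperature(n) appears in the diff; the class name and the exact temperature values below are illustrative assumptions, not taken from this change.

# Hypothetical sketch of the get_temperature helper assumed by this commit;
# the 0.3 and 1e-8 values are illustrative, not confirmed by the diff above.
class BaseRagasLLM:
    def get_temperature(self, n: int) -> float:
        """Pick a sampling temperature for n completions.

        A near-zero temperature keeps a single completion effectively
        deterministic, while n > 1 completions need some randomness so
        the generations differ from one another.
        """
        return 0.3 if n > 1 else 1e-8

Routing both generate and agenerate_text through the same helper keeps the synchronous and asynchronous paths consistent, which is the point of the fix described in the commit message.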
