mirror of
https://github.com/hwchase17/langchain.git
synced 2025-06-24 15:43:54 +00:00
ollama[patch]: Support seed param for OllamaLLM (#30553)
**Description:** Add the `seed` parameter to the `OllamaLLM` client to support reproducible generation. **Issue:** Follow-up to a similar issue, https://github.com/langchain-ai/langchain/issues/24703; see also https://github.com/langchain-ai/langchain/pull/24782. **Dependencies:** n/a
This commit is contained in:
parent
8395abbb42
commit
64f97e707e
@ -84,6 +84,11 @@ class OllamaLLM(BaseLLM):
|
||||
"""The temperature of the model. Increasing the temperature will
|
||||
make the model answer more creatively. (Default: 0.8)"""
|
||||
|
||||
seed: Optional[int] = None
|
||||
"""Sets the random number seed to use for generation. Setting this
|
||||
to a specific number will make the model generate the same text for
|
||||
the same prompt."""
|
||||
|
||||
stop: Optional[List[str]] = None
|
||||
"""Sets the stop tokens to use."""
|
||||
|
||||
@ -150,6 +155,7 @@ class OllamaLLM(BaseLLM):
|
||||
"repeat_last_n": self.repeat_last_n,
|
||||
"repeat_penalty": self.repeat_penalty,
|
||||
"temperature": self.temperature,
|
||||
"seed": self.seed,
|
||||
"stop": self.stop if stop is None else stop,
|
||||
"tfs_z": self.tfs_z,
|
||||
"top_k": self.top_k,
|
||||
|
Loading…
Reference in New Issue
Block a user