langchain[patch], experimental[minor]: Adds OllamaFunctions wrapper (#13330)

CC @baskaryan @hwchase17 @jmorganca 

I'm having a bit of trouble importing `langchain_experimental` from a
notebook; I'll figure it out tomorrow.

~Ah, and this is also blocked by #13226~
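
For anyone trying this out, here's a minimal usage sketch of the new wrapper (the model name and function schema are illustrative, and the import path assumes the class lands in `langchain_experimental.llms.ollama_functions`):

```python
from langchain_experimental.llms.ollama_functions import OllamaFunctions

# Wrap a local Ollama model so it can emit OpenAI-style function calls.
model = OllamaFunctions(model="mistral")

# Bind a function schema; the wrapper instructs the model to respond
# with a call matching one of these definitions.
model = model.bind(
    functions=[
        {
            "name": "get_current_weather",
            "description": "Get the current weather in a given location",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "City and state, e.g. San Francisco, CA",
                    },
                },
                "required": ["location"],
            },
        }
    ],
    function_call={"name": "get_current_weather"},
)

print(model.invoke("What's the weather like in Boston?"))
```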

---------

Co-authored-by: Lance Martin <lance@langchain.dev>
Co-authored-by: Bagatur <baskaryan@gmail.com>
Jacob Lee committed 2023-11-30 16:13:57 -08:00 (committed by GitHub)
parent 4063bf144a, commit 3328507f11
7 changed files with 529 additions and 6 deletions


@@ -94,11 +94,18 @@ class _OllamaCommon(BaseLanguageModel):
     template: Optional[str] = None
     """full prompt or prompt template (overrides what is defined in the Modelfile)"""
 
+    format: Optional[str] = None
+    """Specify the format of the output (e.g., json)"""
+
+    timeout: Optional[int] = None
+    """Timeout for the request stream"""
+
     @property
     def _default_params(self) -> Dict[str, Any]:
         """Get the default parameters for calling Ollama."""
         return {
             "model": self.model,
+            "format": self.format,
             "options": {
                 "mirostat": self.mirostat,
                 "mirostat_eta": self.mirostat_eta,
@@ -121,7 +128,7 @@ class _OllamaCommon(BaseLanguageModel):
     @property
     def _identifying_params(self) -> Mapping[str, Any]:
         """Get the identifying parameters."""
-        return {**{"model": self.model}, **self._default_params}
+        return {**{"model": self.model, "format": self.format}, **self._default_params}
 
     def _create_stream(
         self,
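
A side effect worth noting: with `format` included in the identifying parameters, two instances that differ only in output format are no longer conflated, e.g. by response caches. A quick sketch:

```python
from langchain.llms import Ollama

llm = Ollama(model="llama2", format="json")
# `format` now surfaces in the identifying parameters alongside `model`.
print(llm._identifying_params["format"])  # "json"
```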
@@ -155,6 +162,7 @@ class _OllamaCommon(BaseLanguageModel):
             headers={"Content-Type": "application/json"},
             json={"prompt": prompt, **params},
             stream=True,
+            timeout=self.timeout,
         )
         response.encoding = "utf-8"
         if response.status_code != 200:
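
Under the hood this simply forwards `timeout` to `requests.post`, where `None` (the default) blocks indefinitely and a numeric value raises `requests.exceptions.Timeout` once exceeded. Roughly:

```python
import requests

try:
    # Illustrative call against the default local Ollama endpoint.
    response = requests.post(
        "http://localhost:11434/api/generate",
        json={"model": "llama2", "prompt": "Hi"},
        stream=True,
        timeout=5,  # seconds; None would wait indefinitely
    )
except requests.exceptions.Timeout:
    print("Ollama did not respond within 5 seconds")
```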