langchain/libs/partners/ollama/langchain_ollama/embeddings.py

from typing import List

import ollama
from langchain_core.embeddings import Embeddings
from langchain_core.pydantic_v1 import BaseModel, Extra
from ollama import AsyncClient


class OllamaEmbeddings(BaseModel, Embeddings):
    """OllamaEmbeddings embedding model.

    Example:
        .. code-block:: python

            from langchain_ollama import OllamaEmbeddings

            embedder = OllamaEmbeddings(model="llama3")
            embedder.embed_query("what is the place that jonathan worked at?")
    """

    model: str
    """Model name to use."""

    class Config:
        """Configuration for this pydantic object."""

        extra = Extra.forbid

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Embed search docs."""
        embedded_docs = ollama.embed(self.model, texts)["embeddings"]
        return embedded_docs

    def embed_query(self, text: str) -> List[float]:
        """Embed query text."""
        return self.embed_documents([text])[0]

    async def aembed_documents(self, texts: List[str]) -> List[List[float]]:
        """Embed search docs."""
        embedded_docs = (await AsyncClient().embed(self.model, texts))["embeddings"]
        return embedded_docs

    async def aembed_query(self, text: str) -> List[float]:
        """Embed query text."""
        return (await self.aembed_documents([text]))[0]
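

# Usage sketch (illustrative, not part of the library module): exercising the
# async path, assuming a local Ollama server with the "llama3" model pulled;
# the model name and query text below are placeholders.
if __name__ == "__main__":
    import asyncio

    async def _demo() -> None:
        embedder = OllamaEmbeddings(model="llama3")
        doc_vectors = await embedder.aembed_documents(["first doc", "second doc"])
        query_vector = await embedder.aembed_query("what is in the first doc?")
        print(len(doc_vectors), len(query_vector))

    asyncio.run(_demo())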