from typing import List

import ollama
from langchain_core.embeddings import Embeddings
from langchain_core.pydantic_v1 import BaseModel, Extra
from ollama import AsyncClient


class OllamaEmbeddings(BaseModel, Embeddings):
    """OllamaEmbeddings embedding model.

    Example:
        .. code-block:: python

            from langchain_ollama import OllamaEmbeddings

            embedder = OllamaEmbeddings(model="llama3")
            embedder.embed_query("what is the place that jonathan worked at?")
    """

    model: str
    """Model name to use."""

    class Config:
        """Configuration for this pydantic object."""

        extra = Extra.forbid

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        """Embed search docs."""
        embedded_docs = ollama.embed(self.model, texts)["embeddings"]
        return embedded_docs

    def embed_query(self, text: str) -> List[float]:
        """Embed query text."""
        return self.embed_documents([text])[0]

    async def aembed_documents(self, texts: List[str]) -> List[List[float]]:
        """Embed search docs."""
        embedded_docs = (await AsyncClient().embed(self.model, texts))["embeddings"]
        return embedded_docs

    async def aembed_query(self, text: str) -> List[float]:
        """Embed query text."""
        return (await self.aembed_documents([text]))[0]
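

# --- Usage sketch (not part of the original module) ---
# A minimal example of how this class might be exercised, assuming a local
# Ollama server is running and the "llama3" model has already been pulled
# (e.g. via `ollama pull llama3`). The model name, documents, and query text
# below are illustrative only.
if __name__ == "__main__":
    import asyncio

    embedder = OllamaEmbeddings(model="llama3")

    # Synchronous path: one embedding vector per input document.
    vectors = embedder.embed_documents(["hello world", "goodbye world"])
    print(len(vectors), len(vectors[0]))

    # Asynchronous path: embed a single query via ollama.AsyncClient.
    query_vector = asyncio.run(embedder.aembed_query("hello world"))
    print(len(query_vector))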