mirror of
https://github.com/csunny/DB-GPT.git
synced 2025-07-23 20:26:15 +00:00
doc:ollama document (#1512)
This commit is contained in:
parent
d3131552d3
commit
f389a0c32d
@ -19,6 +19,7 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
CHROMA_COLLECTION_NAME = "langchain"
|
||||
|
||||
|
||||
@register_resource(
|
||||
_("Chroma Vector Store"),
|
||||
"chroma_vector_store",
|
||||
@ -152,7 +153,6 @@ class ChromaStore(VectorStoreBase):
|
||||
files = list(filter(lambda f: f != "chroma.sqlite3", files))
|
||||
return len(files) > 0
|
||||
|
||||
|
||||
def load_document(self, chunks: List[Chunk]) -> List[str]:
|
||||
"""Load document to vector store."""
|
||||
logger.info("ChromaStore load document")
|
||||
|
@ -66,7 +66,7 @@ class PGVectorStore(VectorStoreBase):
|
||||
embedding_function=self.embeddings,
|
||||
collection_name=self.collection_name,
|
||||
connection_string=self.connection_string,
|
||||
) # mypy: ignore
|
||||
) # mypy: ignore
|
||||
|
||||
def similar_search(
|
||||
self, text: str, topk: int, filters: Optional[MetadataFilters] = None
|
||||
|
41
docs/docs/installation/advanced_usage/ollama.md
Normal file
41
docs/docs/installation/advanced_usage/ollama.md
Normal file
@ -0,0 +1,41 @@
|
||||
# Ollama
|
||||
Ollama is a model serving platform that allows you to deploy and run models locally in a few seconds.
|
||||
It is a convenient tool for running LLMs and embedding models on your own machine.
|
||||
|
||||
### Install ollama
|
||||
If your system is Linux, run:
|
||||
```bash
|
||||
curl -fsSL https://ollama.com/install.sh | sh
|
||||
```
|
||||
For other environments, please refer to the [official Ollama website](https://ollama.com/).
|
||||
### Pull models
|
||||
1. Pull the LLM
|
||||
```bash
|
||||
ollama pull qwen:0.5b
|
||||
```
|
||||
2. Pull the embedding model.
|
||||
```bash
|
||||
ollama pull nomic-embed-text
|
||||
```
|
||||
|
||||
3. Install the Ollama Python package.
|
||||
```bash
|
||||
pip install ollama
|
||||
```
|
||||
|
||||
### Use the Ollama proxy model in the DB-GPT `.env` file
|
||||
|
||||
```bash
|
||||
LLM_MODEL=ollama_proxyllm
|
||||
PROXY_SERVER_URL=http://127.0.0.1:11434
|
||||
PROXYLLM_BACKEND="qwen:0.5b"
|
||||
PROXY_API_KEY=not_used
|
||||
EMBEDDING_MODEL=proxy_ollama
|
||||
proxy_ollama_proxy_server_url=http://127.0.0.1:11434
|
||||
proxy_ollama_proxy_backend="nomic-embed-text:latest"
|
||||
```
|
||||
|
||||
### Run the DB-GPT server
|
||||
```bash
|
||||
python dbgpt/app/dbgpt_server.py
|
||||
```
|
@ -237,6 +237,10 @@ const sidebars = {
|
||||
type: 'doc',
|
||||
id: 'installation/advanced_usage/More_proxyllms',
|
||||
},
|
||||
{
|
||||
type: 'doc',
|
||||
id: 'installation/advanced_usage/ollama',
|
||||
},
|
||||
{
|
||||
type: 'doc',
|
||||
id: 'installation/advanced_usage/vLLM_inference',
|
||||
|
Loading…
Reference in New Issue
Block a user