Mirror of https://github.com/csunny/DB-GPT.git (synced 2025-08-02 00:28:00 +00:00)

fix run path

Commit: 9ddd089381
Parent: b5139ea643
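Every hunk in this commit applies the same run-path fix: each affected script computes the repository root from its own file location and appends it to sys.path before the `pilot.*` imports, so it can be launched directly with `python <script>.py` rather than only as an installed package. A minimal, self-contained sketch of the pattern; the three-levels-up layout (for example an entry script like pilot/server/llmserver.py under the repo root) is an assumption read off the diff:

    import os
    import sys

    # Climb three directories from this file to reach the assumed repository root,
    # e.g. <repo>/pilot/server/llmserver.py -> <repo>.
    ROOT_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    sys.path.append(ROOT_PATH)

    # With the root on sys.path, `from pilot... import ...` resolves even when the
    # script is run directly instead of through an installed package.
    print("added to sys.path:", ROOT_PATH)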
@@ -1,7 +1,12 @@
 from pilot.source_embedding import (SourceEmbedding, register)
+import os
+import sys
+
+ROOT_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+sys.path.append(ROOT_PATH)
 
 __all__ = [
     "SourceEmbedding",
     "register"
 ]
 
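The unchanged tail of this hunk pins the package's public surface with `__all__`. A generic sketch of what that buys a wildcard importer; the definitions below are stand-ins for illustration, not the real DB-GPT classes:

    # mypkg/__init__.py  (hypothetical package used only for illustration)
    class SourceEmbedding:
        """Stand-in for the real embedding class."""

    def register(func):
        """Stand-in for the real register helper; returns its argument unchanged."""
        return func

    __all__ = [
        "SourceEmbedding",
        "register",
    ]

    # A consumer doing `from mypkg import *` now receives only the two names
    # listed in __all__, not os, sys, ROOT_PATH, or other module-level names.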
@@ -24,7 +24,7 @@ class BaseLLMAdaper:
         return model, tokenizer
 
 
-llm_model_adapters = List[BaseLLMAdaper] = []
+llm_model_adapters: List[BaseLLMAdaper] = []
 
 # Register llm models to adapters, by this we can use multi models.
 def register_llm_model_adapters(cls):
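The one-line change above replaces a chained assignment (`llm_model_adapters = List[BaseLLMAdaper] = []`, which attempts item assignment on typing.List and raises a TypeError as soon as the module is imported) with a plain variable annotation. A small sketch of the corrected registry pattern; the diff only shows the signature of register_llm_model_adapters, so the append logic here is a guess:

    from typing import List, Type

    class BaseLLMAdaper:
        """Stand-in base adapter; the real class wraps model/tokenizer loading."""

    # Variable annotation: an empty registry that should hold adapter instances.
    llm_model_adapters: List[BaseLLMAdaper] = []

    # Register llm models to adapters, by this we can use multi models.
    def register_llm_model_adapters(cls: Type[BaseLLMAdaper]) -> None:
        # Hypothetical body; the actual implementation is not part of this diff.
        llm_model_adapters.append(cls())

    register_llm_model_adapters(BaseLLMAdaper)
    print(len(llm_model_adapters))  # -> 1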
@@ -1,14 +1,23 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 
+import os
 import uvicorn
 import asyncio
 import json
+import sys
 from typing import Optional, List
 from fastapi import FastAPI, Request, BackgroundTasks
 from fastapi.responses import StreamingResponse
-from pilot.model.inference import generate_stream
 from pydantic import BaseModel
+
+global_counter = 0
+model_semaphore = None
+
+ROOT_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+sys.path.append(ROOT_PATH)
+
+from pilot.model.inference import generate_stream
 from pilot.model.inference import generate_output, get_embeddings
 
 from pilot.model.loader import ModelLoader
@@ -19,10 +28,6 @@ from pilot.configs.config import Config
 CFG = Config()
 model_path = LLM_MODEL_CONFIG[CFG.LLM_MODEL]
 
-
-global_counter = 0
-model_semaphore = None
-
 ml = ModelLoader(model_path=model_path)
 model, tokenizer = ml.loader(num_gpus=1, load_8bit=ISLOAD_8BIT, debug=ISDEBUG)
 #model, tokenizer = load_model(model_path=model_path, device=DEVICE, num_gpus=1, load_8bit=True, debug=False)
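Besides moving the `from pilot...` imports below the sys.path fix, these two hunks relocate `global_counter` and `model_semaphore` from the model-loading section up to module top. The diff does not show how they are used; the sketch below is a common pattern for such module-level state (an asyncio semaphore created lazily to cap concurrent generation requests) and is included only to illustrate why the names live at module scope, not as the project's actual handler code:

    import asyncio

    global_counter = 0        # served-request counter
    model_semaphore = None    # created lazily once an event loop exists

    async def acquire_generation_slot(limit: int = 5) -> int:
        """Hypothetical helper: wait for a free slot and bump the counter."""
        global global_counter, model_semaphore
        if model_semaphore is None:
            model_semaphore = asyncio.Semaphore(limit)
        await model_semaphore.acquire()
        global_counter += 1
        return global_counter

    async def release_generation_slot() -> None:
        if model_semaphore is not None:
            model_semaphore.release()

    # Example: run one acquire/release cycle.
    print(asyncio.run(acquire_generation_slot()))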
@@ -13,6 +13,11 @@ import requests
 from urllib.parse import urljoin
 
 from langchain import PromptTemplate
+import os
+import sys
+
+ROOT_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+sys.path.append(ROOT_PATH)
 
 from pilot.configs.model_config import KNOWLEDGE_UPLOAD_ROOT_PATH, LLM_MODEL_CONFIG
 from pilot.server.vectordb_qa import KnownLedgeBaseQA
@@ -30,6 +35,8 @@ from pilot.prompts.generator import PromptGenerator
 
 from pilot.commands.exception_not_commands import NotCommands
+
+
 from pilot.conversation import (
     default_conversation,
     conv_templates,