Mirror of https://github.com/csunny/DB-GPT.git (synced 2025-07-31 07:34:07 +00:00)
update

parent 9313174605
commit 5fe17c7c1c
@@ -1,17 +1,19 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 
+import abc
 from typing import List, Optional
 from pilot.model.llm.base import Message
+from pilot.conversation import conv_templates, Conversation, conv_one_shot, auto_dbgpt_one_shot
 from pilot.configs.config import Config
 
 # Overly simple abstraction until we create something better
 # simple retry mechanism when getting a rate error or a bad gateway
 def create_chat_competion(
-    messages: List[Message],
+    conv: Conversation,
     model: Optional[str] = None,
     temperature: float = None,
-    max_tokens: Optional[int] = None,
+    max_new_tokens: Optional[int] = None,
 ) -> str:
     """Create a chat completion using the Vicuna-13b
 
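The signature change above swaps the raw Message list for a Conversation template, matching the conv_one_shot import added at the top of the file. A minimal calling sketch, assuming the FastChat-style Conversation API (copy(), roles, append_message()) that DB-GPT's templates appear to follow; the module path and question text are made up, and since the function body is still a TODO stub, this only illustrates the intended call shape:

from pilot.conversation import conv_one_shot
from pilot.model.llm.llm_utils import create_chat_competion  # hypothetical module path

conv = conv_one_shot.copy()
conv.append_message(conv.roles[0], "Which tables store user orders?")
conv.append_message(conv.roles[1], None)  # empty slot for the model's reply

answer = create_chat_competion(
    conv=conv,
    model=None,          # fall back to the configured default model
    temperature=None,    # fall back to cfg.temperature
    max_new_tokens=512,  # illustrative generation cap
)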
@@ -27,6 +29,42 @@ def create_chat_competion(
     cfg = Config()
     if temperature is None:
         temperature = cfg.temperature
 
     # TODO: request the vicuna model and get a response
+    for plugin in cfg.plugins:
+        pass
+
+
+class ChatIO(abc.ABC):
+    @abc.abstractmethod
+    def prompt_for_input(self, role: str) -> str:
+        """Prompt for input from a role."""
+
+    @abc.abstractmethod
+    def prompt_for_output(self, role: str) -> str:
+        """Prompt for output from a role."""
+
+    @abc.abstractmethod
+    def stream_output(self, output_stream, skip_echo_len: int):
+        """Stream output."""
+
+
+class SimpleChatIO(ChatIO):
+    def prompt_for_input(self, role: str) -> str:
+        return input(f"{role}: ")
+
+    def prompt_for_output(self, role: str) -> str:
+        print(f"{role}: ", end="", flush=True)
+
+    def stream_output(self, output_stream, skip_echo_len: int):
+        pre = 0
+        for outputs in output_stream:
+            # Split into words so the slice/join below emits words, not characters.
+            outputs = outputs[skip_echo_len:].strip().split(" ")
+            now = len(outputs) - 1
+            if now > pre:
+                print(" ".join(outputs[pre:now]), end=" ", flush=True)
+                pre = now
+        print(" ".join(outputs[pre:]), flush=True)
+        return " ".join(outputs)
 
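A minimal sketch of driving SimpleChatIO.stream_output, assuming each item the stream yields is the full decoded text so far, prompt echo included, which is what the skip_echo_len slicing implies; fake_stream and its prompt text are invented for illustration:

def fake_stream():
    prompt = "USER: hello ASSISTANT:"
    reply = " Hi! How can I help you today?"
    # Yield progressively longer snapshots, mimicking incremental decoding.
    for i in range(1, len(reply) + 1):
        yield prompt + reply[:i]

chat_io = SimpleChatIO()
chat_io.prompt_for_output("ASSISTANT")
full_reply = chat_io.stream_output(fake_stream(), skip_echo_len=len("USER: hello ASSISTANT:"))

Because stream_output tracks the index of the last printed word (pre) and holds back the final, possibly partial word until the stream ends, each snapshot adds only newly completed words to the same output line.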
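The ChatIO base class exists so that other front ends can implement the same three-method contract later. A purely hypothetical variant, sketched to show that contract; LoggingChatIO and its path argument do not exist in the repository:

class LoggingChatIO(ChatIO):
    """Echo the conversation to the console and append it to a log file."""

    def __init__(self, path: str):
        self._path = path

    def prompt_for_input(self, role: str) -> str:
        text = input(f"{role}: ")
        with open(self._path, "a") as f:
            f.write(f"{role}: {text}\n")
        return text

    def prompt_for_output(self, role: str) -> str:
        print(f"{role}: ", end="", flush=True)

    def stream_output(self, output_stream, skip_echo_len: int):
        # Keep only the latest snapshot; print and log it once the stream ends.
        final = ""
        for outputs in output_stream:
            final = outputs[skip_echo_len:].strip()
        print(final, flush=True)
        with open(self._path, "a") as f:
            f.write(final + "\n")
        return final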