Add OpenLLM wrapper (#6578)

LLM wrapper for models served with OpenLLM

---------

Signed-off-by: Aaron <29749331+aarnphm@users.noreply.github.com>
Authored-by: Aaron Pham <29749331+aarnphm@users.noreply.github.com>
Co-authored-by: Chaoyu <paranoyang@gmail.com>
Committed by Davis Chase on 2023-06-22 01:18:14 -07:00 (via GitHub)
Parent: d718f3b6d0
Commit: 4fabd02d25
9 changed files with 1227 additions and 98 deletions


@@ -0,0 +1,16 @@
"""Test OpenLLM wrapper."""
from langchain.llms.openllm import OpenLLM
def test_openllm_llm_local() -> None:
llm = OpenLLM(model_name="flan-t5", model_id="google/flan-t5-small")
output = llm("Say foo:")
assert isinstance(output, str)
def test_openllm_with_kwargs() -> None:
llm = OpenLLM(
model_name="flan-t5", model_id="google/flan-t5-small", temperature=0.84
)
output = llm("Say bar:")
assert isinstance(output, str)
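
The tests above only exercise local model loading. The commit description mentions models served with OpenLLM, so pointing the wrapper at a running server would presumably look something like the sketch below; the server_url parameter and the openllm start command are assumptions based on OpenLLM's usual workflow and do not appear in this diff:

"""Sketch: using the wrapper against a running OpenLLM server (assumed usage)."""
from langchain.llms.openllm import OpenLLM

# Assumption: a server was started separately, e.g. with
#   openllm start flan-t5 --model-id google/flan-t5-small
# and the wrapper accepts a server_url pointing at that server.
llm = OpenLLM(server_url="http://localhost:3000")

output = llm("Say baz:")
assert isinstance(output, str)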