mirror of
https://github.com/hwchase17/langchain.git
synced 2025-09-13 13:36:15 +00:00
Add LLM for ChatGLM(2)-6B API (#7774)
Description: Add LLM wrappers for the ChatGLM-6B & ChatGLM2-6B APIs. Related issues: "Will the langchain support ChatGLM?" #4766; "Add support for selfhost models like ChatGLM or transformer models" #1780. Dependencies: no extra library installation required. It wraps API calls to a ChatGLM(2)-6B server (started with api.py), so a running API endpoint is required. Tag maintainer: @mlot. Any comments on this PR would be appreciated. --------- Co-authored-by: mlot <limpo2000@gmail.com> Co-authored-by: Bagatur <baskaryan@gmail.com>
This commit is contained in:
18
tests/integration_tests/llms/test_chatglm.py
Normal file
18
tests/integration_tests/llms/test_chatglm.py
Normal file
@@ -0,0 +1,18 @@
|
||||
"""Test ChatGLM API wrapper."""
|
||||
from langchain.llms.chatglm import ChatGLM
|
||||
from langchain.schema import LLMResult
|
||||
|
||||
|
||||
def test_chatglm_call() -> None:
    """Smoke-test a direct string-in/string-out call against a live ChatGLM server.

    Integration test: requires a running ChatGLM(2)-6B API endpoint at the
    wrapper's default URL.
    """
    model = ChatGLM()
    # Prompt asks (in Chinese) how Beijing and Shanghai differ; we only
    # check the call completes and yields text, not the content itself.
    response = model("北京和上海这两座城市有什么不同?")
    assert isinstance(response, str)
|
||||
|
||||
def test_chatglm_generate() -> None:
    """Smoke-test the batch `generate` API against a live ChatGLM server.

    Integration test: requires a running ChatGLM(2)-6B API endpoint at the
    wrapper's default URL.
    """
    result = ChatGLM().generate(["who are you"])
    # Only the result's structure is checked, not the generated text.
    assert isinstance(result, LLMResult)
    assert isinstance(result.generations, list)
|
Reference in New Issue
Block a user