- **Description:** Baichuan Chat (both the Baichuan-Turbo and Baichuan-Turbo-192K models) has updated its APIs, with breaking changes. For example, `BAICHUAN_SECRET_KEY` is removed in the latest API but is still required by LangChain, so Baichuan's LangChain integration needs to be updated to the latest version.
- **Issue:** #15206
- **Dependencies:** None
- **Twitter handle:** None

@hwchase17

Co-authored-by: BaiChuanHelper <wintergyc@WinterGYCs-MacBook-Pro.local>
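For context, a minimal sketch of how the updated integration would be used after this change. It assumes the only remaining credential is the API key (read here from a `BAICHUAN_API_KEY` environment variable, which is an assumption; the removed `BAICHUAN_SECRET_KEY` is no longer set). The model name, sampling parameters, and call style are taken from the integration tests below.

```python
import os

from langchain_core.messages import HumanMessage

from langchain_community.chat_models.baichuan import ChatBaichuan

# Assumption: after the API update only an API key is needed;
# BAICHUAN_SECRET_KEY is no longer read.
os.environ["BAICHUAN_API_KEY"] = "your-api-key"  # placeholder credential

# Model name and sampling parameters mirror those exercised in the tests below.
chat = ChatBaichuan(model="Baichuan2-Turbo", temperature=0.88, top_p=0.7)
response = chat([HumanMessage(content="Hello")])
print(response.content)
```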
from langchain_core.messages import AIMessage, HumanMessage

from langchain_community.chat_models.baichuan import ChatBaichuan

# For testing, run:
# TEST_FILE=tests/integration_tests/chat_models/test_baichuan.py make test


def test_chat_baichuan_default() -> None:
    chat = ChatBaichuan(streaming=True)
    # Prompt (Chinese): "Recite the poem 'Bring in the Wine' in full, five times."
    message = HumanMessage(content="请完整背诵将进酒,背诵5遍")
    response = chat([message])
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)


def test_chat_baichuan_default_non_streaming() -> None:
    chat = ChatBaichuan()
    # Prompt (Chinese): "Recite the poem 'Bring in the Wine' in full, five times."
    message = HumanMessage(content="请完整背诵将进酒,背诵5遍")
    response = chat([message])
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)


def test_chat_baichuan_turbo() -> None:
    chat = ChatBaichuan(model="Baichuan2-Turbo", streaming=True)
    message = HumanMessage(content="Hello")
    response = chat([message])
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)


def test_chat_baichuan_turbo_non_streaming() -> None:
    chat = ChatBaichuan(model="Baichuan2-Turbo")
    message = HumanMessage(content="Hello")
    response = chat([message])
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)


def test_chat_baichuan_with_temperature() -> None:
    chat = ChatBaichuan(temperature=1.0)
    message = HumanMessage(content="Hello")
    response = chat([message])
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)


def test_chat_baichuan_with_kwargs() -> None:
    chat = ChatBaichuan()
    # Prompt (Chinese): "When did the Baichuan 192K API go live?"
    message = HumanMessage(content="百川192K API是什么时候上线的?")
    response = chat([message], temperature=0.88, top_p=0.7, with_search_enhance=True)
    print(response)
    assert isinstance(response, AIMessage)
    assert isinstance(response.content, str)


def test_extra_kwargs() -> None:
    chat = ChatBaichuan(temperature=0.88, top_p=0.7, with_search_enhance=True)
    assert chat.temperature == 0.88
    assert chat.top_p == 0.7
    assert chat.with_search_enhance is True