community: fix some features on Naver ChatModel & embedding model 2 (#29243)

## Description
- Responding to `NCP API Key` changes.
- To fix the `ChatClovaX` `astream` function so that it raises `SSEError` when an
error event occurs.
- To add `token length` and `ai_filter` to ChatClovaX's
`response_metadata`.
- To update the documentation to reflect the NCP API Key changes.

cc. @efriis @vbarda
This commit is contained in:
CLOVA Studio 개발
2025-01-21 01:01:03 +09:00
committed by GitHub
parent 5d64597490
commit 7a95ffc775
7 changed files with 240 additions and 97 deletions

View File

@@ -1,71 +1,131 @@
"""Test ChatNaver chat model."""
"""Test ChatClovaX chat model."""
from langchain_core.messages import AIMessage, AIMessageChunk
import pytest
from httpx_sse import SSEError
from langchain_core.messages import (
AIMessage,
AIMessageChunk,
)
from langchain_community.chat_models import ChatClovaX
def test_stream() -> None:
    """Test streaming tokens from ChatClovaX.

    Streams a short prompt, checking every chunk's type and, whenever a chunk
    carries response metadata, the Naver-specific keys: token lengths, stop
    reason, and AI filter results.
    """
    # include_ai_filters=True asks the API for "ai_filter" scores, which the
    # metadata assertions below depend on.
    llm = ChatClovaX(include_ai_filters=True)
    for token in llm.stream("I'm Clova"):
        assert isinstance(token, AIMessageChunk)
        assert isinstance(token.content, str)
        # Metadata is only attached to some chunks; guard before asserting.
        if token.response_metadata:
            assert "input_length" in token.response_metadata
            assert "output_length" in token.response_metadata
            assert "stop_reason" in token.response_metadata
            assert "ai_filter" in token.response_metadata
async def test_astream() -> None:
    """Test async streaming tokens from ChatClovaX.

    Async counterpart of the sync streaming test: checks chunk types and the
    Naver-specific response metadata keys (token lengths, stop reason, AI
    filter results) on chunks that carry metadata.
    """
    # include_ai_filters=True asks the API for "ai_filter" scores, which the
    # metadata assertions below depend on.
    llm = ChatClovaX(include_ai_filters=True)
    async for token in llm.astream("I'm Clova"):
        assert isinstance(token, AIMessageChunk)
        assert isinstance(token.content, str)
        # Metadata is only attached to some chunks; guard before asserting.
        if token.response_metadata:
            assert "input_length" in token.response_metadata
            assert "output_length" in token.response_metadata
            assert "stop_reason" in token.response_metadata
            assert "ai_filter" in token.response_metadata
async def test_abatch() -> None:
    """Test async batching tokens from ChatClovaX.

    Sends two prompts via ``abatch`` and validates each returned message's
    type plus the Naver-specific response metadata keys when present.
    """
    # include_ai_filters=True asks the API for "ai_filter" scores, which the
    # metadata assertions below depend on.
    llm = ChatClovaX(include_ai_filters=True)
    result = await llm.abatch(["I'm Clova", "I'm not Clova"])
    for token in result:
        assert isinstance(token, AIMessage)
        assert isinstance(token.content, str)
        # Metadata may be absent; guard before asserting on its keys.
        if token.response_metadata:
            assert "input_length" in token.response_metadata
            assert "output_length" in token.response_metadata
            assert "stop_reason" in token.response_metadata
            assert "ai_filter" in token.response_metadata
async def test_abatch_tags() -> None:
    """Test batch tokens from ChatClovaX with config tags.

    Same as the plain ``abatch`` test but passes a ``tags`` config to confirm
    run configuration does not break the response metadata.
    """
    # include_ai_filters=True asks the API for "ai_filter" scores, which the
    # metadata assertions below depend on.
    llm = ChatClovaX(include_ai_filters=True)
    result = await llm.abatch(["I'm Clova", "I'm not Clova"], config={"tags": ["foo"]})
    for token in result:
        assert isinstance(token, AIMessage)
        assert isinstance(token.content, str)
        # Metadata may be absent; guard before asserting on its keys.
        if token.response_metadata:
            assert "input_length" in token.response_metadata
            assert "output_length" in token.response_metadata
            assert "stop_reason" in token.response_metadata
            assert "ai_filter" in token.response_metadata
def test_batch() -> None:
    """Test batch tokens from ChatClovaX.

    Sends two prompts via ``batch`` and validates each returned message's
    type plus the Naver-specific response metadata keys when present.
    """
    # include_ai_filters=True asks the API for "ai_filter" scores, which the
    # metadata assertions below depend on.
    llm = ChatClovaX(include_ai_filters=True)
    result = llm.batch(["I'm Clova", "I'm not Clova"])
    for token in result:
        assert isinstance(token, AIMessage)
        assert isinstance(token.content, str)
        # Metadata may be absent; guard before asserting on its keys.
        if token.response_metadata:
            assert "input_length" in token.response_metadata
            assert "output_length" in token.response_metadata
            assert "stop_reason" in token.response_metadata
            assert "ai_filter" in token.response_metadata
async def test_ainvoke() -> None:
    """Test async invoke tokens from ChatClovaX.

    Invokes a single prompt asynchronously with a ``tags`` config and
    validates the message type plus the Naver-specific response metadata
    keys when present.
    """
    # include_ai_filters=True asks the API for "ai_filter" scores, which the
    # metadata assertions below depend on.
    llm = ChatClovaX(include_ai_filters=True)
    result = await llm.ainvoke("I'm Clova", config={"tags": ["foo"]})
    assert isinstance(result, AIMessage)
    assert isinstance(result.content, str)
    # Metadata may be absent; guard before asserting on its keys.
    if result.response_metadata:
        assert "input_length" in result.response_metadata
        assert "output_length" in result.response_metadata
        assert "stop_reason" in result.response_metadata
        assert "ai_filter" in result.response_metadata
def test_invoke() -> None:
    """Test invoke tokens from ChatClovaX.

    Invokes a single prompt with a ``tags`` config and validates the message
    type plus the Naver-specific response metadata keys when present.
    """
    # include_ai_filters=True asks the API for "ai_filter" scores, which the
    # metadata assertions below depend on.
    llm = ChatClovaX(include_ai_filters=True)
    # Use a dict literal for the config, consistent with the async variant.
    result = llm.invoke("I'm Clova", config={"tags": ["foo"]})
    assert isinstance(result, AIMessage)
    assert isinstance(result.content, str)
    # Metadata may be absent; guard before asserting on its keys.
    if result.response_metadata:
        assert "input_length" in result.response_metadata
        assert "output_length" in result.response_metadata
        assert "stop_reason" in result.response_metadata
        assert "ai_filter" in result.response_metadata
def test_stream_error_event() -> None:
    """Streaming an over-long prompt must surface the server's SSE error.

    Repeating the prompt 1000 times exceeds the model's input limit, so the
    server emits an error event, which ``stream`` should raise as ``SSEError``.
    """
    chat = ChatClovaX()
    oversized = "What is the best way to reduce my carbon footprint?" * 1000
    with pytest.raises(SSEError):
        # Drain the stream; the error event is raised during iteration.
        for _chunk in chat.stream(oversized):
            pass
async def test_astream_error_event() -> None:
    """Async streaming an over-long prompt must surface the server's SSE error.

    Repeating the prompt 1000 times exceeds the model's input limit, so the
    server emits an error event, which ``astream`` should raise as ``SSEError``.
    """
    chat = ChatClovaX()
    oversized = "What is the best way to reduce my carbon footprint?" * 1000
    with pytest.raises(SSEError):
        # Drain the stream; the error event is raised during iteration.
        async for _chunk in chat.astream(oversized):
            pass

View File

@@ -4,7 +4,7 @@ from langchain_community.embeddings import ClovaXEmbeddings
def test_embedding_documents() -> None:
    """Test ClovaX embeddings on a list of documents."""
    documents = ["foo bar"]
    embedding = ClovaXEmbeddings()
    output = embedding.embed_documents(documents)
@@ -13,7 +13,7 @@ def test_embedding_documents() -> None:
async def test_aembedding_documents() -> None:
    """Test ClovaX embeddings on a list of documents, asynchronously."""
    documents = ["foo bar"]
    embedding = ClovaXEmbeddings()
    output = await embedding.aembed_documents(documents)
@@ -22,7 +22,7 @@ async def test_aembedding_documents() -> None:
def test_embedding_query() -> None:
    """Test ClovaX embeddings on a single query string."""
    document = "foo bar"
    embedding = ClovaXEmbeddings()
    output = embedding.embed_query(document)
@@ -30,7 +30,7 @@ def test_embedding_query() -> None:
async def test_aembedding_query() -> None:
    """Test ClovaX embeddings on a single query string, asynchronously."""
    document = "foo bar"
    embedding = ClovaXEmbeddings()
    output = await embedding.aembed_query(document)