mirror of https://github.com/hwchase17/langchain.git
synced 2025-08-20 01:49:51 +00:00
add test for file search
parent 307dffdc92
commit 86baae9c4d
@@ -2,6 +2,7 @@
 import base64
 import json
+import os
 from pathlib import Path
 from textwrap import dedent
 from typing import Any, AsyncIterator, List, Literal, Optional, cast
@@ -1238,8 +1239,17 @@ def _check_response(response: Optional[BaseMessage]) -> None:
         if block["type"] == "text":
             assert isinstance(block["text"], str)
             for annotation in block["annotations"]:
-                for key in ["end_index", "start_index", "title", "type", "url"]:
-                    assert key in annotation
+                if annotation["type"] == "file_citation":
+                    assert all(
+                        key in annotation
+                        for key in ["file_id", "filename", "index", "type"]
+                    )
+                elif annotation["type"] == "web_search":
+                    assert all(
+                        key in annotation
+                        for key in ["end_index", "start_index", "title", "type", "url"]
+                    )
+
     text_content = response.text()
     assert isinstance(text_content, str)
     assert text_content
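
For reference, the updated assertions imply annotation payloads shaped roughly as follows. The keys come directly from the checks above; the values are purely illustrative and not part of this commit:

# Illustrative annotation shapes inferred from the keys asserted in _check_response.
file_citation_annotation = {
    "type": "file_citation",
    "file_id": "file-abc123",   # illustrative ID
    "filename": "example.pdf",  # illustrative filename
    "index": 0,
}
web_search_annotation = {
    "type": "web_search",
    "start_index": 0,
    "end_index": 42,
    "title": "Example page title",
    "url": "https://example.com",
}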
@@ -1293,3 +1303,20 @@ async def test_web_search_async() -> None:
         full = chunk if full is None else full + chunk
     assert isinstance(full, AIMessageChunk)
     _check_response(full)
+
+
+def test_file_search() -> None:
+    llm = ChatOpenAI(model="gpt-4o")
+    tool = {
+        "type": "file_search",
+        "vector_store_ids": [os.environ["OPENAI_VECTOR_STORE_ID"]],
+    }
+    response = llm.invoke("What is deep research by OpenAI?", tools=[tool])
+    _check_response(response)
+
+    full: Optional[BaseMessageChunk] = None
+    for chunk in llm.stream("What is deep research by OpenAI?", tools=[tool]):
+        assert isinstance(chunk, AIMessageChunk)
+        full = chunk if full is None else full + chunk
+    assert isinstance(full, AIMessageChunk)
+    _check_response(full)
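
Note: test_file_search expects OPENAI_VECTOR_STORE_ID to reference an existing vector store that already contains searchable documents. Below is a minimal setup sketch, not part of this commit, assuming a recent openai SDK where vector stores are exposed as client.vector_stores (older releases nest the same calls under client.beta.vector_stores) and using a hypothetical local file:

# Sketch: create and populate a vector store, then expose its ID to the test.
import os

from openai import OpenAI

client = OpenAI()
vector_store = client.vector_stores.create(name="file-search-test")  # assumed SDK surface
with open("deep_research_notes.pdf", "rb") as f:  # hypothetical document
    client.vector_stores.files.upload_and_poll(
        vector_store_id=vector_store.id, file=f
    )
os.environ["OPENAI_VECTOR_STORE_ID"] = vector_store.id

With the variable set, the new test can be selected with pytest -k test_file_search.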