partner: Upstage quick documentation update (#20869)
* Update the provider docs page. The RAG example was meant to be moved to the cookbook but was merged here by mistake, so it is removed.
* Fix a bug in the Groundedness Check tool.

Co-authored-by: JuHyung-Son <sonju0427@gmail.com>
Co-authored-by: Erick Friis <erick@langchain.dev>
parent: ffad3985a1
commit: 540f384197
@@ -53,7 +53,7 @@
     "| Chat | Build assistants using Solar Mini Chat | `from langchain_upstage import ChatUpstage` | [Go](../../chat/upstage) |\n",
     "| Text Embedding | Embed strings to vectors | `from langchain_upstage import UpstageEmbeddings` | [Go](../../text_embedding/upstage) |\n",
     "| Groundedness Check | Verify groundedness of assistant's response | `from langchain_upstage import GroundednessCheck` | [Go](../../tools/upstage_groundedness_check) |\n",
-    "| Layout Analysis | Serialize documents with tables and figures | `from langchain_upstage import UpstageLayoutAnalysis` | [Go](../../document_loaders/upstage) |\n",
+    "| Layout Analysis | Serialize documents with tables and figures | `from langchain_upstage import UpstageLayoutAnalysisLoader` | [Go](../../document_loaders/upstage) |\n",
     "\n",
     "See [documentations](https://developers.upstage.ai/) for more details about the features."
    ]
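For reference, the four components listed in that table all come from the same package. A minimal sketch of pulling them in with the class names as they read after this change (assumes UPSTAGE_API_KEY is set in the environment; the file path is the same placeholder used in the notebook):

from langchain_upstage import (
    ChatUpstage,
    GroundednessCheck,
    UpstageEmbeddings,
    UpstageLayoutAnalysisLoader,
)

chat = ChatUpstage()  # assistant built on Solar Mini Chat
embeddings = UpstageEmbeddings()  # embed strings to vectors
checker = GroundednessCheck()  # verify groundedness of an assistant's response
loader = UpstageLayoutAnalysisLoader("/PATH/TO/YOUR/FILE.pdf", split="page")  # document loader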
@@ -172,10 +172,10 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "from langchain_upstage import UpstageLayoutAnalysis\n",
+    "from langchain_upstage import UpstageLayoutAnalysisLoader\n",
     "\n",
     "file_path = \"/PATH/TO/YOUR/FILE.pdf\"\n",
-    "layzer = UpstageLayoutAnalysis(file_path, split=\"page\")\n",
+    "layzer = UpstageLayoutAnalysisLoader(file_path, split=\"page\")\n",
     "\n",
     "# For improved memory efficiency, consider using the lazy_load method to load documents page by page.\n",
     "docs = layzer.load() # or layzer.lazy_load()\n",
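The lazy_load variant mentioned in that comment streams pages instead of materializing the whole list. A rough sketch of the same cell iterating page by page (same placeholder path and loader settings as above):

from langchain_upstage import UpstageLayoutAnalysisLoader

file_path = "/PATH/TO/YOUR/FILE.pdf"
layzer = UpstageLayoutAnalysisLoader(file_path, split="page")

# lazy_load() yields one Document at a time, so large files are never held
# fully in memory, unlike load(), which returns the complete list at once.
for doc in layzer.lazy_load():
    print(doc)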
@@ -183,81 +183,6 @@
     "for doc in docs[:3]:\n",
     "    print(doc)"
    ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "### Retrieval-Augmented Generation"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from typing import List\n",
-    "\n",
-    "from langchain_community.vectorstores import DocArrayInMemorySearch\n",
-    "from langchain_core.documents.base import Document\n",
-    "from langchain_core.output_parsers import StrOutputParser\n",
-    "from langchain_core.prompts import ChatPromptTemplate\n",
-    "from langchain_core.runnables import RunnablePassthrough\n",
-    "from langchain_core.runnables.base import RunnableSerializable\n",
-    "from langchain_upstage import (\n",
-    "    ChatUpstage,\n",
-    "    GroundednessCheck,\n",
-    "    UpstageEmbeddings,\n",
-    "    UpstageLayoutAnalysis,\n",
-    ")\n",
-    "\n",
-    "model = ChatUpstage()\n",
-    "\n",
-    "file_path = (\n",
-    "    \"/PATH/TO/YOUR/FILE.pdf\" # e.g. \"libs/partners/upstage/tests/examples/solar.pdf\"\n",
-    ")\n",
-    "loader = UpstageLayoutAnalysis(file_path=file_path, split=\"element\")\n",
-    "docs = loader.load()\n",
-    "\n",
-    "vectorstore = DocArrayInMemorySearch.from_documents(docs, embedding=UpstageEmbeddings())\n",
-    "retriever = vectorstore.as_retriever()\n",
-    "\n",
-    "template = \"\"\"Answer the question based only on the following context:\n",
-    "{context}\n",
-    "\n",
-    "Question: {question}\n",
-    "\"\"\"\n",
-    "prompt = ChatPromptTemplate.from_template(template)\n",
-    "output_parser = StrOutputParser()\n",
-    "\n",
-    "question = \"ASK ANY QUESTION HERE.\" # e.g. \"How many parameters in SOLAR model?\"\n",
-    "\n",
-    "retrieved_docs = retriever.get_relevant_documents(question)\n",
-    "\n",
-    "groundedness_check = GroundednessCheck()\n",
-    "groundedness = \"\"\n",
-    "while groundedness != \"grounded\":\n",
-    "    chain: RunnableSerializable = RunnablePassthrough() | prompt | model | output_parser\n",
-    "\n",
-    "    result = chain.invoke(\n",
-    "        {\n",
-    "            \"context\": retrieved_docs,\n",
-    "            \"question\": question,\n",
-    "        }\n",
-    "    )\n",
-    "\n",
-    "    # convert all Documents to string\n",
-    "    def formatDocumentsAsString(docs: List[Document]) -> str:\n",
-    "        return \"\\n\".join([doc.page_content for doc in docs])\n",
-    "\n",
-    "    groundedness = groundedness_check.run(\n",
-    "        {\n",
-    "            \"context\": formatDocumentsAsString(retrieved_docs),\n",
-    "            \"assistant_message\": result,\n",
-    "        }\n",
-    "    ).content"
-   ]
   }
  ],
  "metadata": {
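The removed cell ended with groundedness_check.run(...).content, while the tool's _run (shown further below) returns a plain string, so a cookbook-style version of that loop would drop the trailing .content and use the context/query input keys exercised by the integration test below. A rough sketch only, with hypothetical context and question strings in place of the retriever output:

from langchain_upstage import ChatUpstage, GroundednessCheck

model = ChatUpstage()
groundedness_check = GroundednessCheck()

# Hypothetical stand-ins for the retrieved context and the user question.
context = "Solar Mini is a chat model provided by Upstage."
question = "Who provides Solar Mini?"

groundedness = ""
answer = ""
while groundedness != "grounded":
    # Answer strictly from the supplied context, as in the removed notebook cell.
    answer = model.invoke(
        f"Answer the question based only on the following context:\n{context}\n\nQuestion: {question}"
    ).content

    # run() returns "grounded", "notGrounded", or "notSure" as a plain string.
    groundedness = groundedness_check.run({"context": context, "query": answer})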
@@ -1,5 +1,5 @@
 import os
-from typing import Literal, Optional, Type, Union
+from typing import Any, Literal, Optional, Type, Union
 
 from langchain_core.callbacks import (
     AsyncCallbackManagerForToolRun,
@@ -8,6 +8,7 @@ from langchain_core.callbacks import (
 from langchain_core.messages import AIMessage, HumanMessage
 from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr
 from langchain_core.tools import BaseTool
+from langchain_core.utils import convert_to_secret_str
 
 from langchain_upstage import ChatUpstage
 
@@ -51,11 +52,14 @@ class GroundednessCheck(BaseTool):
 
     args_schema: Type[BaseModel] = GroundednessCheckInput
 
-    def __init__(self, upstage_api_key: Optional[SecretStr] = None):
+    def __init__(self, **kwargs: Any) -> None:
+        upstage_api_key = kwargs.get("upstage_api_key", None)
+        if not upstage_api_key:
+            upstage_api_key = kwargs.get("api_key", None)
         if not upstage_api_key:
             upstage_api_key = SecretStr(os.getenv("UPSTAGE_API_KEY", ""))
-        else:
-            upstage_api_key = upstage_api_key
+        upstage_api_key = convert_to_secret_str(upstage_api_key)
 
         if (
             not upstage_api_key
             or not upstage_api_key.get_secret_value()
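After this change the constructor accepts the key under either keyword, or falls back to the UPSTAGE_API_KEY environment variable — the same three forms exercised by the unit test at the end of this commit. A minimal sketch with placeholder key values:

import os

from langchain_upstage import GroundednessCheck

os.environ["UPSTAGE_API_KEY"] = "placeholder-key"  # placeholder; use a real key in practice

tool_from_env = GroundednessCheck()  # picks up UPSTAGE_API_KEY from the environment
tool_explicit = GroundednessCheck(upstage_api_key="placeholder-key")
tool_alias = GroundednessCheck(api_key="placeholder-key")  # "api_key" alias added by this commit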
@@ -76,7 +80,9 @@ class GroundednessCheck(BaseTool):
         run_manager: Optional[CallbackManagerForToolRun] = None,
     ) -> Union[str, Literal["grounded", "notGrounded", "notSure"]]:
         """Use the tool."""
-        response = self.api_wrapper.invoke([HumanMessage(context), AIMessage(query)])
+        response = self.api_wrapper.invoke(
+            [HumanMessage(context), AIMessage(query)], stream=False
+        )
         return str(response.content)
 
     async def _arun(
@@ -86,6 +92,6 @@ class GroundednessCheck(BaseTool):
         run_manager: Optional[AsyncCallbackManagerForToolRun] = None,
     ) -> Union[str, Literal["grounded", "notGrounded", "notSure"]]:
         response = await self.api_wrapper.ainvoke(
-            [HumanMessage(context), AIMessage(query)]
+            [HumanMessage(context), AIMessage(query)], stream=False
         )
         return str(response.content)
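Both _run and _arun now pass stream=False to the underlying chat call; nothing changes for callers, and the async path mirrors the sync one. A rough usage sketch, assuming a valid UPSTAGE_API_KEY in the environment and reusing the placeholder inputs from the tests below (tool.arun is the standard async entry point inherited from BaseTool):

import asyncio

from langchain_upstage import GroundednessCheck


async def main() -> None:
    tool = GroundednessCheck()
    # arun() resolves to the same "grounded" / "notGrounded" / "notSure" string as run().
    verdict = await tool.arun({"context": "foo bar", "query": "bar foo"})
    print(verdict)


asyncio.run(main())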
@@ -1,3 +1,8 @@
+import os
+
+import openai
+import pytest
+
 from langchain_upstage import GroundednessCheck
 
 
@@ -8,6 +13,19 @@ def test_langchain_upstage_groundedness_check() -> None:
 
     assert output in ["grounded", "notGrounded", "notSure"]
 
+    api_key = os.environ.get("UPSTAGE_API_KEY", None)
+
+    tool = GroundednessCheck(upstage_api_key=api_key)
+    output = tool.run({"context": "foo bar", "query": "bar foo"})
+
+    assert output in ["grounded", "notGrounded", "notSure"]
+
+
+def test_langchain_upstage_groundedness_check_fail_with_wrong_api_key() -> None:
+    tool = GroundednessCheck(api_key="wrong-key")
+    with pytest.raises(openai.AuthenticationError):
+        tool.run({"context": "foo bar", "query": "bar foo"})
+
 
 async def test_langchain_upstage_groundedness_check_async() -> None:
     """Test Upstage Groundedness Check asynchronous."""
@@ -8,3 +8,5 @@ os.environ["UPSTAGE_API_KEY"] = "foo"
 def test_initialization() -> None:
     """Test embedding model initialization."""
     GroundednessCheck()
+    GroundednessCheck(upstage_api_key="key")
+    GroundednessCheck(api_key="key")