mirror of https://github.com/hwchase17/langchain.git
synced 2026-02-13 06:16:26 +00:00

Compare commits: eugene/upd ... langchain- (20 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 40df0249fb |  |
|  | f298cd3446 |  |
|  | e19b6ad266 |  |
|  | 8defdb10d8 |  |
|  | 458d6f76ad |  |
|  | bde3dbaed2 |  |
|  | 6b24eeb884 |  |
|  | d82eec6aad |  |
|  | da037f1c55 |  |
|  | 21a43ee3dc |  |
|  | 28594567de |  |
|  | cacd68b2a7 |  |
|  | 1ee432e904 |  |
|  | 0bfbd4f7bd |  |
|  | 7ededced7d |  |
|  | 284e1a7e9e |  |
|  | d67e1dfe32 |  |
|  | df4fc60312 |  |
|  | c074922876 |  |
|  | bbb7c267e5 |  |
@@ -90,7 +90,8 @@
 "import os\n",
 "from getpass import getpass\n",
 "\n",
-"os.environ[\"OPENAI_API_KEY\"] = getpass()\n",
+"if \"OPENAI_API_KEY\" not in os.environ:\n",
+"    os.environ[\"OPENAI_API_KEY\"] = getpass()\n",
 "# Please manually enter OpenAI Key"
 ]
 },

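Unrolled from the notebook JSON above, the change this and the following hunks repeat is an idempotent credential prompt: only ask for the key when the environment variable is not already set. A minimal sketch of the pattern (the variable name is just the one used here; the same guard appears below for ANTHROPIC_API_KEY, AI21_API_KEY, ANYSCALE_API_KEY, EVERLYAI_API_KEY, and GIGACHAT_CREDENTIALS):

```python
import os
from getpass import getpass

# Prompt only when the key is missing, so re-running the cell or exporting
# the variable beforehand skips the interactive prompt.
if "OPENAI_API_KEY" not in os.environ:
    os.environ["OPENAI_API_KEY"] = getpass()
```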
@@ -8,4 +8,4 @@ myst-nb>=1.1.1
 pyyaml
 sphinx-design
 sphinx-copybutton
-beautifulso[up4
+beautifulsoup4

@@ -45,7 +45,8 @@
 "import os\n",
 "from getpass import getpass\n",
 "\n",
-"os.environ[\"OPENAI_API_KEY\"] = getpass()"
+"if \"OPENAI_API_KEY\" not in os.environ:\n",
+"    os.environ[\"OPENAI_API_KEY\"] = getpass()"
 ]
 },
 {

@@ -49,7 +49,8 @@
 "import os\n",
 "from getpass import getpass\n",
 "\n",
-"os.environ[\"OPENAI_API_KEY\"] = getpass()"
+"if \"OPENAI_API_KEY\" not in os.environ:\n",
+"    os.environ[\"OPENAI_API_KEY\"] = getpass()"
 ]
 },
 {

@@ -50,7 +50,8 @@
 "\n",
 "from langchain_openai import ChatOpenAI\n",
 "\n",
-"os.environ[\"OPENAI_API_KEY\"] = getpass()\n",
+"if \"OPENAI_API_KEY\" not in os.environ:\n",
+"    os.environ[\"OPENAI_API_KEY\"] = getpass()\n",
 "\n",
 "llm = ChatOpenAI()"
 ]

@@ -58,7 +58,8 @@
 "import os\n",
 "from getpass import getpass\n",
 "\n",
-"os.environ[\"OPENAI_API_KEY\"] = getpass()"
+"if \"OPENAI_API_KEY\" not in os.environ:\n",
+"    os.environ[\"OPENAI_API_KEY\"] = getpass()"
 ]
 },
 {
@@ -281,7 +282,8 @@
 "import os\n",
 "from getpass import getpass\n",
 "\n",
-"os.environ[\"ANTHROPIC_API_KEY\"] = getpass()"
+"if \"ANTHROPIC_API_KEY\" not in os.environ:\n",
+"    os.environ[\"ANTHROPIC_API_KEY\"] = getpass()"
 ]
 },
 {

@@ -66,7 +66,8 @@
 "import os\n",
 "from getpass import getpass\n",
 "\n",
-"os.environ[\"OPENAI_API_KEY\"] = getpass()"
+"if \"OPENAI_API_KEY\" not in os.environ:\n",
+"    os.environ[\"OPENAI_API_KEY\"] = getpass()"
 ]
 },
 {

@@ -54,7 +54,8 @@
 "import os\n",
 "from getpass import getpass\n",
 "\n",
-"os.environ[\"OPENAI_API_KEY\"] = getpass()"
+"if \"OPENAI_API_KEY\" not in os.environ:\n",
+"    os.environ[\"OPENAI_API_KEY\"] = getpass()"
 ]
 },
 {

@@ -25,7 +25,8 @@
 "import os\n",
 "from getpass import getpass\n",
 "\n",
-"os.environ[\"OPENAI_API_KEY\"] = getpass()\n",
+"if \"OPENAI_API_KEY\" not in os.environ:\n",
+"    os.environ[\"OPENAI_API_KEY\"] = getpass()\n",
 "# Please manually enter OpenAI Key"
 ]
 },

@@ -11,12 +11,30 @@
|
||||
"\n",
|
||||
"The `merge_message_runs` utility makes it easy to merge consecutive messages of the same type.\n",
|
||||
"\n",
|
||||
"### Setup"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "198ce37f-4466-45a2-8878-d75cd01a5d23",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"%pip install -qU langchain-core langchain-anthropic"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "b5c3ca6e-e5b3-4151-8307-9101713a20ae",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Basic usage"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"execution_count": 8,
|
||||
"id": "1a215bbb-c05c-40b0-a6fd-d94884d517df",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
@@ -24,11 +42,11 @@
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"SystemMessage(content=\"you're a good assistant.\\nyou always respond with a joke.\")\n",
|
||||
"SystemMessage(content=\"you're a good assistant.\\nyou always respond with a joke.\", additional_kwargs={}, response_metadata={})\n",
|
||||
"\n",
|
||||
"HumanMessage(content=[{'type': 'text', 'text': \"i wonder why it's called langchain\"}, 'and who is harrison chasing anyways'])\n",
|
||||
"HumanMessage(content=[{'type': 'text', 'text': \"i wonder why it's called langchain\"}, 'and who is harrison chasing anyways'], additional_kwargs={}, response_metadata={})\n",
|
||||
"\n",
|
||||
"AIMessage(content='Well, I guess they thought \"WordRope\" and \"SentenceString\" just didn\\'t have the same ring to it!\\nWhy, he\\'s probably chasing after the last cup of coffee in the office!')\n"
|
||||
"AIMessage(content='Well, I guess they thought \"WordRope\" and \"SentenceString\" just didn\\'t have the same ring to it!\\nWhy, he\\'s probably chasing after the last cup of coffee in the office!', additional_kwargs={}, response_metadata={})\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
@@ -63,38 +81,6 @@
|
||||
"Notice that if the contents of one of the messages to merge is a list of content blocks then the merged message will have a list of content blocks. And if both messages to merge have string contents then those are concatenated with a newline character."
|
||||
]
|
||||
},
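A compact sketch of that rule, using only `merge_message_runs` and the message classes already shown in this notebook:

```python
from langchain_core.messages import HumanMessage, SystemMessage, merge_message_runs

merged = merge_message_runs(
    [
        SystemMessage("you're a good assistant."),
        SystemMessage("you always respond with a joke."),
        HumanMessage([{"type": "text", "text": "i wonder why it's called langchain"}]),
        HumanMessage("and who is harrison chasing anyways"),
    ]
)

# The consecutive system messages had string contents, so they are joined with "\n";
# the consecutive human messages merge into one message whose content is a list of blocks.
for message in merged:
    print(type(message).__name__, message.content)
```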
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "11f7e8d3",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"The `merge_message_runs` utility also works with messages composed together using the overloaded `+` operation:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "b51855c5",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"messages = (\n",
|
||||
" SystemMessage(\"you're a good assistant.\")\n",
|
||||
" + SystemMessage(\"you always respond with a joke.\")\n",
|
||||
" + HumanMessage([{\"type\": \"text\", \"text\": \"i wonder why it's called langchain\"}])\n",
|
||||
" + HumanMessage(\"and who is harrison chasing anyways\")\n",
|
||||
" + AIMessage(\n",
|
||||
" 'Well, I guess they thought \"WordRope\" and \"SentenceString\" just didn\\'t have the same ring to it!'\n",
|
||||
" )\n",
|
||||
" + AIMessage(\n",
|
||||
" \"Why, he's probably chasing after the last cup of coffee in the office!\"\n",
|
||||
" )\n",
|
||||
")\n",
|
||||
"\n",
|
||||
"merged = merge_message_runs(messages)\n",
|
||||
"print(\"\\n\\n\".join([repr(x) for x in merged]))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "1b2eee74-71c8-4168-b968-bca580c25d18",
|
||||
@@ -107,23 +93,30 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"execution_count": 9,
|
||||
"id": "6d5a0283-11f8-435b-b27b-7b18f7693592",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Note: you may need to restart the kernel to use updated packages.\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"AIMessage(content=[], response_metadata={'id': 'msg_01D6R8Naum57q8qBau9vLBUX', 'model': 'claude-3-sonnet-20240229', 'stop_reason': 'end_turn', 'stop_sequence': None, 'usage': {'input_tokens': 84, 'output_tokens': 3}}, id='run-ac0c465b-b54f-4b8b-9295-e5951250d653-0', usage_metadata={'input_tokens': 84, 'output_tokens': 3, 'total_tokens': 87})"
|
||||
"AIMessage(content=[], additional_kwargs={}, response_metadata={'id': 'msg_01KNGUMTuzBVfwNouLDpUMwf', 'model': 'claude-3-sonnet-20240229', 'stop_reason': 'end_turn', 'stop_sequence': None, 'usage': {'input_tokens': 84, 'output_tokens': 3}}, id='run-b908b198-9c24-450b-9749-9d4a8182937b-0', usage_metadata={'input_tokens': 84, 'output_tokens': 3, 'total_tokens': 87})"
|
||||
]
|
||||
},
|
||||
"execution_count": 3,
|
||||
"execution_count": 9,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"# pip install -U langchain-anthropic\n",
|
||||
"%pip install -qU langchain-anthropic\n",
|
||||
"from langchain_anthropic import ChatAnthropic\n",
|
||||
"\n",
|
||||
"llm = ChatAnthropic(model=\"claude-3-sonnet-20240229\", temperature=0)\n",
|
||||
@@ -146,19 +139,19 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"execution_count": 10,
|
||||
"id": "460817a6-c327-429d-958e-181a8c46059c",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"[SystemMessage(content=\"you're a good assistant.\\nyou always respond with a joke.\"),\n",
|
||||
" HumanMessage(content=[{'type': 'text', 'text': \"i wonder why it's called langchain\"}, 'and who is harrison chasing anyways']),\n",
|
||||
" AIMessage(content='Well, I guess they thought \"WordRope\" and \"SentenceString\" just didn\\'t have the same ring to it!\\nWhy, he\\'s probably chasing after the last cup of coffee in the office!')]"
|
||||
"[SystemMessage(content=\"you're a good assistant.\\nyou always respond with a joke.\", additional_kwargs={}, response_metadata={}),\n",
|
||||
" HumanMessage(content=[{'type': 'text', 'text': \"i wonder why it's called langchain\"}, 'and who is harrison chasing anyways'], additional_kwargs={}, response_metadata={}),\n",
|
||||
" AIMessage(content='Well, I guess they thought \"WordRope\" and \"SentenceString\" just didn\\'t have the same ring to it!\\nWhy, he\\'s probably chasing after the last cup of coffee in the office!', additional_kwargs={}, response_metadata={})]"
|
||||
]
|
||||
},
|
||||
"execution_count": 4,
|
||||
"execution_count": 10,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
@@ -167,6 +160,53 @@
|
||||
"merger.invoke(messages)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "4178837d-b155-492d-9404-d567accc1fa0",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"`merge_message_runs` can also be placed after a prompt:"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 14,
|
||||
"id": "620530ab-ed05-4899-b984-bfa4cd738465",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"AIMessage(content='A convergent series is an infinite series whose partial sums approach a finite value as more terms are added. In other words, the sequence of partial sums has a limit.\\n\\nMore formally, an infinite series Σ an (where an are the terms of the series) is said to be convergent if the sequence of partial sums:\\n\\nS1 = a1\\nS2 = a1 + a2 \\nS3 = a1 + a2 + a3\\n...\\nSn = a1 + a2 + a3 + ... + an\\n...\\n\\nconverges to some finite number S as n goes to infinity. We write:\\n\\nlim n→∞ Sn = S\\n\\nThe finite number S is called the sum of the convergent infinite series.\\n\\nIf the sequence of partial sums does not approach any finite limit, the infinite series is said to be divergent.\\n\\nSome key properties:\\n- A series converges if and only if the sequence of its partial sums is a Cauchy sequence.\\n- Absolute/conditional convergence criteria help determine if a given series converges.\\n- Convergent series have many important applications in mathematics, physics, engineering etc.', additional_kwargs={}, response_metadata={'id': 'msg_01MfV6y2hep7ZNvDz24A36U4', 'model': 'claude-3-sonnet-20240229', 'stop_reason': 'end_turn', 'stop_sequence': None, 'usage': {'input_tokens': 29, 'output_tokens': 267}}, id='run-9d925f58-021e-4bd0-94fc-f8f5e91010a4-0', usage_metadata={'input_tokens': 29, 'output_tokens': 267, 'total_tokens': 296})"
|
||||
]
|
||||
},
|
||||
"execution_count": 14,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"from langchain_core.prompts import ChatPromptTemplate\n",
|
||||
"\n",
|
||||
"prompt = ChatPromptTemplate(\n",
|
||||
" [\n",
|
||||
" (\"system\", \"You're great a {skill}\"),\n",
|
||||
" (\"system\", \"You're also great at explaining things\"),\n",
|
||||
" (\"human\", \"{query}\"),\n",
|
||||
" ]\n",
|
||||
")\n",
|
||||
"chain = prompt | merger | llm\n",
|
||||
"chain.invoke({\"skill\": \"math\", \"query\": \"what's the definition of a convergent series\"})"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "51ba533a-43c7-4e5f-bd91-a4ec23ceeb34",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"LangSmith Trace: https://smith.langchain.com/public/432150b6-9909-40a7-8ae7-944b7e657438/r/f4ad5fb2-4d38-42a6-b780-25f62617d53f"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "4548d916-ce21-4dc6-8f19-eedb8003ace6",
|
||||
@@ -194,7 +234,7 @@
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.9.1"
|
||||
"version": "3.11.9"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
|
||||
@@ -65,9 +65,11 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import getpass\n",
|
||||
"import os\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = \"sk-...\""
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass.getpass(\"OpenAI API key:\\n\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -1325,7 +1327,7 @@
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.11.2"
|
||||
"version": "3.11.9"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
|
||||
@@ -47,7 +47,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -41,7 +41,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"ANTHROPIC_API_KEY\"] = getpass()"
|
||||
"if \"ANTHROPIC_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"ANTHROPIC_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -39,7 +39,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -60,7 +60,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -46,7 +46,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -62,7 +62,8 @@
|
||||
"\n",
|
||||
"from langchain_anthropic import ChatAnthropic\n",
|
||||
"\n",
|
||||
"os.environ[\"ANTHROPIC_API_KEY\"] = getpass()\n",
|
||||
"if \"ANTHROPIC_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"ANTHROPIC_API_KEY\"] = getpass()\n",
|
||||
"\n",
|
||||
"model = ChatAnthropic(model=\"claude-3-sonnet-20240229\", temperature=0)"
|
||||
]
|
||||
|
||||
@@ -257,17 +257,17 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"execution_count": 19,
|
||||
"id": "9194bcf2",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"Response(output=Joke(setup='Why was the cat sitting on the computer?', punchline='To keep an eye on the mouse!', rating=8))"
|
||||
"FinalResponse(final_output=Joke(setup='Why was the cat sitting on the computer?', punchline='Because it wanted to keep an eye on the mouse!', rating=7))"
|
||||
]
|
||||
},
|
||||
"execution_count": 4,
|
||||
"execution_count": 19,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
@@ -293,28 +293,28 @@
|
||||
" response: str = Field(description=\"A conversational response to the user's query\")\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"class Response(BaseModel):\n",
|
||||
" output: Union[Joke, ConversationalResponse]\n",
|
||||
"class FinalResponse(BaseModel):\n",
|
||||
" final_output: Union[Joke, ConversationalResponse]\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"structured_llm = llm.with_structured_output(Response)\n",
|
||||
"structured_llm = llm.with_structured_output(FinalResponse)\n",
|
||||
"\n",
|
||||
"structured_llm.invoke(\"Tell me a joke about cats\")"
|
||||
]
|
||||
},
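Pieced together from the hunks above, the renamed wrapper looks roughly like this. It is a sketch, not the notebook verbatim: `llm` is whatever chat model the notebook initialized earlier, the `Joke` field names are read off the printed result (its field descriptions are omitted), and the import uses plain `pydantic`, in line with the 0.3 migration this branch targets.

```python
from typing import Union

from pydantic import BaseModel, Field


class Joke(BaseModel):
    setup: str
    punchline: str
    rating: int


class ConversationalResponse(BaseModel):
    response: str = Field(description="A conversational response to the user's query")


class FinalResponse(BaseModel):
    final_output: Union[Joke, ConversationalResponse]


structured_llm = llm.with_structured_output(FinalResponse)
structured_llm.invoke("Tell me a joke about cats")
# -> FinalResponse(final_output=Joke(setup='...', punchline='...', rating=7))
```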
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"execution_count": 20,
|
||||
"id": "84d86132",
|
||||
"metadata": {},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"Response(output=ConversationalResponse(response=\"I'm just a digital assistant, so I don't have feelings, but I'm here and ready to help you. How can I assist you today?\"))"
|
||||
"FinalResponse(final_output=ConversationalResponse(response=\"I'm just a bunch of code, so I don't have feelings, but I'm here and ready to help you! How can I assist you today?\"))"
|
||||
]
|
||||
},
|
||||
"execution_count": 5,
|
||||
"execution_count": 20,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
@@ -915,9 +915,9 @@
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"display_name": "poetry-venv-2",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
"name": "poetry-venv-2"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
|
||||
@@ -57,7 +57,8 @@
|
||||
"\n",
|
||||
"from langchain_openai import ChatOpenAI\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass()\n",
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass()\n",
|
||||
"\n",
|
||||
"llm = ChatOpenAI(model=\"gpt-4o-mini\", temperature=0)"
|
||||
]
|
||||
|
||||
@@ -57,7 +57,8 @@
|
||||
"\n",
|
||||
"from langchain_openai import ChatOpenAI\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass()\n",
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass()\n",
|
||||
"\n",
|
||||
"llm = ChatOpenAI(model=\"gpt-4o-mini\", temperature=0)"
|
||||
]
|
||||
|
||||
@@ -53,7 +53,8 @@
|
||||
"\n",
|
||||
"from langchain_openai import ChatOpenAI\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass()\n",
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass()\n",
|
||||
"\n",
|
||||
"llm = ChatOpenAI(model=\"gpt-4o-mini\", temperature=0)"
|
||||
]
|
||||
|
||||
@@ -58,7 +58,8 @@
|
||||
"\n",
|
||||
"from langchain_openai import ChatOpenAI\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass()\n",
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass()\n",
|
||||
"\n",
|
||||
"llm = ChatOpenAI(model=\"gpt-4o-mini\", temperature=0)\n",
|
||||
"llm_with_tools = llm.bind_tools(tools)"
|
||||
|
||||
@@ -46,7 +46,8 @@
|
||||
"\n",
|
||||
"from langchain_openai import ChatOpenAI\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass()\n",
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass()\n",
|
||||
"\n",
|
||||
"llm = ChatOpenAI(model=\"gpt-4o-mini\", temperature=0)\n",
|
||||
"llm_with_tools = llm.bind_tools(tools)"
|
||||
|
||||
@@ -50,18 +50,19 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "62e0dbc3",
|
||||
"metadata": {
|
||||
"tags": []
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"AI21_API_KEY\"] = getpass()"
|
||||
],
|
||||
"outputs": [],
|
||||
"execution_count": null
|
||||
"if \"AI21_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"AI21_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
@@ -73,14 +74,14 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "7c2e19d3-7c58-4470-9e1a-718b27a32056",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"\n",
|
||||
"# os.environ[\"LANGCHAIN_API_KEY\"] = getpass.getpass(\"Enter your LangSmith API key: \")"
|
||||
],
|
||||
"outputs": [],
|
||||
"execution_count": null
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
@@ -115,15 +116,15 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "c40756fb-cbf8-4d44-a293-3989d707237e",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_ai21 import ChatAI21\n",
|
||||
"\n",
|
||||
"llm = ChatAI21(model=\"jamba-instruct\", temperature=0)"
|
||||
],
|
||||
"outputs": [],
|
||||
"execution_count": null
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
@@ -135,8 +136,10 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "46b982dc-5d8a-46da-a711-81c03ccd6adc",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"messages = [\n",
|
||||
" (\n",
|
||||
@@ -147,9 +150,7 @@
|
||||
"]\n",
|
||||
"ai_msg = llm.invoke(messages)\n",
|
||||
"ai_msg"
|
||||
],
|
||||
"outputs": [],
|
||||
"execution_count": null
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
@@ -163,6 +164,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "39353473fce5dd2e",
|
||||
"metadata": {
|
||||
"collapsed": false,
|
||||
@@ -170,6 +172,7 @@
|
||||
"outputs_hidden": false
|
||||
}
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from langchain_core.prompts import ChatPromptTemplate\n",
|
||||
"\n",
|
||||
@@ -191,25 +194,26 @@
|
||||
" \"input\": \"I love programming.\",\n",
|
||||
" }\n",
|
||||
")"
|
||||
],
|
||||
"outputs": [],
|
||||
"execution_count": null
|
||||
]
|
||||
},
|
||||
{
|
||||
"metadata": {},
|
||||
"cell_type": "markdown",
|
||||
"source": "# Tool Calls / Function Calling",
|
||||
"id": "39c0ccd229927eab"
|
||||
"id": "39c0ccd229927eab",
|
||||
"metadata": {},
|
||||
"source": "# Tool Calls / Function Calling"
|
||||
},
|
||||
{
|
||||
"metadata": {},
|
||||
"cell_type": "markdown",
|
||||
"source": "This example shows how to use tool calling with AI21 models:",
|
||||
"id": "2bf6b40be07fe2d4"
|
||||
"id": "2bf6b40be07fe2d4",
|
||||
"metadata": {},
|
||||
"source": "This example shows how to use tool calling with AI21 models:"
|
||||
},
|
||||
{
|
||||
"metadata": {},
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "a181a28df77120fb",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
@@ -219,7 +223,8 @@
|
||||
"from langchain_core.tools import tool\n",
|
||||
"from langchain_core.utils.function_calling import convert_to_openai_tool\n",
|
||||
"\n",
|
||||
"os.environ[\"AI21_API_KEY\"] = getpass()\n",
|
||||
"if \"AI21_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"AI21_API_KEY\"] = getpass()\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"@tool\n",
|
||||
@@ -276,10 +281,7 @@
|
||||
" print(f\"Assistant: {llm_answer.content}\")\n",
|
||||
" else:\n",
|
||||
" print(f\"Assistant: {response.content}\")"
|
||||
],
|
||||
"id": "a181a28df77120fb",
|
||||
"outputs": [],
|
||||
"execution_count": null
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
|
||||
@@ -54,7 +54,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"ANYSCALE_API_KEY\"] = getpass()"
|
||||
"if \"ANYSCALE_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"ANYSCALE_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -47,7 +47,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"EVERLYAI_API_KEY\"] = getpass()"
|
||||
"if \"EVERLYAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"EVERLYAI_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -47,7 +47,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"GIGACHAT_CREDENTIALS\"] = getpass()"
|
||||
"if \"GIGACHAT_CREDENTIALS\" not in os.environ:\n",
|
||||
" os.environ[\"GIGACHAT_CREDENTIALS\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -58,7 +58,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"AI21_API_KEY\"] = getpass()"
|
||||
"if \"AI21_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"AI21_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -24,7 +24,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -67,7 +67,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"AI21_API_KEY\"] = getpass()"
|
||||
"if \"AI21_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"AI21_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -67,7 +67,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"ANTHROPIC_API_KEY\"] = getpass()"
|
||||
"if \"ANTHROPIC_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"ANTHROPIC_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -47,7 +47,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"GIGACHAT_CREDENTIALS\"] = getpass()"
|
||||
"if \"GIGACHAT_CREDENTIALS\" not in os.environ:\n",
|
||||
" os.environ[\"GIGACHAT_CREDENTIALS\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -68,7 +68,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -128,7 +128,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass(\"OpenAI API Key:\")"
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass(\"OpenAI API Key:\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -48,7 +48,8 @@
|
||||
"\n",
|
||||
"from langchain_openai.embeddings import OpenAIEmbeddings\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass(\"OpenAI API Key:\")\n",
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass(\"OpenAI API Key:\")\n",
|
||||
"\n",
|
||||
"embeddings = OpenAIEmbeddings()"
|
||||
]
|
||||
|
||||
@@ -44,7 +44,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"GIGACHAT_CREDENTIALS\"] = getpass()"
|
||||
"if \"GIGACHAT_CREDENTIALS\" not in os.environ:\n",
|
||||
" os.environ[\"GIGACHAT_CREDENTIALS\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -245,7 +245,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -90,7 +90,8 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass(\"OPENAI_API_KEY = \")"
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass(\"OPENAI_API_KEY = \")"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -61,7 +61,8 @@
|
||||
"source": [
|
||||
"KDBAI_ENDPOINT = input(\"KDB.AI endpoint: \")\n",
|
||||
"KDBAI_API_KEY = getpass(\"KDB.AI API key: \")\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass(\"OpenAI API Key: \")"
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass(\"OpenAI API Key: \")"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -44,7 +44,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass(\"Enter your OpenAI key:\")"
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass(\"Enter your OpenAI key:\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -50,7 +50,8 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# %pip install -qU langchain langchain-community langchain-openai youtube-transcript-api pytube langchain-chroma"
|
||||
"%%capture --no-stderr\n",
|
||||
"%pip install -qU langchain langchain-community langchain-openai youtube-transcript-api pytube langchain-chroma"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -595,7 +596,7 @@
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.10.1"
|
||||
"version": "3.11.9"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
|
||||
@@ -39,7 +39,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -38,7 +38,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -46,7 +46,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -37,7 +37,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -100,7 +100,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -40,7 +40,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -44,7 +44,8 @@
|
||||
"import os\n",
|
||||
"from getpass import getpass\n",
|
||||
"\n",
|
||||
"os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
"if \"OPENAI_API_KEY\" not in os.environ:\n",
|
||||
" os.environ[\"OPENAI_API_KEY\"] = getpass()"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -45,6 +45,7 @@ lint_tests: PYTHON_FILES=tests
 lint_tests: MYPY_CACHE=.mypy_cache_test

 lint lint_diff lint_package lint_tests:
 	./scripts/check_pydantic.sh .
 	./scripts/lint_imports.sh .
 	./scripts/check_pickle.sh .
 	[ "$(PYTHON_FILES)" = "" ] || poetry run ruff check $(PYTHON_FILES)
|
||||
49  libs/community/poetry.lock (generated)
@@ -505,6 +505,37 @@ files = [
|
||||
{file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"},
|
||||
{file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"},
|
||||
{file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1745,7 +1776,7 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain"
|
||||
version = "0.3.0.dev1"
|
||||
version = "0.3.0.dev2"
|
||||
description = "Building applications with LLMs through composability"
|
||||
optional = false
|
||||
python-versions = ">=3.9,<4.0"
|
||||
@@ -1755,7 +1786,7 @@ develop = true
|
||||
[package.dependencies]
|
||||
aiohttp = "^3.8.3"
|
||||
async-timeout = {version = "^4.0.0", markers = "python_version < \"3.11\""}
|
||||
langchain-core = "^0.3.0.dev2"
|
||||
langchain-core = "^0.3.0.dev5"
|
||||
langchain-text-splitters = "^0.3.0.dev1"
|
||||
langsmith = "^0.1.17"
|
||||
numpy = [
|
||||
@@ -1774,7 +1805,7 @@ url = "../langchain"
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.0.dev4"
|
||||
version = "0.3.0.dev5"
|
||||
description = "Building applications with LLMs through composability"
|
||||
optional = false
|
||||
python-versions = ">=3.9,<4.0"
|
||||
@@ -1783,7 +1814,7 @@ develop = true
|
||||
|
||||
[package.dependencies]
|
||||
jsonpatch = "^1.33"
|
||||
langsmith = "^0.1.112"
|
||||
langsmith = "^0.1.117"
|
||||
packaging = ">=23.2,<25"
|
||||
pydantic = "^2.7.4"
|
||||
PyYAML = ">=5.3"
|
||||
@@ -3551,7 +3582,9 @@ python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "SQLAlchemy-2.0.34-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:95d0b2cf8791ab5fb9e3aa3d9a79a0d5d51f55b6357eecf532a120ba3b5524db"},
|
||||
{file = "SQLAlchemy-2.0.34-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:243f92596f4fd4c8bd30ab8e8dd5965afe226363d75cab2468f2c707f64cd83b"},
|
||||
{file = "SQLAlchemy-2.0.34-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ea54f7300553af0a2a7235e9b85f4204e1fc21848f917a3213b0e0818de9a24"},
|
||||
{file = "SQLAlchemy-2.0.34-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:173f5f122d2e1bff8fbd9f7811b7942bead1f5e9f371cdf9e670b327e6703ebd"},
|
||||
{file = "SQLAlchemy-2.0.34-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:196958cde924a00488e3e83ff917be3b73cd4ed8352bbc0f2989333176d1c54d"},
|
||||
{file = "SQLAlchemy-2.0.34-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bd90c221ed4e60ac9d476db967f436cfcecbd4ef744537c0f2d5291439848768"},
|
||||
{file = "SQLAlchemy-2.0.34-cp310-cp310-win32.whl", hash = "sha256:3166dfff2d16fe9be3241ee60ece6fcb01cf8e74dd7c5e0b64f8e19fab44911b"},
|
||||
{file = "SQLAlchemy-2.0.34-cp310-cp310-win_amd64.whl", hash = "sha256:6831a78bbd3c40f909b3e5233f87341f12d0b34a58f14115c9e94b4cdaf726d3"},
|
||||
@@ -3572,19 +3605,25 @@ files = [
|
||||
{file = "SQLAlchemy-2.0.34-cp312-cp312-win32.whl", hash = "sha256:fbb034f565ecbe6c530dff948239377ba859420d146d5f62f0271407ffb8c580"},
|
||||
{file = "SQLAlchemy-2.0.34-cp312-cp312-win_amd64.whl", hash = "sha256:707c8f44931a4facd4149b52b75b80544a8d824162602b8cd2fe788207307f9a"},
|
||||
{file = "SQLAlchemy-2.0.34-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:24af3dc43568f3780b7e1e57c49b41d98b2d940c1fd2e62d65d3928b6f95f021"},
|
||||
{file = "SQLAlchemy-2.0.34-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60ed6ef0a35c6b76b7640fe452d0e47acc832ccbb8475de549a5cc5f90c2c06"},
|
||||
{file = "SQLAlchemy-2.0.34-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:413c85cd0177c23e32dee6898c67a5f49296640041d98fddb2c40888fe4daa2e"},
|
||||
{file = "SQLAlchemy-2.0.34-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:25691f4adfb9d5e796fd48bf1432272f95f4bbe5f89c475a788f31232ea6afba"},
|
||||
{file = "SQLAlchemy-2.0.34-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:526ce723265643dbc4c7efb54f56648cc30e7abe20f387d763364b3ce7506c82"},
|
||||
{file = "SQLAlchemy-2.0.34-cp37-cp37m-win32.whl", hash = "sha256:13be2cc683b76977a700948411a94c67ad8faf542fa7da2a4b167f2244781cf3"},
|
||||
{file = "SQLAlchemy-2.0.34-cp37-cp37m-win_amd64.whl", hash = "sha256:e54ef33ea80d464c3dcfe881eb00ad5921b60f8115ea1a30d781653edc2fd6a2"},
|
||||
{file = "SQLAlchemy-2.0.34-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:43f28005141165edd11fbbf1541c920bd29e167b8bbc1fb410d4fe2269c1667a"},
|
||||
{file = "SQLAlchemy-2.0.34-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b68094b165a9e930aedef90725a8fcfafe9ef95370cbb54abc0464062dbf808f"},
|
||||
{file = "SQLAlchemy-2.0.34-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1e03db964e9d32f112bae36f0cc1dcd1988d096cfd75d6a588a3c3def9ab2b"},
|
||||
{file = "SQLAlchemy-2.0.34-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:203d46bddeaa7982f9c3cc693e5bc93db476ab5de9d4b4640d5c99ff219bee8c"},
|
||||
{file = "SQLAlchemy-2.0.34-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ae92bebca3b1e6bd203494e5ef919a60fb6dfe4d9a47ed2453211d3bd451b9f5"},
|
||||
{file = "SQLAlchemy-2.0.34-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:9661268415f450c95f72f0ac1217cc6f10256f860eed85c2ae32e75b60278ad8"},
|
||||
{file = "SQLAlchemy-2.0.34-cp38-cp38-win32.whl", hash = "sha256:895184dfef8708e15f7516bd930bda7e50ead069280d2ce09ba11781b630a434"},
|
||||
{file = "SQLAlchemy-2.0.34-cp38-cp38-win_amd64.whl", hash = "sha256:6e7cde3a2221aa89247944cafb1b26616380e30c63e37ed19ff0bba5e968688d"},
|
||||
{file = "SQLAlchemy-2.0.34-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dbcdf987f3aceef9763b6d7b1fd3e4ee210ddd26cac421d78b3c206d07b2700b"},
|
||||
{file = "SQLAlchemy-2.0.34-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ce119fc4ce0d64124d37f66a6f2a584fddc3c5001755f8a49f1ca0a177ef9796"},
|
||||
{file = "SQLAlchemy-2.0.34-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a17d8fac6df9835d8e2b4c5523666e7051d0897a93756518a1fe101c7f47f2f0"},
|
||||
{file = "SQLAlchemy-2.0.34-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ebc11c54c6ecdd07bb4efbfa1554538982f5432dfb8456958b6d46b9f834bb7"},
|
||||
{file = "SQLAlchemy-2.0.34-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e6965346fc1491a566e019a4a1d3dfc081ce7ac1a736536367ca305da6472a8"},
|
||||
{file = "SQLAlchemy-2.0.34-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:220574e78ad986aea8e81ac68821e47ea9202b7e44f251b7ed8c66d9ae3f4278"},
|
||||
{file = "SQLAlchemy-2.0.34-cp39-cp39-win32.whl", hash = "sha256:b75b00083e7fe6621ce13cfce9d4469c4774e55e8e9d38c305b37f13cf1e874c"},
|
||||
{file = "SQLAlchemy-2.0.34-cp39-cp39-win_amd64.whl", hash = "sha256:c29d03e0adf3cc1a8c3ec62d176824972ae29b67a66cbb18daff3062acc6faa8"},
|
||||
@@ -4340,4 +4379,4 @@ type = ["pytest-mypy"]
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = ">=3.9,<4.0"
|
||||
content-hash = "59c1fe138c1696cc75b88108ee68b03018202799bf7096273427a22d36c0912e"
|
||||
content-hash = "1a81994350c65c891f5f592a522975bc6688cfad016f2af5fe8ad93a76209066"
|
||||
|
||||
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
|
||||
|
||||
[tool.poetry]
|
||||
name = "langchain-community"
|
||||
version = "0.3.0.dev1"
|
||||
version = "0.3.0.dev2"
|
||||
description = "Community contributed LangChain integrations."
|
||||
authors = []
|
||||
license = "MIT"
|
||||
@@ -33,8 +33,8 @@ ignore-words-list = "momento,collison,ned,foor,reworkd,parth,whats,aapply,mysogy
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = ">=3.9,<4.0"
|
||||
langchain-core = { version = "^0.3.0.dev2", allow-prereleases = true }
|
||||
langchain = { version = "^0.3.0.dev1", allow-prereleases = true }
|
||||
langchain-core = { version = "^0.3.0.dev5", allow-prereleases = true }
|
||||
langchain = { version = "^0.3.0.dev2", allow-prereleases = true }
|
||||
SQLAlchemy = ">=1.4,<3"
|
||||
requests = "^2"
|
||||
PyYAML = ">=5.3"
|
||||
|
||||
55  libs/community/scripts/check_pydantic.sh (new executable file)
@@ -0,0 +1,55 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# This script searches for lines starting with "import pydantic" or "from pydantic"
|
||||
# in tracked files within a Git repository.
|
||||
#
|
||||
# Usage: ./scripts/check_pydantic.sh /path/to/repository
|
||||
|
||||
# Check if a path argument is provided
|
||||
if [ $# -ne 1 ]; then
|
||||
echo "Usage: $0 /path/to/repository"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
repository_path="$1"
|
||||
|
||||
# Check that we are not using features that cannot be captured via init.
|
||||
# pre-init is a custom decorator that we introduced to capture the same semantics
|
||||
# as @root_validator(pre=False, skip_on_failure=False) available in pydantic 1.
|
||||
count=$(git grep -E '(@root_validator)|(@validator)|(@field_validator)|(@pre_init)' -- "*.py" | wc -l)
|
||||
# PRs that increase the current count will not be accepted.
|
||||
# PRs that decrease update the code in the repository
|
||||
# and allow decreasing the count of are welcome!
|
||||
current_count=129
|
||||
|
||||
if [ "$count" -gt "$current_count" ]; then
|
||||
echo "The PR seems to be introducing new usage of @root_validator and/or @field_validator."
|
||||
echo "git grep -E '(@root_validator)|(@validator)|(@field_validator)|(@pre_init)' | wc -l returned $count"
|
||||
echo "whereas the expected count should be equal or less than $current_count"
|
||||
echo "Please update the code to instead use @model_validator or __init__"
|
||||
exit 1
|
||||
elif [ "$count" -lt "$current_count" ]; then
|
||||
echo "Please update the $current_count variable in ./scripts/check_pydantic.sh to $count"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# We do not want to be using pydantic-settings. There's already a pattern to look
|
||||
# up env settings in the code base, and we want to be using the existing pattern
|
||||
# rather than relying on an external dependency.
|
||||
count=$(git grep -E '^import pydantic_settings|^from pydantic_settings' -- "*.py" | wc -l)
|
||||
|
||||
# PRs that increase the current count will not be accepted.
|
||||
# PRs that decrease update the code in the repository
|
||||
# and allow decreasing the count of are welcome!
|
||||
current_count=8
|
||||
|
||||
if [ "$count" -gt "$current_count" ]; then
|
||||
echo "The PR seems to be introducing new usage pydantic_settings."
|
||||
echo "git grep -E '^import pydantic_settings|^from pydantic_settings' | wc -l returned $count"
|
||||
echo "whereas the expected count should be equal or less than $current_count"
|
||||
echo "Please update the code to use Field(default_factory=from_env(..)) or Field(default_factory=secret_from_env(..))"
|
||||
exit 1
|
||||
elif [ "$count" -lt "$current_count" ]; then
|
||||
echo "Please update the $current_count variable in ./scripts/check_pydantic.sh to $count"
|
||||
exit 1
|
||||
fi
|
||||
@@ -1,11 +1,5 @@
|
||||
import inspect
|
||||
|
||||
_INTERNAL_PREFIXES = [
|
||||
"langchain", # For example langchain-core, langchain, langchain-anthropic
|
||||
"langserve",
|
||||
"langgraph",
|
||||
]
|
||||
|
||||
|
||||
def is_caller_internal(depth: int = 2) -> bool:
|
||||
"""Return whether the caller at `depth` of this function is internal."""
|
||||
@@ -24,11 +18,6 @@ def is_caller_internal(depth: int = 2) -> bool:
|
||||
if caller_module is None:
|
||||
return False
|
||||
caller_module_name = caller_module.__name__
|
||||
|
||||
for prefix in _INTERNAL_PREFIXES:
|
||||
if caller_module_name.startswith(prefix):
|
||||
return True
|
||||
return caller_module_name.startswith("langchain")
|
||||
finally:
|
||||
del frame
|
||||
|
||||
return False
|
||||
|
||||
@@ -272,7 +272,7 @@ def _create_message_from_message_type(
        message = RemoveMessage(**kwargs)
    else:
        raise ValueError(
-           f"Unexpected message type: {message_type}. Use one of 'human',"
+           f"Unexpected message type: '{message_type}'. Use one of 'human',"
            f" 'user', 'ai', 'assistant', 'function', 'tool', or 'system'."
        )
    return message
|
||||
@@ -29,7 +29,7 @@ from langchain_core.prompt_values import (
|
||||
)
|
||||
from langchain_core.runnables import RunnableConfig, RunnableSerializable
|
||||
from langchain_core.runnables.config import ensure_config
|
||||
from langchain_core.runnables.utils import create_model
|
||||
from langchain_core.utils.pydantic import create_model_v2
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from langchain_core.documents import Document
|
||||
@@ -125,8 +125,9 @@ class BasePromptTemplate(
|
||||
optional_input_variables = {
|
||||
k: (self.input_types.get(k, str), None) for k in self.optional_variables
|
||||
}
|
||||
return create_model(
|
||||
"PromptInput", **{**required_input_variables, **optional_input_variables}
|
||||
return create_model_v2(
|
||||
"PromptInput",
|
||||
field_definitions={**required_input_variables, **optional_input_variables},
|
||||
)
|
||||
|
||||
def _validate_input(self, inner_input: Any) -> Dict:
|
||||
|
||||
@@ -71,7 +71,6 @@ from langchain_core.runnables.utils import (
|
||||
accepts_config,
|
||||
accepts_run_manager,
|
||||
asyncio_accepts_context,
|
||||
create_model,
|
||||
gather_with_concurrency,
|
||||
get_function_first_arg_dict_keys,
|
||||
get_function_nonlocals,
|
||||
@@ -83,6 +82,7 @@ from langchain_core.runnables.utils import (
|
||||
)
|
||||
from langchain_core.utils.aiter import aclosing, atee, py_anext
|
||||
from langchain_core.utils.iter import safetee
|
||||
from langchain_core.utils.pydantic import create_model_v2
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from langchain_core.callbacks.manager import (
|
||||
@@ -345,9 +345,9 @@ class Runnable(Generic[Input, Output], ABC):
|
||||
if inspect.isclass(root_type) and issubclass(root_type, BaseModel):
|
||||
return root_type
|
||||
|
||||
return create_model(
|
||||
return create_model_v2(
|
||||
self.get_name("Input"),
|
||||
__root__=root_type,
|
||||
root=root_type,
|
||||
# create model needs access to appropriate type annotations to be
|
||||
# able to construct the pydantic model.
|
||||
# When we create the model, we pass information about the namespace
|
||||
@@ -355,7 +355,7 @@ class Runnable(Generic[Input, Output], ABC):
|
||||
# be resolved correctly as well.
|
||||
# self.__class__.__module__ handles the case when the Runnable is
|
||||
# being sub-classed in a different module.
|
||||
__module_name=self.__class__.__module__,
|
||||
module_name=self.__class__.__module__,
|
||||
)
|
||||
|
||||
def get_input_jsonschema(
|
||||
@@ -413,9 +413,9 @@ class Runnable(Generic[Input, Output], ABC):
|
||||
if inspect.isclass(root_type) and issubclass(root_type, BaseModel):
|
||||
return root_type
|
||||
|
||||
return create_model(
|
||||
return create_model_v2(
|
||||
self.get_name("Output"),
|
||||
__root__=root_type,
|
||||
root=root_type,
|
||||
# create model needs access to appropriate type annotations to be
|
||||
# able to construct the pydantic model.
|
||||
# When we create the model, we pass information about the namespace
|
||||
@@ -423,7 +423,7 @@ class Runnable(Generic[Input, Output], ABC):
|
||||
# be resolved correctly as well.
|
||||
# self.__class__.__module__ handles the case when the Runnable is
|
||||
# being sub-classed in a different module.
|
||||
__module_name=self.__class__.__module__,
|
||||
module_name=self.__class__.__module__,
|
||||
)
|
||||
|
||||
def get_output_jsonschema(
|
||||
@@ -477,9 +477,9 @@ class Runnable(Generic[Input, Output], ABC):
|
||||
include = include or []
|
||||
config_specs = self.config_specs
|
||||
configurable = (
|
||||
create_model( # type: ignore[call-overload]
|
||||
create_model_v2( # type: ignore[call-overload]
|
||||
"Configurable",
|
||||
**{
|
||||
field_definitions={
|
||||
spec.id: (
|
||||
spec.annotation,
|
||||
Field(
|
||||
@@ -502,8 +502,8 @@ class Runnable(Generic[Input, Output], ABC):
|
||||
if field_name in [i for i in include if i != "configurable"]
|
||||
},
|
||||
}
|
||||
model = create_model( # type: ignore[call-overload]
|
||||
self.get_name("Config"), **all_fields
|
||||
model = create_model_v2( # type: ignore[call-overload]
|
||||
self.get_name("Config"), field_definitions=all_fields
|
||||
)
|
||||
return model
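The replacement helper threaded through this file takes its fields as an explicit `field_definitions` mapping (or a `root` type plus `module_name`) rather than `**kwargs`. A minimal sketch of the call shape as this diff shows it, with made-up example fields:

```python
from langchain_core.utils.pydantic import create_model_v2

# Hypothetical fields, only to illustrate the (name, field_definitions) call shape
# used by the replacements above; each value is a (type, default) pair,
# with ... marking a required field.
ConfigSchema = create_model_v2(
    "Config",
    field_definitions={"temperature": (float, 0.0), "model": (str, ...)},
)

print(ConfigSchema(model="claude-3-sonnet-20240229"))
```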
|
||||
|
||||
@@ -530,14 +530,14 @@ class Runnable(Generic[Input, Output], ABC):
|
||||
try:
|
||||
input_node = graph.add_node(self.get_input_schema(config))
|
||||
except TypeError:
|
||||
input_node = graph.add_node(create_model(self.get_name("Input")))
|
||||
input_node = graph.add_node(create_model_v2(self.get_name("Input")))
|
||||
runnable_node = graph.add_node(
|
||||
self, metadata=config.get("metadata") if config else None
|
||||
)
|
||||
try:
|
||||
output_node = graph.add_node(self.get_output_schema(config))
|
||||
except TypeError:
|
||||
output_node = graph.add_node(create_model(self.get_name("Output")))
|
||||
output_node = graph.add_node(create_model_v2(self.get_name("Output")))
|
||||
graph.add_edge(input_node, runnable_node)
|
||||
graph.add_edge(runnable_node, output_node)
|
||||
return graph
|
||||
@@ -2583,9 +2583,9 @@ def _seq_input_schema(
|
||||
next_input_schema = _seq_input_schema(steps[1:], config)
|
||||
if not issubclass(next_input_schema, RootModel):
|
||||
# it's a dict as expected
|
||||
return create_model( # type: ignore[call-overload]
|
||||
return create_model_v2( # type: ignore[call-overload]
|
||||
"RunnableSequenceInput",
|
||||
**{
|
||||
field_definitions={
|
||||
k: (v.annotation, v.default)
|
||||
for k, v in next_input_schema.model_fields.items()
|
||||
if k not in first.mapper.steps__
|
||||
@@ -2610,9 +2610,9 @@ def _seq_output_schema(
|
||||
prev_output_schema = _seq_output_schema(steps[:-1], config)
|
||||
if not issubclass(prev_output_schema, RootModel):
|
||||
# it's a dict as expected
|
||||
return create_model( # type: ignore[call-overload]
|
||||
return create_model_v2( # type: ignore[call-overload]
|
||||
"RunnableSequenceOutput",
|
||||
**{
|
||||
field_definitions={
|
||||
**{
|
||||
k: (v.annotation, v.default)
|
||||
for k, v in prev_output_schema.model_fields.items()
|
||||
@@ -2628,9 +2628,9 @@ def _seq_output_schema(
|
||||
if not issubclass(prev_output_schema, RootModel):
|
||||
# it's a dict as expected
|
||||
if isinstance(last.keys, list):
|
||||
return create_model( # type: ignore[call-overload]
|
||||
return create_model_v2( # type: ignore[call-overload]
|
||||
"RunnableSequenceOutput",
|
||||
**{
|
||||
field_definitions={
|
||||
k: (v.annotation, v.default)
|
||||
for k, v in prev_output_schema.model_fields.items()
|
||||
if k in last.keys
|
||||
@@ -2638,9 +2638,8 @@ def _seq_output_schema(
|
||||
)
|
||||
else:
|
||||
field = prev_output_schema.model_fields[last.keys]
|
||||
return create_model( # type: ignore[call-overload]
|
||||
"RunnableSequenceOutput",
|
||||
__root__=(field.annotation, field.default),
|
||||
return create_model_v2( # type: ignore[call-overload]
|
||||
"RunnableSequenceOutput", root=(field.annotation, field.default)
|
||||
)
|
||||
|
||||
return last.get_output_schema(config)
|
||||
@@ -3582,9 +3581,9 @@ class RunnableParallel(RunnableSerializable[Input, Dict[str, Any]]):
for s in self.steps__.values()
):
# This is correct, but pydantic typings/mypy don't think so.
return create_model( # type: ignore[call-overload]
return create_model_v2( # type: ignore[call-overload]
self.get_name("Input"),
**{
field_definitions={
k: (v.annotation, v.default)
for step in self.steps__.values()
for k, v in step.get_input_schema(config).model_fields.items()
@@ -3606,7 +3605,7 @@ class RunnableParallel(RunnableSerializable[Input, Dict[str, Any]]):
The output schema of the Runnable.
"""
fields = {k: (v.OutputType, ...) for k, v in self.steps__.items()}
return create_model(self.get_name("Output"), **fields)
return create_model_v2(self.get_name("Output"), field_definitions=fields)

@property
def config_specs(self) -> List[ConfigurableFieldSpec]:
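A usage-level sketch of the output schema built above, using only public Runnable APIs; the lambda steps and their (untyped) OutputType are assumptions for illustration:

from langchain_core.runnables import RunnableLambda, RunnableParallel

parallel = RunnableParallel(
    length=RunnableLambda(lambda x: len(x)),
    upper=RunnableLambda(lambda x: x.upper()),
)
# Each key of the parallel becomes a field whose type is that step's OutputType.
print(parallel.get_output_schema().model_json_schema())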
@@ -4076,13 +4075,13 @@ class RunnableGenerator(Runnable[Input, Output]):
|
||||
if inspect.isclass(root_type) and issubclass(root_type, BaseModel):
|
||||
return root_type
|
||||
|
||||
return create_model(
|
||||
return create_model_v2(
|
||||
self.get_name("Input"),
|
||||
__root__=root_type,
|
||||
root=root_type,
|
||||
# To create the schema, we need to provide the module
|
||||
# where the underlying function is defined.
|
||||
# This allows pydantic to resolve type annotations appropriately.
|
||||
__module_name=module,
|
||||
module_name=module,
|
||||
)
|
||||
|
||||
@property
|
||||
@@ -4111,13 +4110,13 @@ class RunnableGenerator(Runnable[Input, Output]):
|
||||
if inspect.isclass(root_type) and issubclass(root_type, BaseModel):
|
||||
return root_type
|
||||
|
||||
return create_model(
|
||||
return create_model_v2(
|
||||
self.get_name("Output"),
|
||||
__root__=root_type,
|
||||
root=root_type,
|
||||
# To create the schema, we need to provide the module
|
||||
# where the underlying function is defined.
|
||||
# This allows pydantic to resolve type annotations appropriately.
|
||||
__module_name=module,
|
||||
module_name=module,
|
||||
)
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
@@ -4366,25 +4365,25 @@ class RunnableLambda(Runnable[Input, Output]):
):
fields = {item[1:-1]: (Any, ...) for item in items}
# It's a dict, lol
return create_model(self.get_name("Input"), **fields)
return create_model_v2(self.get_name("Input"), field_definitions=fields)
else:
module = getattr(func, "__module__", None)
return create_model(
return create_model_v2(
self.get_name("Input"),
__root__=List[Any],
root=List[Any],
# To create the schema, we need to provide the module
# where the underlying function is defined.
# This allows pydantic to resolve type annotations appropriately.
__module_name=module,
module_name=module,
)

if self.InputType != Any:
return super().get_input_schema(config)

if dict_keys := get_function_first_arg_dict_keys(func):
return create_model(
return create_model_v2(
self.get_name("Input"),
**{key: (Any, ...) for key in dict_keys}, # type: ignore
field_definitions={key: (Any, ...) for key in dict_keys},
)

return super().get_input_schema(config)
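A rough illustration of the dict_keys branch above: when the wrapped function is untyped and reads its first argument by literal keys, those keys become fields of the inferred input schema. The function below is an assumption, not library code:

from langchain_core.runnables import RunnableLambda

def format_input(inputs):
    # Literal key access is what the key inference above picks up.
    greeting = inputs["greeting"]
    name = inputs["name"]
    return greeting + ", " + name + "!"

runnable = RunnableLambda(format_input)
print(runnable.get_input_schema().model_json_schema())
print(runnable.invoke({"greeting": "hello", "name": "world"}))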
@@ -4425,13 +4424,13 @@ class RunnableLambda(Runnable[Input, Output]):
|
||||
if inspect.isclass(root_type) and issubclass(root_type, BaseModel):
|
||||
return root_type
|
||||
|
||||
return create_model(
|
||||
return create_model_v2(
|
||||
self.get_name("Output"),
|
||||
__root__=root_type,
|
||||
root=root_type,
|
||||
# To create the schema, we need to provide the module
|
||||
# where the underlying function is defined.
|
||||
# This allows pydantic to resolve type annotations appropriately.
|
||||
__module_name=module,
|
||||
module_name=module,
|
||||
)
|
||||
|
||||
@property
|
||||
@@ -4945,9 +4944,9 @@ class RunnableEachBase(RunnableSerializable[List[Input], List[Output]]):
|
||||
def get_input_schema(
|
||||
self, config: Optional[RunnableConfig] = None
|
||||
) -> Type[BaseModel]:
|
||||
return create_model(
|
||||
return create_model_v2(
|
||||
self.get_name("Input"),
|
||||
__root__=(
|
||||
root=(
|
||||
List[self.bound.get_input_schema(config)], # type: ignore
|
||||
None,
|
||||
),
|
||||
@@ -4958,7 +4957,7 @@ class RunnableEachBase(RunnableSerializable[List[Input], List[Output]]):
|
||||
# be resolved correctly as well.
|
||||
# self.__class__.__module__ handles the case when the Runnable is
|
||||
# being sub-classed in a different module.
|
||||
__module_name=self.__class__.__module__,
|
||||
module_name=self.__class__.__module__,
|
||||
)
|
||||
|
||||
@property
|
||||
@@ -4969,9 +4968,9 @@ class RunnableEachBase(RunnableSerializable[List[Input], List[Output]]):
|
||||
self, config: Optional[RunnableConfig] = None
|
||||
) -> Type[BaseModel]:
|
||||
schema = self.bound.get_output_schema(config)
|
||||
return create_model(
|
||||
return create_model_v2(
|
||||
self.get_name("Output"),
|
||||
__root__=List[schema], # type: ignore[valid-type]
|
||||
root=List[schema], # type: ignore[valid-type]
|
||||
# create model needs access to appropriate type annotations to be
|
||||
# able to construct the pydantic model.
|
||||
# When we create the model, we pass information about the namespace
|
||||
@@ -4979,7 +4978,7 @@ class RunnableEachBase(RunnableSerializable[List[Input], List[Output]]):
|
||||
# be resolved correctly as well.
|
||||
# self.__class__.__module__ handles the case when the Runnable is
|
||||
# being sub-classed in a different module.
|
||||
__module_name=self.__class__.__module__,
|
||||
module_name=self.__class__.__module__,
|
||||
)
|
||||
|
||||
@property
|
||||
|
||||
@@ -22,9 +22,9 @@ from langchain_core.runnables.passthrough import RunnablePassthrough
from langchain_core.runnables.utils import (
ConfigurableFieldSpec,
Output,
create_model,
get_unique_config_specs,
)
from langchain_core.utils.pydantic import create_model_v2

if TYPE_CHECKING:
from langchain_core.language_models.base import LanguageModelLike
@@ -386,10 +386,15 @@ class RunnableWithMessageHistory(RunnableBindingBase):
elif self.input_messages_key:
fields[self.input_messages_key] = (Sequence[BaseMessage], ...)
else:
fields["__root__"] = (Sequence[BaseMessage], ...)
return create_model( # type: ignore[call-overload]
return create_model_v2(
"RunnableWithChatHistoryInput",
module_name=self.__class__.__module__,
root=(Sequence[BaseMessage], ...),
)
return create_model_v2( # type: ignore[call-overload]
"RunnableWithChatHistoryInput",
**fields,
field_definitions=fields,
module_name=self.__class__.__module__,
)

@property
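A standalone sketch of the root-model branch above, outside the class; __name__ stands in for self.__class__.__module__ and is an assumption of the sketch:

from typing import Sequence

from langchain_core.messages import BaseMessage
from langchain_core.utils.pydantic import create_model_v2

InputSchema = create_model_v2(
    "RunnableWithChatHistoryInput",
    module_name=__name__,  # stands in for self.__class__.__module__
    root=(Sequence[BaseMessage], ...),
)
print(InputSchema.model_json_schema()["title"])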
@@ -419,10 +424,10 @@ class RunnableWithMessageHistory(RunnableBindingBase):
|
||||
if inspect.isclass(root_type) and issubclass(root_type, BaseModel):
|
||||
return root_type
|
||||
|
||||
return create_model(
|
||||
return create_model_v2(
|
||||
"RunnableWithChatHistoryOutput",
|
||||
__root__=root_type,
|
||||
__module_name=self.__class__.__module__,
|
||||
root=root_type,
|
||||
module_name=self.__class__.__module__,
|
||||
)
|
||||
|
||||
def _is_not_async(self, *args: Sequence[Any], **kwargs: Dict[str, Any]) -> bool:
|
||||
|
||||
@@ -41,10 +41,10 @@ from langchain_core.runnables.graph import Graph
|
||||
from langchain_core.runnables.utils import (
|
||||
AddableDict,
|
||||
ConfigurableFieldSpec,
|
||||
create_model,
|
||||
)
|
||||
from langchain_core.utils.aiter import atee, py_anext
|
||||
from langchain_core.utils.iter import safetee
|
||||
from langchain_core.utils.pydantic import create_model_v2
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from langchain_core.callbacks.manager import (
|
||||
@@ -442,9 +442,8 @@ class RunnableAssign(RunnableSerializable[Dict[str, Any], Dict[str, Any]]):
|
||||
for name, field_info in map_output_schema.model_fields.items():
|
||||
fields[name] = (field_info.annotation, field_info.default)
|
||||
|
||||
return create_model( # type: ignore[call-overload]
|
||||
"RunnableAssignOutput",
|
||||
**fields,
|
||||
return create_model_v2( # type: ignore[call-overload]
|
||||
"RunnableAssignOutput", field_definitions=fields
|
||||
)
|
||||
elif not issubclass(map_output_schema, RootModel):
|
||||
# ie. only map output is a dict
|
||||
|
||||
@@ -6,7 +6,6 @@ import ast
|
||||
import asyncio
|
||||
import inspect
|
||||
import textwrap
|
||||
import warnings
|
||||
from functools import lru_cache
|
||||
from inspect import signature
|
||||
from itertools import groupby
|
||||
@@ -26,24 +25,17 @@ from typing import (
|
||||
Protocol,
|
||||
Sequence,
|
||||
Set,
|
||||
Type,
|
||||
TypeVar,
|
||||
Union,
|
||||
cast,
|
||||
)
|
||||
|
||||
from pydantic import BaseModel, ConfigDict, PydanticDeprecationWarning, RootModel
|
||||
from pydantic import create_model as _create_model_base # pydantic :ignore
|
||||
from pydantic.json_schema import (
|
||||
DEFAULT_REF_TEMPLATE,
|
||||
GenerateJsonSchema,
|
||||
JsonSchemaMode,
|
||||
)
|
||||
from pydantic.v1 import BaseModel as BaseModelV1
|
||||
from typing_extensions import TypeGuard
|
||||
|
||||
from langchain_core.runnables.schema import StreamEvent
|
||||
|
||||
# Re-export create-model for backwards compatibility
|
||||
from langchain_core.utils.pydantic import create_model as create_model
|
||||
|
||||
Input = TypeVar("Input", contravariant=True)
|
||||
# Output type should implement __concat__, as eg str, list, dict do
|
||||
Output = TypeVar("Output", covariant=True)
|
||||
@@ -707,141 +699,6 @@ class _RootEventFilter:
|
||||
return include
|
||||
|
||||
|
||||
_SchemaConfig = ConfigDict(arbitrary_types_allowed=True, frozen=True)
|
||||
|
||||
NO_DEFAULT = object()
|
||||
|
||||
|
||||
def _create_root_model(
|
||||
name: str,
|
||||
type_: Any,
|
||||
module_name: Optional[str] = None,
|
||||
default_: object = NO_DEFAULT,
|
||||
) -> Type[BaseModel]:
|
||||
"""Create a base class."""
|
||||
|
||||
def schema(
|
||||
cls: Type[BaseModel],
|
||||
by_alias: bool = True,
|
||||
ref_template: str = DEFAULT_REF_TEMPLATE,
|
||||
) -> Dict[str, Any]:
|
||||
# Complains about schema not being defined in superclass
|
||||
schema_ = super(cls, cls).schema( # type: ignore[misc]
|
||||
by_alias=by_alias, ref_template=ref_template
|
||||
)
|
||||
schema_["title"] = name
|
||||
return schema_
|
||||
|
||||
def model_json_schema(
|
||||
cls: Type[BaseModel],
|
||||
by_alias: bool = True,
|
||||
ref_template: str = DEFAULT_REF_TEMPLATE,
|
||||
schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
|
||||
mode: JsonSchemaMode = "validation",
|
||||
) -> Dict[str, Any]:
|
||||
# Complains about model_json_schema not being defined in superclass
|
||||
schema_ = super(cls, cls).model_json_schema( # type: ignore[misc]
|
||||
by_alias=by_alias,
|
||||
ref_template=ref_template,
|
||||
schema_generator=schema_generator,
|
||||
mode=mode,
|
||||
)
|
||||
schema_["title"] = name
|
||||
return schema_
|
||||
|
||||
base_class_attributes = {
|
||||
"__annotations__": {"root": type_},
|
||||
"model_config": ConfigDict(arbitrary_types_allowed=True),
|
||||
"schema": classmethod(schema),
|
||||
"model_json_schema": classmethod(model_json_schema),
|
||||
"__module__": module_name or "langchain_core.runnables.utils",
|
||||
}
|
||||
|
||||
if default_ is not NO_DEFAULT:
|
||||
base_class_attributes["root"] = default_
|
||||
with warnings.catch_warnings():
|
||||
if isinstance(type_, type) and issubclass(type_, BaseModelV1):
|
||||
warnings.filterwarnings(
|
||||
action="ignore", category=PydanticDeprecationWarning
|
||||
)
|
||||
custom_root_type = type(name, (RootModel,), base_class_attributes)
|
||||
return cast(Type[BaseModel], custom_root_type)
|
||||
|
||||
|
||||
@lru_cache(maxsize=256)
|
||||
def _create_root_model_cached(
|
||||
__model_name: str,
|
||||
type_: Any,
|
||||
default_: object = NO_DEFAULT,
|
||||
module_name: Optional[str] = None,
|
||||
) -> Type[BaseModel]:
|
||||
return _create_root_model(
|
||||
__model_name, type_, default_=default_, module_name=module_name
|
||||
)
|
||||
|
||||
|
||||
def create_model(
|
||||
__model_name: str,
|
||||
__module_name: Optional[str] = None,
|
||||
**field_definitions: Any,
|
||||
) -> Type[BaseModel]:
|
||||
"""Create a pydantic model with the given field definitions.
|
||||
|
||||
Args:
|
||||
__model_name: The name of the model.
|
||||
__module_name: The name of the module where the model is defined.
|
||||
This is used by Pydantic to resolve any forward references.
|
||||
**field_definitions: The field definitions for the model.
|
||||
|
||||
Returns:
|
||||
Type[BaseModel]: The created model.
|
||||
"""
|
||||
|
||||
# Move this to caching path
|
||||
if "__root__" in field_definitions:
|
||||
if len(field_definitions) > 1:
|
||||
raise NotImplementedError(
|
||||
"When specifying __root__ no other "
|
||||
f"fields should be provided. Got {field_definitions}"
|
||||
)
|
||||
|
||||
arg = field_definitions["__root__"]
|
||||
if isinstance(arg, tuple):
|
||||
kwargs = {"type_": arg[0], "default_": arg[1]}
|
||||
else:
|
||||
kwargs = {"type_": arg}
|
||||
|
||||
try:
|
||||
named_root_model = _create_root_model_cached(
|
||||
__model_name, module_name=__module_name, **kwargs
|
||||
)
|
||||
except TypeError:
|
||||
# something in the arguments into _create_root_model_cached is not hashable
|
||||
named_root_model = _create_root_model(
|
||||
__model_name,
|
||||
module_name=__module_name,
|
||||
**kwargs,
|
||||
)
|
||||
return named_root_model
|
||||
try:
|
||||
return _create_model_cached(__model_name, **field_definitions)
|
||||
except TypeError:
|
||||
# something in field definitions is not hashable
|
||||
return _create_model_base(
|
||||
__model_name, __config__=_SchemaConfig, **field_definitions
|
||||
)
|
||||
|
||||
|
||||
@lru_cache(maxsize=256)
|
||||
def _create_model_cached(
|
||||
__model_name: str,
|
||||
**field_definitions: Any,
|
||||
) -> Type[BaseModel]:
|
||||
return _create_model_base(
|
||||
__model_name, __config__=_SchemaConfig, **field_definitions
|
||||
)
|
||||
|
||||
|
||||
def is_async_generator(
|
||||
func: Any,
|
||||
) -> TypeGuard[Callable[..., AsyncIterator]]:
|
||||
|
||||
@@ -38,6 +38,7 @@ from pydantic import (
validate_arguments,
)
from pydantic.v1 import BaseModel as BaseModelV1
from pydantic.v1 import validate_arguments as validate_arguments_v1
from typing_extensions import Annotated

from langchain_core._api import deprecated
@@ -164,6 +165,35 @@ def _infer_arg_descriptions(
return description, arg_descriptions


def _is_pydantic_annotation(annotation: Any, pydantic_version: str = "v2") -> bool:
"""Determine if a type annotation is a Pydantic model."""
base_model_class = BaseModelV1 if pydantic_version == "v1" else BaseModel
try:
return issubclass(annotation, base_model_class)
except TypeError:
return False


def _function_annotations_are_pydantic_v1(
signature: inspect.Signature, func: Callable
) -> bool:
"""Determine if all Pydantic annotations in a function signature are from V1."""
any_v1_annotations = any(
_is_pydantic_annotation(parameter.annotation, pydantic_version="v1")
for parameter in signature.parameters.values()
)
any_v2_annotations = any(
_is_pydantic_annotation(parameter.annotation, pydantic_version="v2")
for parameter in signature.parameters.values()
)
if any_v1_annotations and any_v2_annotations:
raise NotImplementedError(
f"Function {func} contains a mix of Pydantic v1 and v2 annotations. "
"Only one version of Pydantic annotations per function is supported."
)
return any_v1_annotations and not any_v2_annotations

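A toy re-statement of the version check introduced above, standalone rather than calling the private helpers; the example function and model are assumptions:

import inspect

from pydantic.v1 import BaseModel as BaseModelV1

class V1Args(BaseModelV1):
    x: int

def my_tool(arg: V1Args) -> str:
    return str(arg.x)

sig = inspect.signature(my_tool)
uses_v1 = any(
    isinstance(p.annotation, type) and issubclass(p.annotation, BaseModelV1)
    for p in sig.parameters.values()
)
print(uses_v1)  # True -> the pydantic.v1 validate_arguments variant is chosen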
class _SchemaConfig:
"""Configuration for the pydantic model.

@@ -208,16 +238,19 @@ def create_schema_from_function(
Returns:
A pydantic model with the same arguments as the function.
"""
# https://docs.pydantic.dev/latest/usage/validation_decorator/
with warnings.catch_warnings():
# We are using deprecated functionality here.
# This code should be re-written to simply construct a pydantic model
# using inspect.signature and create_model.
warnings.simplefilter("ignore", category=PydanticDeprecationWarning)
validated = validate_arguments(func, config=_SchemaConfig) # type: ignore

sig = inspect.signature(func)

if _function_annotations_are_pydantic_v1(sig, func):
validated = validate_arguments_v1(func, config=_SchemaConfig) # type: ignore
else:
# https://docs.pydantic.dev/latest/usage/validation_decorator/
with warnings.catch_warnings():
# We are using deprecated functionality here.
# This code should be re-written to simply construct a pydantic model
# using inspect.signature and create_model.
warnings.simplefilter("ignore", category=PydanticDeprecationWarning)
validated = validate_arguments(func, config=_SchemaConfig) # type: ignore

# Let's ignore `self` and `cls` arguments for class and instance methods
if func.__qualname__ and "." in func.__qualname__:
# Then it likely belongs in a class namespace
@@ -274,7 +307,7 @@ def create_schema_from_function(
valid_properties.append(field)

return _create_subset_model(
f"{model_name}Schema",
model_name,
inferred_model,
list(valid_properties),
descriptions=arg_descriptions,
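This branch is what allows tools whose args_schema is a pydantic.v1 model; the sketch below mirrors the test added later in this diff (the class and function names are from that test):

from typing import Optional

from pydantic.v1 import BaseModel as BaseModelV1

from langchain_core.tools import tool

class _MockSchemaV1(BaseModelV1):
    """Return the arguments directly."""
    arg1: int
    arg2: bool
    arg3: Optional[dict] = None

@tool(args_schema=_MockSchemaV1)
def tool_func_v1(arg1: int, arg2: bool, arg3: Optional[dict] = None) -> str:
    """Return the arguments directly."""
    return f"{arg1} {arg2} {arg3}"

print(tool_func_v1.args_schema is _MockSchemaV1)  # True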
@@ -87,7 +87,7 @@ def tool(
.. code-block:: python

{
"title": "fooSchema",
"title": "foo",
"description": "The foo.",
"type": "object",
"properties": {

@@ -288,7 +288,10 @@ def format_tool_to_openai_function(tool: BaseTool) -> FunctionDescription:
Returns:
The function description.
"""
if tool.tool_call_schema:
from langchain_core.tools import simple

is_simple_oai_tool = isinstance(tool, simple.Tool) and not tool.args_schema
if tool.tool_call_schema and not is_simple_oai_tool:
return convert_pydantic_to_openai_function(
tool.tool_call_schema, name=tool.name, description=tool.description
)
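The new is_simple_oai_tool guard means a plain Tool without an args_schema falls back to the single "__arg1" string parameter. This mirrors the test added later in this diff:

from langchain_core.tools import Tool
from langchain_core.utils.function_calling import convert_to_openai_function

def my_function(input_string: str) -> str:
    return input_string

simple_tool = Tool(
    name="dummy_function",
    func=my_function,
    description="test description",
)
print(convert_to_openai_function(simple_tool))
# expected parameters: {"properties": {"__arg1": {...}}, "required": ["__arg1"], ...}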
@@ -5,12 +5,38 @@ from __future__ import annotations
|
||||
import inspect
|
||||
import textwrap
|
||||
import warnings
|
||||
from functools import wraps
|
||||
from typing import Any, Callable, Dict, List, Optional, Type, TypeVar, Union, overload
|
||||
from contextlib import nullcontext
|
||||
from functools import lru_cache, wraps
|
||||
from typing import (
|
||||
Any,
|
||||
Callable,
|
||||
Dict,
|
||||
List,
|
||||
Optional,
|
||||
Type,
|
||||
TypeVar,
|
||||
Union,
|
||||
cast,
|
||||
overload,
|
||||
)
|
||||
|
||||
import pydantic
|
||||
from pydantic import BaseModel, PydanticDeprecationWarning, root_validator
|
||||
from pydantic.json_schema import GenerateJsonSchema, JsonSchemaValue
|
||||
from pydantic import (
|
||||
BaseModel,
|
||||
ConfigDict,
|
||||
PydanticDeprecationWarning,
|
||||
RootModel,
|
||||
root_validator,
|
||||
)
|
||||
from pydantic import (
|
||||
create_model as _create_model_base,
|
||||
)
|
||||
from pydantic.json_schema import (
|
||||
DEFAULT_REF_TEMPLATE,
|
||||
GenerateJsonSchema,
|
||||
JsonSchemaMode,
|
||||
JsonSchemaValue,
|
||||
)
|
||||
from pydantic_core import core_schema
|
||||
|
||||
|
||||
@@ -355,3 +381,237 @@ elif PYDANTIC_MAJOR_VERSION == 1:
|
||||
return model.__fields__ # type: ignore
|
||||
else:
|
||||
raise ValueError(f"Unsupported Pydantic version: {PYDANTIC_MAJOR_VERSION}")
|
||||
|
||||
_SchemaConfig = ConfigDict(
|
||||
arbitrary_types_allowed=True, frozen=True, protected_namespaces=()
|
||||
)
|
||||
|
||||
NO_DEFAULT = object()
|
||||
|
||||
|
||||
def _create_root_model(
|
||||
name: str,
|
||||
type_: Any,
|
||||
module_name: Optional[str] = None,
|
||||
default_: object = NO_DEFAULT,
|
||||
) -> Type[BaseModel]:
|
||||
"""Create a base class."""
|
||||
|
||||
def schema(
|
||||
cls: Type[BaseModel],
|
||||
by_alias: bool = True,
|
||||
ref_template: str = DEFAULT_REF_TEMPLATE,
|
||||
) -> Dict[str, Any]:
|
||||
# Complains about schema not being defined in superclass
|
||||
schema_ = super(cls, cls).schema( # type: ignore[misc]
|
||||
by_alias=by_alias, ref_template=ref_template
|
||||
)
|
||||
schema_["title"] = name
|
||||
return schema_
|
||||
|
||||
def model_json_schema(
|
||||
cls: Type[BaseModel],
|
||||
by_alias: bool = True,
|
||||
ref_template: str = DEFAULT_REF_TEMPLATE,
|
||||
schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema,
|
||||
mode: JsonSchemaMode = "validation",
|
||||
) -> Dict[str, Any]:
|
||||
# Complains about model_json_schema not being defined in superclass
|
||||
schema_ = super(cls, cls).model_json_schema( # type: ignore[misc]
|
||||
by_alias=by_alias,
|
||||
ref_template=ref_template,
|
||||
schema_generator=schema_generator,
|
||||
mode=mode,
|
||||
)
|
||||
schema_["title"] = name
|
||||
return schema_
|
||||
|
||||
base_class_attributes = {
|
||||
"__annotations__": {"root": type_},
|
||||
"model_config": ConfigDict(arbitrary_types_allowed=True),
|
||||
"schema": classmethod(schema),
|
||||
"model_json_schema": classmethod(model_json_schema),
|
||||
"__module__": module_name or "langchain_core.runnables.utils",
|
||||
}
|
||||
|
||||
if default_ is not NO_DEFAULT:
|
||||
base_class_attributes["root"] = default_
|
||||
with warnings.catch_warnings():
|
||||
if isinstance(type_, type) and issubclass(type_, BaseModelV1):
|
||||
warnings.filterwarnings(
|
||||
action="ignore", category=PydanticDeprecationWarning
|
||||
)
|
||||
custom_root_type = type(name, (RootModel,), base_class_attributes)
|
||||
return cast(Type[BaseModel], custom_root_type)
|
||||
|
||||
|
||||
@lru_cache(maxsize=256)
|
||||
def _create_root_model_cached(
|
||||
model_name: str,
|
||||
type_: Any,
|
||||
*,
|
||||
module_name: Optional[str] = None,
|
||||
default_: object = NO_DEFAULT,
|
||||
) -> Type[BaseModel]:
|
||||
return _create_root_model(
|
||||
model_name, type_, default_=default_, module_name=module_name
|
||||
)
|
||||
|
||||
|
||||
@lru_cache(maxsize=256)
def _create_model_cached(
__model_name: str,
**field_definitions: Any,
) -> Type[BaseModel]:
return _create_model_base(
__model_name,
__config__=_SchemaConfig,
**_remap_field_definitions(field_definitions),
)


def create_model(
__model_name: str,
__module_name: Optional[str] = None,
**field_definitions: Any,
) -> Type[BaseModel]:
"""Create a pydantic model with the given field definitions.

Please use create_model_v2 instead of this function.

Args:
__model_name: The name of the model.
__module_name: The name of the module where the model is defined.
This is used by Pydantic to resolve any forward references.
**field_definitions: The field definitions for the model.

Returns:
Type[BaseModel]: The created model.
"""
kwargs = {}
if "__root__" in field_definitions:
kwargs["root"] = field_definitions.pop("__root__")

return create_model_v2(
__model_name,
module_name=__module_name,
field_definitions=field_definitions,
**kwargs,
)
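A sketch of this backwards-compatible shim: the legacy __root__ keyword is forwarded to create_model_v2's root= argument, so both spellings should produce the same model (the "LegacyList" name is an assumption of the sketch):

from typing import List

from langchain_core.utils.pydantic import create_model, create_model_v2

Legacy = create_model("LegacyList", __root__=(List[int], None))
Modern = create_model_v2("LegacyList", root=(List[int], None))
print(Legacy.model_json_schema() == Modern.model_json_schema())  # expected: True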
# Reserved names should capture all the `public` names / methods that are
# used by BaseModel internally. This will keep the reserved names up-to-date.
# For reference, the reserved names are:
# "construct", "copy", "dict", "from_orm", "json", "parse_file", "parse_obj",
# "parse_raw", "schema", "schema_json", "update_forward_refs", "validate",
# "model_computed_fields", "model_config", "model_construct", "model_copy",
# "model_dump", "model_dump_json", "model_extra", "model_fields",
# "model_fields_set", "model_json_schema", "model_parametrized_name",
# "model_post_init", "model_rebuild", "model_validate", "model_validate_json",
# "model_validate_strings"
_RESERVED_NAMES = {key for key in dir(BaseModel) if not key.startswith("_")}


def _remap_field_definitions(field_definitions: Dict[str, Any]) -> Dict[str, Any]:
"""This remaps fields to avoid colliding with internal pydantic fields."""
from pydantic import Field
from pydantic.fields import FieldInfo

remapped = {}
for key, value in field_definitions.items():
if key.startswith("_") or key in _RESERVED_NAMES:
# Let's add a prefix to avoid colliding with internal pydantic fields
if isinstance(value, FieldInfo):
raise NotImplementedError(
f"Remapping for fields starting with '_' or fields with a name "
f"matching a reserved name {_RESERVED_NAMES} is not supported if "
f" the field is a pydantic Field instance. Got {key}."
)
type_, default_ = value
remapped[f"private_{key}"] = (
type_,
Field(
default=default_,
alias=key,
serialization_alias=key,
title=key.lstrip("_").replace("_", " ").title(),
),
)
else:
remapped[key] = value
return remapped
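An illustration of the remapping, in line with the tests added later in this diff: a field named "schema" collides with a BaseModel method, so it is stored internally as "private_schema" but exposed under the alias "schema":

from langchain_core.utils.pydantic import create_model_v2

Foo = create_model_v2("Foo", field_definitions={"schema": (int, None)})
print(Foo.model_json_schema()["properties"])    # keyed by the alias "schema"
print(Foo(schema=1).model_dump(by_alias=True))  # expected: {"schema": 1}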
def create_model_v2(
model_name: str,
*,
module_name: Optional[str] = None,
field_definitions: Optional[Dict[str, Any]] = None,
root: Optional[Any] = None,
) -> Type[BaseModel]:
"""Create a pydantic model with the given field definitions.

Attention:
Please do not use outside of langchain packages. This API
is subject to change at any time.

Args:
model_name: The name of the model.
module_name: The name of the module where the model is defined.
This is used by Pydantic to resolve any forward references.
field_definitions: The field definitions for the model.
root: Type for a root model (RootModel)

Returns:
Type[BaseModel]: The created model.
"""
field_definitions = cast(Dict[str, Any], field_definitions or {}) # type: ignore[no-redef]

if root:
if field_definitions:
raise NotImplementedError(
"When specifying __root__ no other "
f"fields should be provided. Got {field_definitions}"
)

if isinstance(root, tuple):
kwargs = {"type_": root[0], "default_": root[1]}
else:
kwargs = {"type_": root}

try:
named_root_model = _create_root_model_cached(
model_name, module_name=module_name, **kwargs
)
except TypeError:
# something in the arguments into _create_root_model_cached is not hashable
named_root_model = _create_root_model(
model_name,
module_name=module_name,
**kwargs,
)
return named_root_model

# No root, just field definitions
names = set(field_definitions.keys())

capture_warnings = False

for name in names:
# Also if any non-reserved name is used (e.g., model_id or model_name)
if name.startswith("model"):
capture_warnings = True

with warnings.catch_warnings() if capture_warnings else nullcontext(): # type: ignore[attr-defined]
if capture_warnings:
warnings.filterwarnings(action="ignore")
try:
return _create_model_cached(model_name, **field_definitions)
except TypeError:
# something in field definitions is not hashable
return _create_model_base(
model_name,
__config__=_SchemaConfig,
**_remap_field_definitions(field_definitions),
)
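A short usage sketch for create_model_v2, following the tests added later in this diff: plain fields, a name in pydantic's protected "model_" namespace (warnings are suppressed), and a root model:

from typing import List

from langchain_core.utils.pydantic import create_model_v2

Plain = create_model_v2("Foo", field_definitions={"a": (int, None)})
Reserved = create_model_v2("Foo", field_definitions={"model_id": (int, None)})
Rooted = create_model_v2("Items", root=(List[str], None))

for model in (Plain, Reserved, Rooted):
    print(model.model_json_schema())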
31  libs/core/poetry.lock  (generated)
@@ -278,6 +278,37 @@ files = [
|
||||
{file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"},
|
||||
{file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"},
|
||||
{file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
|
||||
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
|
||||
|
||||
[tool.poetry]
|
||||
name = "langchain-core"
|
||||
version = "0.3.0.dev4"
|
||||
version = "0.3.0.dev5"
|
||||
description = "Building applications with LLMs through composability"
|
||||
authors = []
|
||||
license = "MIT"
|
||||
|
||||
@@ -864,3 +864,48 @@ async def test_chat_tmpl_serdes(snapshot: SnapshotAssertion) -> None:
|
||||
)
|
||||
assert dumpd(template) == snapshot()
|
||||
assert load(dumpd(template)) == template
|
||||
|
||||
|
||||
def test_chat_prompt_template_variable_names() -> None:
|
||||
"""This test was written for an edge case that triggers a warning from Pydantic.
|
||||
|
||||
Verify that no run time warnings are raised.
|
||||
"""
|
||||
with pytest.warns(None) as record: # type: ignore
|
||||
prompt = ChatPromptTemplate([("system", "{schema}")])
|
||||
prompt.get_input_schema()
|
||||
|
||||
if record:
|
||||
error_msg = []
|
||||
for warning in record:
|
||||
error_msg.append(
|
||||
f"Warning type: {warning.category.__name__}, "
|
||||
f"Warning message: {warning.message}, "
|
||||
f"Warning location: {warning.filename}:{warning.lineno}"
|
||||
)
|
||||
msg = "\n".join(error_msg)
|
||||
else:
|
||||
msg = ""
|
||||
|
||||
assert list(record) == [], msg
|
||||
|
||||
# Verify value errors raised from illegal names
|
||||
assert ChatPromptTemplate(
|
||||
[("system", "{_private}")]
|
||||
).get_input_schema().model_json_schema() == {
|
||||
"properties": {"_private": {"title": "Private", "type": "string"}},
|
||||
"required": ["_private"],
|
||||
"title": "PromptInput",
|
||||
"type": "object",
|
||||
}
|
||||
|
||||
assert ChatPromptTemplate(
|
||||
[("system", "{model_json_schema}")]
|
||||
).get_input_schema().model_json_schema() == {
|
||||
"properties": {
|
||||
"model_json_schema": {"title": "Model Json Schema", "type": "string"}
|
||||
},
|
||||
"required": ["model_json_schema"],
|
||||
"title": "PromptInput",
|
||||
"type": "object",
|
||||
}
|
||||
|
||||
@@ -35,3 +35,9 @@ EXPECTED_ALL = [
|
||||
|
||||
def test_all_imports() -> None:
|
||||
assert set(__all__) == set(EXPECTED_ALL)
|
||||
|
||||
|
||||
def test_imports_for_specific_funcs() -> None:
|
||||
"""Test that a few specific imports in more internal namespaces."""
|
||||
# create_model implementation has been moved to langchain_core.utils.pydantic
|
||||
from langchain_core.runnables.utils import create_model # noqa
|
||||
|
||||
@@ -5392,3 +5392,36 @@ def test_pydantic_protected_namespaces() -> None:
|
||||
|
||||
class CustomChatModel(RunnableSerializable):
|
||||
model_kwargs: Dict[str, Any] = Field(default_factory=dict)
|
||||
|
||||
|
||||
def test_schema_for_prompt_and_chat_model() -> None:
|
||||
"""Testing that schema is generated properly when using variable names
|
||||
|
||||
that collide with pydantic attributes.
|
||||
"""
|
||||
prompt = ChatPromptTemplate([("system", "{model_json_schema}, {_private}, {json}")])
|
||||
chat_res = "i'm a chatbot"
|
||||
# sleep to better simulate a real stream
|
||||
chat = FakeListChatModel(responses=[chat_res], sleep=0.01)
|
||||
chain = prompt | chat
|
||||
assert (
|
||||
chain.invoke(
|
||||
{"model_json_schema": "hello", "_private": "goodbye", "json": "json"}
|
||||
).content
|
||||
== chat_res
|
||||
)
|
||||
|
||||
assert chain.get_input_jsonschema() == {
|
||||
"properties": {
|
||||
"model_json_schema": {"title": "Model Json Schema", "type": "string"},
|
||||
"_private": {"title": "Private", "type": "string"},
|
||||
"json": {"title": "Json", "type": "string"},
|
||||
},
|
||||
"required": [
|
||||
"_private",
|
||||
"json",
|
||||
"model_json_schema",
|
||||
],
|
||||
"title": "PromptInput",
|
||||
"type": "object",
|
||||
}
|
||||
|
||||
@@ -23,7 +23,7 @@ from typing import (
|
||||
|
||||
import pytest
|
||||
from pydantic import BaseModel, Field, ValidationError
|
||||
from pydantic import BaseModel as BaseModelProper
|
||||
from pydantic.v1 import BaseModel as BaseModelV1
|
||||
from typing_extensions import Annotated, TypedDict, TypeVar
|
||||
|
||||
from langchain_core import tools
|
||||
@@ -80,6 +80,14 @@ class _MockSchema(BaseModel):
|
||||
arg3: Optional[dict] = None
|
||||
|
||||
|
||||
class _MockSchemaV1(BaseModelV1):
|
||||
"""Return the arguments directly."""
|
||||
|
||||
arg1: int
|
||||
arg2: bool
|
||||
arg3: Optional[dict] = None
|
||||
|
||||
|
||||
class _MockStructuredTool(BaseTool):
|
||||
name: str = "structured_api"
|
||||
args_schema: Type[BaseModel] = _MockSchema
|
||||
@@ -169,6 +177,13 @@ def test_decorator_with_specified_schema() -> None:
|
||||
assert isinstance(tool_func, BaseTool)
|
||||
assert tool_func.args_schema == _MockSchema
|
||||
|
||||
@tool(args_schema=_MockSchemaV1)
|
||||
def tool_func_v1(arg1: int, arg2: bool, arg3: Optional[dict] = None) -> str:
|
||||
return f"{arg1} {arg2} {arg3}"
|
||||
|
||||
assert isinstance(tool_func_v1, BaseTool)
|
||||
assert tool_func_v1.args_schema == _MockSchemaV1
|
||||
|
||||
|
||||
def test_decorated_function_schema_equivalent() -> None:
|
||||
"""Test that a BaseTool without a schema meets expectations."""
|
||||
@@ -302,6 +317,51 @@ def test_structured_tool_types_parsed() -> None:
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_structured_tool_types_parsed_pydantic_v1() -> None:
|
||||
"""Test the non-primitive types are correctly passed to structured tools."""
|
||||
|
||||
class SomeBaseModel(BaseModelV1):
|
||||
foo: str
|
||||
|
||||
class AnotherBaseModel(BaseModelV1):
|
||||
bar: str
|
||||
|
||||
@tool
|
||||
def structured_tool(some_base_model: SomeBaseModel) -> AnotherBaseModel:
|
||||
"""Return the arguments directly."""
|
||||
return AnotherBaseModel(bar=some_base_model.foo)
|
||||
|
||||
assert isinstance(structured_tool, StructuredTool)
|
||||
|
||||
expected = AnotherBaseModel(bar="baz")
|
||||
for arg in [
|
||||
SomeBaseModel(foo="baz"),
|
||||
SomeBaseModel(foo="baz").dict(),
|
||||
]:
|
||||
args = {"some_base_model": arg}
|
||||
result = structured_tool.run(args)
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_structured_tool_types_parsed_pydantic_mixed() -> None:
|
||||
"""Test handling of tool with mixed Pydantic version arguments."""
|
||||
|
||||
class SomeBaseModel(BaseModelV1):
|
||||
foo: str
|
||||
|
||||
class AnotherBaseModel(BaseModel):
|
||||
bar: str
|
||||
|
||||
with pytest.raises(NotImplementedError):
|
||||
|
||||
@tool
|
||||
def structured_tool(
|
||||
some_base_model: SomeBaseModel, another_base_model: AnotherBaseModel
|
||||
) -> None:
|
||||
"""Return the arguments directly."""
|
||||
pass
|
||||
|
||||
|
||||
def test_base_tool_inheritance_base_schema() -> None:
|
||||
"""Test schema is correctly inferred when inheriting from BaseTool."""
|
||||
|
||||
@@ -359,7 +419,7 @@ def test_structured_tool_from_function_docstring() -> None:
|
||||
"baz": {"title": "Baz", "type": "string"},
|
||||
},
|
||||
"description": inspect.getdoc(foo),
|
||||
"title": "fooSchema",
|
||||
"title": "foo",
|
||||
"type": "object",
|
||||
"required": ["bar", "baz"],
|
||||
}
|
||||
@@ -401,7 +461,7 @@ def test_structured_tool_from_function_docstring_complex_args() -> None:
|
||||
},
|
||||
},
|
||||
"description": inspect.getdoc(foo),
|
||||
"title": "fooSchema",
|
||||
"title": "foo",
|
||||
"type": "object",
|
||||
"required": ["bar", "baz"],
|
||||
}
|
||||
@@ -502,7 +562,7 @@ def test_structured_tool_from_function_with_run_manager() -> None:
|
||||
"baz": {"title": "Baz", "type": "string"},
|
||||
},
|
||||
"description": inspect.getdoc(foo),
|
||||
"title": "fooSchema",
|
||||
"title": "foo",
|
||||
"type": "object",
|
||||
"required": ["bar", "baz"],
|
||||
}
|
||||
@@ -734,7 +794,7 @@ def test_structured_tool_from_function() -> None:
|
||||
}
|
||||
|
||||
assert _schema(structured_tool.args_schema) == {
|
||||
"title": "fooSchema",
|
||||
"title": "foo",
|
||||
"type": "object",
|
||||
"description": inspect.getdoc(foo),
|
||||
"properties": {
|
||||
@@ -1063,7 +1123,7 @@ def test_tool_arg_descriptions() -> None:
|
||||
foo1 = tool(foo)
|
||||
args_schema = _schema(foo1.args_schema) # type: ignore
|
||||
assert args_schema == {
|
||||
"title": "fooSchema",
|
||||
"title": "foo",
|
||||
"type": "object",
|
||||
"description": inspect.getdoc(foo),
|
||||
"properties": {
|
||||
@@ -1077,7 +1137,7 @@ def test_tool_arg_descriptions() -> None:
|
||||
foo2 = tool(foo, parse_docstring=True)
|
||||
args_schema = _schema(foo2.args_schema) # type: ignore
|
||||
expected = {
|
||||
"title": "fooSchema",
|
||||
"title": "foo",
|
||||
"description": "The foo.",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -1166,7 +1226,7 @@ def test_tool_annotated_descriptions() -> None:
|
||||
foo1 = tool(foo)
|
||||
args_schema = _schema(foo1.args_schema) # type: ignore
|
||||
assert args_schema == {
|
||||
"title": "fooSchema",
|
||||
"title": "foo",
|
||||
"type": "object",
|
||||
"description": inspect.getdoc(foo),
|
||||
"properties": {
|
||||
@@ -1389,7 +1449,7 @@ def injected_tool_with_schema(x: int, y: str) -> str:
|
||||
@pytest.mark.parametrize("tool_", [InjectedTool()])
|
||||
def test_tool_injected_arg_without_schema(tool_: BaseTool) -> None:
|
||||
assert _schema(tool_.get_input_schema()) == {
|
||||
"title": "fooSchema",
|
||||
"title": "foo",
|
||||
"description": "foo.\n\nArgs:\n x: abc\n y: 123",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -1428,7 +1488,7 @@ def test_tool_injected_arg_without_schema(tool_: BaseTool) -> None:
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"tool_",
|
||||
[injected_tool, injected_tool_with_schema, InjectedToolWithSchema()],
|
||||
[injected_tool_with_schema, InjectedToolWithSchema()],
|
||||
)
|
||||
def test_tool_injected_arg_with_schema(tool_: BaseTool) -> None:
|
||||
assert _schema(tool_.get_input_schema()) == {
|
||||
@@ -1469,6 +1529,46 @@ def test_tool_injected_arg_with_schema(tool_: BaseTool) -> None:
|
||||
}
|
||||
|
||||
|
||||
def test_tool_injected_arg() -> None:
|
||||
tool_ = injected_tool
|
||||
assert _schema(tool_.get_input_schema()) == {
|
||||
"title": "foo",
|
||||
"description": "foo.",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"x": {"description": "abc", "title": "X", "type": "integer"},
|
||||
"y": {"description": "123", "title": "Y", "type": "string"},
|
||||
},
|
||||
"required": ["x", "y"],
|
||||
}
|
||||
assert _schema(tool_.tool_call_schema) == {
|
||||
"title": "foo",
|
||||
"description": "foo.",
|
||||
"type": "object",
|
||||
"properties": {"x": {"description": "abc", "title": "X", "type": "integer"}},
|
||||
"required": ["x"],
|
||||
}
|
||||
assert tool_.invoke({"x": 5, "y": "bar"}) == "bar"
|
||||
assert tool_.invoke(
|
||||
{"name": "foo", "args": {"x": 5, "y": "bar"}, "id": "123", "type": "tool_call"}
|
||||
) == ToolMessage("bar", tool_call_id="123", name="foo")
|
||||
expected_error = (
|
||||
ValidationError if not isinstance(tool_, InjectedTool) else TypeError
|
||||
)
|
||||
with pytest.raises(expected_error):
|
||||
tool_.invoke({"x": 5})
|
||||
|
||||
assert convert_to_openai_function(tool_) == {
|
||||
"name": "foo",
|
||||
"description": "foo.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {"x": {"type": "integer", "description": "abc"}},
|
||||
"required": ["x"],
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def test_tool_inherited_injected_arg() -> None:
|
||||
class barSchema(BaseModel):
|
||||
"""bar."""
|
||||
@@ -1492,7 +1592,7 @@ def test_tool_inherited_injected_arg() -> None:
|
||||
|
||||
tool_ = InheritedInjectedArgTool()
|
||||
assert tool_.get_input_schema().model_json_schema() == {
|
||||
"title": "fooSchema",
|
||||
"title": "fooSchema", # Matches the title from the provided schema
|
||||
"description": "foo.",
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -1501,6 +1601,7 @@ def test_tool_inherited_injected_arg() -> None:
|
||||
},
|
||||
"required": ["y", "x"],
|
||||
}
|
||||
# Should not include `y` since it's annotated as an injected tool arg
|
||||
assert tool_.tool_call_schema.model_json_schema() == {
|
||||
"title": "foo",
|
||||
"description": "foo.",
|
||||
@@ -1562,7 +1663,7 @@ def test_fn_injected_arg_with_schema(tool_: Callable) -> None:
|
||||
def generate_models() -> List[Any]:
|
||||
"""Generate a list of base models depending on the pydantic version."""
|
||||
|
||||
class FooProper(BaseModelProper):
|
||||
class FooProper(BaseModel):
|
||||
a: int
|
||||
b: str
|
||||
|
||||
|
||||
@@ -38,7 +38,7 @@ from pydantic import BaseModel, Field
|
||||
|
||||
from langchain_core.messages import AIMessage, HumanMessage, ToolMessage
|
||||
from langchain_core.runnables import Runnable, RunnableLambda
|
||||
from langchain_core.tools import BaseTool, tool
|
||||
from langchain_core.tools import BaseTool, StructuredTool, Tool, tool
|
||||
from langchain_core.utils.function_calling import (
|
||||
_convert_typed_dict_to_openai_function,
|
||||
convert_to_openai_function,
|
||||
@@ -112,6 +112,20 @@ def dummy_tool() -> BaseTool:
|
||||
return DummyFunction()
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def dummy_structured_tool() -> StructuredTool:
|
||||
class Schema(BaseModel):
|
||||
arg1: int = Field(..., description="foo")
|
||||
arg2: Literal["bar", "baz"] = Field(..., description="one of 'bar', 'baz'")
|
||||
|
||||
return StructuredTool.from_function(
|
||||
lambda x: None,
|
||||
name="dummy_function",
|
||||
description="dummy function",
|
||||
args_schema=Schema,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def dummy_pydantic() -> Type[BaseModel]:
|
||||
class dummy_function(BaseModel):
|
||||
@@ -234,6 +248,7 @@ class DummyWithClassMethod:
|
||||
def test_convert_to_openai_function(
|
||||
pydantic: Type[BaseModel],
|
||||
function: Callable,
|
||||
dummy_structured_tool: StructuredTool,
|
||||
dummy_tool: BaseTool,
|
||||
json_schema: Dict,
|
||||
Annotated_function: Callable,
|
||||
@@ -264,6 +279,7 @@ def test_convert_to_openai_function(
|
||||
for fn in (
|
||||
pydantic,
|
||||
function,
|
||||
dummy_structured_tool,
|
||||
dummy_tool,
|
||||
json_schema,
|
||||
expected,
|
||||
@@ -296,6 +312,27 @@ def test_convert_to_openai_function(
|
||||
runnable_expected["parameters"] = parameters
|
||||
assert actual == runnable_expected
|
||||
|
||||
# Test simple Tool
|
||||
def my_function(input_string: str) -> str:
|
||||
pass
|
||||
|
||||
tool = Tool(
|
||||
name="dummy_function",
|
||||
func=my_function,
|
||||
description="test description",
|
||||
)
|
||||
actual = convert_to_openai_function(tool)
|
||||
expected = {
|
||||
"name": "dummy_function",
|
||||
"description": "test description",
|
||||
"parameters": {
|
||||
"properties": {"__arg1": {"title": "__arg1", "type": "string"}},
|
||||
"required": ["__arg1"],
|
||||
"type": "object",
|
||||
},
|
||||
}
|
||||
assert actual == expected
|
||||
|
||||
|
||||
@pytest.mark.xfail(reason="Direct pydantic v2 models not yet supported")
|
||||
def test_convert_to_openai_function_nested_v2() -> None:
|
||||
|
||||
@@ -8,6 +8,7 @@ from pydantic import ConfigDict
|
||||
from langchain_core.utils.pydantic import (
|
||||
PYDANTIC_MAJOR_VERSION,
|
||||
_create_subset_model_v2,
|
||||
create_model_v2,
|
||||
get_fields,
|
||||
is_basemodel_instance,
|
||||
is_basemodel_subclass,
|
||||
@@ -194,3 +195,35 @@ def test_fields_pydantic_v1_from_2() -> None:
|
||||
|
||||
fields = get_fields(Foo)
|
||||
assert fields == {"x": Foo.__fields__["x"]}
|
||||
|
||||
|
||||
def test_create_model_v2() -> None:
|
||||
"""Test that create model v2 works as expected."""
|
||||
|
||||
with pytest.warns(None) as record: # type: ignore
|
||||
foo = create_model_v2("Foo", field_definitions={"a": (int, None)})
|
||||
foo.model_json_schema()
|
||||
|
||||
assert list(record) == []
|
||||
|
||||
# schema is used by pydantic, but OK to re-use
|
||||
with pytest.warns(None) as record: # type: ignore
|
||||
foo = create_model_v2("Foo", field_definitions={"schema": (int, None)})
|
||||
foo.model_json_schema()
|
||||
|
||||
assert list(record) == []
|
||||
|
||||
# From protected namespaces, but definitely OK to use.
|
||||
with pytest.warns(None) as record: # type: ignore
|
||||
foo = create_model_v2("Foo", field_definitions={"model_id": (int, None)})
|
||||
foo.model_json_schema()
|
||||
|
||||
assert list(record) == []
|
||||
|
||||
with pytest.warns(None) as record: # type: ignore
|
||||
# Verify that we can use non-English characters
|
||||
field_name = "もしもし"
|
||||
foo = create_model_v2("Foo", field_definitions={field_name: (int, None)})
|
||||
foo.model_json_schema()
|
||||
|
||||
assert list(record) == []
|
||||
|
||||
106  libs/langchain/poetry.lock  (generated)
@@ -1,4 +1,4 @@
|
||||
# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
|
||||
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "aiohappyeyeballs"
|
||||
@@ -568,6 +568,37 @@ files = [
|
||||
{file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"},
|
||||
{file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"},
|
||||
{file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"},
|
||||
{file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"},
|
||||
{file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"},
|
||||
{file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1894,7 +1925,7 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.0.dev4"
|
||||
version = "0.3.0.dev5"
|
||||
description = "Building applications with LLMs through composability"
|
||||
optional = false
|
||||
python-versions = ">=3.9,<4.0"
|
||||
@@ -1903,7 +1934,7 @@ develop = true
|
||||
|
||||
[package.dependencies]
|
||||
jsonpatch = "^1.33"
|
||||
langsmith = "^0.1.112"
|
||||
langsmith = "^0.1.117"
|
||||
packaging = ">=23.2,<25"
|
||||
pydantic = "^2.7.4"
|
||||
PyYAML = ">=5.3"
|
||||
@@ -3815,7 +3846,9 @@ python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "SQLAlchemy-2.0.34-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:95d0b2cf8791ab5fb9e3aa3d9a79a0d5d51f55b6357eecf532a120ba3b5524db"},
|
||||
{file = "SQLAlchemy-2.0.34-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:243f92596f4fd4c8bd30ab8e8dd5965afe226363d75cab2468f2c707f64cd83b"},
|
||||
{file = "SQLAlchemy-2.0.34-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ea54f7300553af0a2a7235e9b85f4204e1fc21848f917a3213b0e0818de9a24"},
|
||||
{file = "SQLAlchemy-2.0.34-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:173f5f122d2e1bff8fbd9f7811b7942bead1f5e9f371cdf9e670b327e6703ebd"},
|
||||
{file = "SQLAlchemy-2.0.34-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:196958cde924a00488e3e83ff917be3b73cd4ed8352bbc0f2989333176d1c54d"},
|
||||
{file = "SQLAlchemy-2.0.34-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bd90c221ed4e60ac9d476db967f436cfcecbd4ef744537c0f2d5291439848768"},
|
||||
{file = "SQLAlchemy-2.0.34-cp310-cp310-win32.whl", hash = "sha256:3166dfff2d16fe9be3241ee60ece6fcb01cf8e74dd7c5e0b64f8e19fab44911b"},
|
||||
{file = "SQLAlchemy-2.0.34-cp310-cp310-win_amd64.whl", hash = "sha256:6831a78bbd3c40f909b3e5233f87341f12d0b34a58f14115c9e94b4cdaf726d3"},
|
||||
@@ -3836,19 +3869,25 @@ files = [
|
||||
{file = "SQLAlchemy-2.0.34-cp312-cp312-win32.whl", hash = "sha256:fbb034f565ecbe6c530dff948239377ba859420d146d5f62f0271407ffb8c580"},
|
||||
{file = "SQLAlchemy-2.0.34-cp312-cp312-win_amd64.whl", hash = "sha256:707c8f44931a4facd4149b52b75b80544a8d824162602b8cd2fe788207307f9a"},
|
||||
{file = "SQLAlchemy-2.0.34-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:24af3dc43568f3780b7e1e57c49b41d98b2d940c1fd2e62d65d3928b6f95f021"},
|
||||
{file = "SQLAlchemy-2.0.34-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60ed6ef0a35c6b76b7640fe452d0e47acc832ccbb8475de549a5cc5f90c2c06"},
|
||||
{file = "SQLAlchemy-2.0.34-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:413c85cd0177c23e32dee6898c67a5f49296640041d98fddb2c40888fe4daa2e"},
|
||||
{file = "SQLAlchemy-2.0.34-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:25691f4adfb9d5e796fd48bf1432272f95f4bbe5f89c475a788f31232ea6afba"},
|
||||
{file = "SQLAlchemy-2.0.34-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:526ce723265643dbc4c7efb54f56648cc30e7abe20f387d763364b3ce7506c82"},
|
||||
{file = "SQLAlchemy-2.0.34-cp37-cp37m-win32.whl", hash = "sha256:13be2cc683b76977a700948411a94c67ad8faf542fa7da2a4b167f2244781cf3"},
|
||||
{file = "SQLAlchemy-2.0.34-cp37-cp37m-win_amd64.whl", hash = "sha256:e54ef33ea80d464c3dcfe881eb00ad5921b60f8115ea1a30d781653edc2fd6a2"},
|
||||
{file = "SQLAlchemy-2.0.34-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:43f28005141165edd11fbbf1541c920bd29e167b8bbc1fb410d4fe2269c1667a"},
|
||||
{file = "SQLAlchemy-2.0.34-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b68094b165a9e930aedef90725a8fcfafe9ef95370cbb54abc0464062dbf808f"},
|
||||
{file = "SQLAlchemy-2.0.34-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a1e03db964e9d32f112bae36f0cc1dcd1988d096cfd75d6a588a3c3def9ab2b"},
|
||||
{file = "SQLAlchemy-2.0.34-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:203d46bddeaa7982f9c3cc693e5bc93db476ab5de9d4b4640d5c99ff219bee8c"},
|
||||
{file = "SQLAlchemy-2.0.34-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ae92bebca3b1e6bd203494e5ef919a60fb6dfe4d9a47ed2453211d3bd451b9f5"},
|
||||
{file = "SQLAlchemy-2.0.34-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:9661268415f450c95f72f0ac1217cc6f10256f860eed85c2ae32e75b60278ad8"},
|
||||
{file = "SQLAlchemy-2.0.34-cp38-cp38-win32.whl", hash = "sha256:895184dfef8708e15f7516bd930bda7e50ead069280d2ce09ba11781b630a434"},
|
||||
{file = "SQLAlchemy-2.0.34-cp38-cp38-win_amd64.whl", hash = "sha256:6e7cde3a2221aa89247944cafb1b26616380e30c63e37ed19ff0bba5e968688d"},
|
||||
{file = "SQLAlchemy-2.0.34-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dbcdf987f3aceef9763b6d7b1fd3e4ee210ddd26cac421d78b3c206d07b2700b"},
|
||||
{file = "SQLAlchemy-2.0.34-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ce119fc4ce0d64124d37f66a6f2a584fddc3c5001755f8a49f1ca0a177ef9796"},
|
||||
{file = "SQLAlchemy-2.0.34-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a17d8fac6df9835d8e2b4c5523666e7051d0897a93756518a1fe101c7f47f2f0"},
|
||||
{file = "SQLAlchemy-2.0.34-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ebc11c54c6ecdd07bb4efbfa1554538982f5432dfb8456958b6d46b9f834bb7"},
|
||||
{file = "SQLAlchemy-2.0.34-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e6965346fc1491a566e019a4a1d3dfc081ce7ac1a736536367ca305da6472a8"},
|
||||
{file = "SQLAlchemy-2.0.34-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:220574e78ad986aea8e81ac68821e47ea9202b7e44f251b7ed8c66d9ae3f4278"},
|
||||
{file = "SQLAlchemy-2.0.34-cp39-cp39-win32.whl", hash = "sha256:b75b00083e7fe6621ce13cfce9d4469c4774e55e8e9d38c305b37f13cf1e874c"},
|
||||
{file = "SQLAlchemy-2.0.34-cp39-cp39-win_amd64.whl", hash = "sha256:c29d03e0adf3cc1a8c3ec62d176824972ae29b67a66cbb18daff3062acc6faa8"},
|
||||
@@ -4200,20 +4239,6 @@ files = [
|
||||
cryptography = ">=35.0.0"
|
||||
types-pyOpenSSL = "*"
|
||||
|
||||
[[package]]
|
||||
name = "types-requests"
|
||||
version = "2.31.0.6"
|
||||
description = "Typing stubs for requests"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "types-requests-2.31.0.6.tar.gz", hash = "sha256:cd74ce3b53c461f1228a9b783929ac73a666658f223e28ed29753771477b3bd0"},
|
||||
{file = "types_requests-2.31.0.6-py3-none-any.whl", hash = "sha256:a2db9cb228a81da8348b49ad6db3f5519452dd20a9c1e1a868c83c5fe88fd1a9"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
types-urllib3 = "*"
|
||||
|
||||
[[package]]
|
||||
name = "types-requests"
|
||||
version = "2.32.0.20240907"
|
||||
@@ -4250,17 +4275,6 @@ files = [
|
||||
{file = "types_toml-0.10.8.20240310-py3-none-any.whl", hash = "sha256:627b47775d25fa29977d9c70dc0cbab3f314f32c8d8d0c012f2ef5de7aaec05d"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "types-urllib3"
|
||||
version = "1.26.25.14"
|
||||
description = "Typing stubs for urllib3"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"},
|
||||
{file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typing-extensions"
|
||||
version = "4.12.2"
|
||||
@@ -4297,22 +4311,6 @@ files = [
|
||||
[package.extras]
|
||||
dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"]
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "1.26.20"
|
||||
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
|
||||
files = [
|
||||
{file = "urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e"},
|
||||
{file = "urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
|
||||
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
|
||||
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "2.2.2"
|
||||
@@ -4330,6 +4328,23 @@ h2 = ["h2 (>=4,<5)"]
|
||||
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||
zstd = ["zstandard (>=0.18.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "vcrpy"
|
||||
version = "4.3.0"
|
||||
description = "Automatically mock your HTTP interactions to simplify and speed up testing"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "vcrpy-4.3.0-py2.py3-none-any.whl", hash = "sha256:8fbd4be412e8a7f35f623dd61034e6380a1c8dbd0edf6e87277a3289f6e98093"},
|
||||
{file = "vcrpy-4.3.0.tar.gz", hash = "sha256:49c270ce67e826dba027d83e20d25b67a5885487697e97bca6dbdf53d750a0ac"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
PyYAML = "*"
|
||||
six = ">=1.5"
|
||||
wrapt = "*"
|
||||
yarl = "*"
|
||||
|
||||
[[package]]
|
||||
name = "vcrpy"
|
||||
version = "6.0.1"
|
||||
@@ -4342,7 +4357,6 @@ files = [
|
||||
|
||||
[package.dependencies]
|
||||
PyYAML = "*"
|
||||
urllib3 = {version = "<2", markers = "platform_python_implementation == \"PyPy\" or python_version < \"3.10\""}
|
||||
wrapt = "*"
|
||||
yarl = "*"
|
||||
|
||||
@@ -4661,4 +4675,4 @@ type = ["pytest-mypy"]
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = ">=3.9,<4.0"
|
||||
content-hash = "d96302c2ca74bc4594b3aa702aea7c75fe55429c09bedbb114117d47a0ea072c"
|
||||
content-hash = "6285e2bb94bd02186bb92baa07d9af80829719f033c289f55af2a389a26758bc"
|
||||
|
||||
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "langchain"
version = "0.3.0.dev1"
version = "0.3.0.dev2"
description = "Building applications with LLMs through composability"
authors = []
license = "MIT"
@@ -33,7 +33,7 @@ langchain-server = "langchain.server:main"

[tool.poetry.dependencies]
python = ">=3.9,<4.0"
langchain-core = { version = "^0.3.0.dev2", allow-prereleases = true }
langchain-core = { version = "^0.3.0.dev5", allow-prereleases = true }
langchain-text-splitters = { version = "^0.3.0.dev1", allow-prereleases = true }
langsmith = "^0.1.17"
pydantic = "^2.7.4"

@@ -802,21 +802,17 @@ class ChatAnthropic(BaseChatModel):
] = None,
**kwargs: Any,
) -> Runnable[LanguageModelInput, BaseMessage]:
"""Bind tool-like objects to this chat model.
r"""Bind tool-like objects to this chat model.

Args:
tools: A list of tool definitions to bind to this chat model.
Supports Anthropic format tool schemas and any tool definition handled
by :meth:`langchain_core.utils.function_calling.convert_to_openai_tool`.
tool_choice: Which tool to require the model to call.
Options are:
- name of the tool (str): calls corresponding tool;
- ``"auto"`` or None: automatically selects a tool (including no tool);
- ``"any"``: force at least one tool to be called;
- or a dict of the form:
``{"type": "tool", "name": "tool_name"}``,
or ``{"type: "any"}``,
or ``{"type: "auto"}``;
tool_choice: Which tool to require the model to call. Options are:

- name of the tool as a string or as dict ``{"type": "tool", "name": "<<tool_name>>"}``: calls corresponding tool;
- ``"auto"``, ``{"type: "auto"}``, or None: automatically selects a tool (including no tool);
- ``"any"`` or ``{"type: "any"}``: force at least one tool to be called;
kwargs: Any additional parameters are passed directly to
``self.bind(**kwargs)``.

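As a quick, hedged illustration of the tool_choice options documented in the hunk above (not part of the diff): the GetWeather name and the claude-3-5-sonnet-20240620 model come from the examples further down, while the schema definition and variable names here are invented for illustration, and an ANTHROPIC_API_KEY is assumed to be set.

from pydantic import BaseModel, Field
from langchain_anthropic import ChatAnthropic


class GetWeather(BaseModel):
    """Get the current weather in a given location."""

    location: str = Field(..., description="City and state, e.g. San Francisco, CA")


llm = ChatAnthropic(model="claude-3-5-sonnet-20240620")

# "auto" (or omitting tool_choice): the model may answer directly or call a tool.
llm_auto = llm.bind_tools([GetWeather], tool_choice="auto")

# "any": force at least one tool call, but let the model pick which tool.
llm_any = llm.bind_tools([GetWeather], tool_choice="any")

# A specific tool, by name or with the dict form described above.
llm_forced = llm.bind_tools([GetWeather], tool_choice={"type": "tool", "name": "GetWeather"})
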
@@ -927,12 +923,14 @@ class ChatAnthropic(BaseChatModel):
|
||||
llm_with_tools.invoke("what is the weather like in San Francisco",)
|
||||
|
||||
This outputs:
|
||||
.. code-block:: pycon
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
AIMessage(content=[{'text': "Certainly! I can help you find out the current weather in San Francisco. To get this information, I'll use the GetWeather function. Let me fetch that data for you right away.", 'type': 'text'}, {'id': 'toolu_01TS5h8LNo7p5imcG7yRiaUM', 'input': {'location': 'San Francisco, CA'}, 'name': 'GetWeather', 'type': 'tool_use'}], response_metadata={'id': 'msg_01Xg7Wr5inFWgBxE5jH9rpRo', 'model': 'claude-3-5-sonnet-20240620', 'stop_reason': 'tool_use', 'stop_sequence': None, 'usage': {'input_tokens': 171, 'output_tokens': 96, 'cache_creation_input_tokens': 1470, 'cache_read_input_tokens': 0}}, id='run-b36a5b54-5d69-470e-a1b0-b932d00b089e-0', tool_calls=[{'name': 'GetWeather', 'args': {'location': 'San Francisco, CA'}, 'id': 'toolu_01TS5h8LNo7p5imcG7yRiaUM', 'type': 'tool_call'}], usage_metadata={'input_tokens': 171, 'output_tokens': 96, 'total_tokens': 267})
|
||||
|
||||
If we invoke the tool again, we can see that the "usage" information in the AIMessage.response_metadata shows that we had a cache hit:
|
||||
.. code-block:: pycon
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
AIMessage(content=[{'text': 'To get the current weather in San Francisco, I can use the GetWeather function. Let me check that for you.', 'type': 'text'}, {'id': 'toolu_01HtVtY1qhMFdPprx42qU2eA', 'input': {'location': 'San Francisco, CA'}, 'name': 'GetWeather', 'type': 'tool_use'}], response_metadata={'id': 'msg_016RfWHrRvW6DAGCdwB6Ac64', 'model': 'claude-3-5-sonnet-20240620', 'stop_reason': 'tool_use', 'stop_sequence': None, 'usage': {'input_tokens': 171, 'output_tokens': 82, 'cache_creation_input_tokens': 0, 'cache_read_input_tokens': 1470}}, id='run-88b1f825-dcb7-4277-ac27-53df55d22001-0', tool_calls=[{'name': 'GetWeather', 'args': {'location': 'San Francisco, CA'}, 'id': 'toolu_01HtVtY1qhMFdPprx42qU2eA', 'type': 'tool_call'}], usage_metadata={'input_tokens': 171, 'output_tokens': 82, 'total_tokens': 253})
|
||||
|
||||
|
||||
@@ -464,7 +464,7 @@ class ChatFireworks(BaseChatModel):
default_chunk_class: Type[BaseMessageChunk] = AIMessageChunk
for chunk in self.client.create(messages=message_dicts, **params):
if not isinstance(chunk, dict):
chunk = chunk.dict()
chunk = chunk.model_dump()
if len(chunk["choices"]) == 0:
continue
choice = chunk["choices"][0]
@@ -520,7 +520,7 @@ class ChatFireworks(BaseChatModel):
def _create_chat_result(self, response: Union[dict, BaseModel]) -> ChatResult:
generations = []
if not isinstance(response, dict):
response = response.dict()
response = response.model_dump()
token_usage = response.get("usage", {})
for res in response["choices"]:
message = _convert_dict_to_message(res["message"])
@@ -558,7 +558,7 @@ class ChatFireworks(BaseChatModel):
default_chunk_class: Type[BaseMessageChunk] = AIMessageChunk
async for chunk in self.async_client.acreate(messages=message_dicts, **params):
if not isinstance(chunk, dict):
chunk = chunk.dict()
chunk = chunk.model_dump()
if len(chunk["choices"]) == 0:
continue
choice = chunk["choices"][0]

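The three hunks above are the same mechanical migration: Fireworks streaming chunks and responses are Pydantic models, and Pydantic v2 renames BaseModel.dict() to model_dump(). A minimal sketch of the rename (the Chunk model below is invented for illustration, not the Fireworks type):

from pydantic import BaseModel


class Chunk(BaseModel):
    choices: list = []


chunk = Chunk()
# Pydantic v2 spelling, as used by the updated code path above:
assert chunk.model_dump() == {"choices": []}
# chunk.dict() still works in v2 but emits a deprecation warning.
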
libs/partners/fireworks/poetry.lock (generated, 6 changed lines)
@@ -667,7 +667,7 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.0.dev4"
|
||||
version = "0.3.0.dev5"
|
||||
description = "Building applications with LLMs through composability"
|
||||
optional = false
|
||||
python-versions = ">=3.9,<4.0"
|
||||
@@ -676,7 +676,7 @@ develop = true
|
||||
|
||||
[package.dependencies]
|
||||
jsonpatch = "^1.33"
|
||||
langsmith = "^0.1.112"
|
||||
langsmith = "^0.1.117"
|
||||
packaging = ">=23.2,<25"
|
||||
pydantic = "^2.7.4"
|
||||
PyYAML = ">=5.3"
|
||||
@@ -1695,4 +1695,4 @@ multidict = ">=4.0"
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = ">=3.9,<4.0"
|
||||
content-hash = "2a324e4a90491192ea7b6b846a799acbcbcf7c8ea8fa00e0534e00d447d3ecbc"
|
||||
content-hash = "9fd45aa6c8db9fc54fc39ac68a97c3c71d8903bb19a2fa706564569b2d1c76f6"
|
||||
|
||||
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "langchain-fireworks"
version = "0.2.0.dev1"
version = "0.2.0.dev2"
description = "An integration package connecting Fireworks and LangChain"
authors = []
readme = "README.md"
@@ -20,7 +20,7 @@ disallow_untyped_defs = "True"

[tool.poetry.dependencies]
python = ">=3.9,<4.0"
langchain-core = { version = "^0.3.0.dev1", allow-prereleases = true }
langchain-core = { version = "^0.3.0.dev5", allow-prereleases = true }
fireworks-ai = ">=0.13.0"
openai = "^1.10.0"
requests = "^2"

@@ -62,6 +62,7 @@ class PineconeEmbeddings(BaseModel, Embeddings):
model_config = ConfigDict(
extra="forbid",
populate_by_name=True,
protected_namespaces=(),
)

@model_validator(mode="before")

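For context on the populate_by_name=True entry added above: with that flag, a field declared with an alias can also be populated by its Python attribute name. A small self-contained sketch (the Demo model and its alias are invented for illustration, not taken from PineconeEmbeddings):

from pydantic import BaseModel, ConfigDict, Field


class Demo(BaseModel):
    model_config = ConfigDict(populate_by_name=True)

    api_key: str = Field(alias="apiKey")


assert Demo(apiKey="secret").api_key == "secret"   # populated via the alias
assert Demo(api_key="secret").api_key == "secret"  # via the field name; needs populate_by_name=True
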
libs/partners/pinecone/poetry.lock (generated, 199 changed lines)
@@ -1,118 +1,91 @@
|
||||
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "aiohappyeyeballs"
|
||||
version = "2.4.0"
|
||||
description = "Happy Eyeballs for asyncio"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "aiohappyeyeballs-2.4.0-py3-none-any.whl", hash = "sha256:7ce92076e249169a13c2f49320d1967425eaf1f407522d707d59cac7628d62bd"},
|
||||
{file = "aiohappyeyeballs-2.4.0.tar.gz", hash = "sha256:55a1714f084e63d49639800f95716da97a1f173d46a16dfcfda0016abb93b6b2"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "aiohttp"
|
||||
version = "3.10.5"
|
||||
version = "3.9.5"
|
||||
description = "Async http client/server framework (asyncio)"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:18a01eba2574fb9edd5f6e5fb25f66e6ce061da5dab5db75e13fe1558142e0a3"},
|
||||
{file = "aiohttp-3.10.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:94fac7c6e77ccb1ca91e9eb4cb0ac0270b9fb9b289738654120ba8cebb1189c6"},
|
||||
{file = "aiohttp-3.10.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2f1f1c75c395991ce9c94d3e4aa96e5c59c8356a15b1c9231e783865e2772699"},
|
||||
{file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f7acae3cf1a2a2361ec4c8e787eaaa86a94171d2417aae53c0cca6ca3118ff6"},
|
||||
{file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94c4381ffba9cc508b37d2e536b418d5ea9cfdc2848b9a7fea6aebad4ec6aac1"},
|
||||
{file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c31ad0c0c507894e3eaa843415841995bf8de4d6b2d24c6e33099f4bc9fc0d4f"},
|
||||
{file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0912b8a8fadeb32ff67a3ed44249448c20148397c1ed905d5dac185b4ca547bb"},
|
||||
{file = "aiohttp-3.10.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d93400c18596b7dc4794d48a63fb361b01a0d8eb39f28800dc900c8fbdaca91"},
|
||||
{file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d00f3c5e0d764a5c9aa5a62d99728c56d455310bcc288a79cab10157b3af426f"},
|
||||
{file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d742c36ed44f2798c8d3f4bc511f479b9ceef2b93f348671184139e7d708042c"},
|
||||
{file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:814375093edae5f1cb31e3407997cf3eacefb9010f96df10d64829362ae2df69"},
|
||||
{file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8224f98be68a84b19f48e0bdc14224b5a71339aff3a27df69989fa47d01296f3"},
|
||||
{file = "aiohttp-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d9a487ef090aea982d748b1b0d74fe7c3950b109df967630a20584f9a99c0683"},
|
||||
{file = "aiohttp-3.10.5-cp310-cp310-win32.whl", hash = "sha256:d9ef084e3dc690ad50137cc05831c52b6ca428096e6deb3c43e95827f531d5ef"},
|
||||
{file = "aiohttp-3.10.5-cp310-cp310-win_amd64.whl", hash = "sha256:66bf9234e08fe561dccd62083bf67400bdbf1c67ba9efdc3dac03650e97c6088"},
|
||||
{file = "aiohttp-3.10.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8c6a4e5e40156d72a40241a25cc226051c0a8d816610097a8e8f517aeacd59a2"},
|
||||
{file = "aiohttp-3.10.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c634a3207a5445be65536d38c13791904fda0748b9eabf908d3fe86a52941cf"},
|
||||
{file = "aiohttp-3.10.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4aff049b5e629ef9b3e9e617fa6e2dfeda1bf87e01bcfecaf3949af9e210105e"},
|
||||
{file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1942244f00baaacaa8155eca94dbd9e8cc7017deb69b75ef67c78e89fdad3c77"},
|
||||
{file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e04a1f2a65ad2f93aa20f9ff9f1b672bf912413e5547f60749fa2ef8a644e061"},
|
||||
{file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f2bfc0032a00405d4af2ba27f3c429e851d04fad1e5ceee4080a1c570476697"},
|
||||
{file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:424ae21498790e12eb759040bbb504e5e280cab64693d14775c54269fd1d2bb7"},
|
||||
{file = "aiohttp-3.10.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:975218eee0e6d24eb336d0328c768ebc5d617609affaca5dbbd6dd1984f16ed0"},
|
||||
{file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4120d7fefa1e2d8fb6f650b11489710091788de554e2b6f8347c7a20ceb003f5"},
|
||||
{file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b90078989ef3fc45cf9221d3859acd1108af7560c52397ff4ace8ad7052a132e"},
|
||||
{file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ba5a8b74c2a8af7d862399cdedce1533642fa727def0b8c3e3e02fcb52dca1b1"},
|
||||
{file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:02594361128f780eecc2a29939d9dfc870e17b45178a867bf61a11b2a4367277"},
|
||||
{file = "aiohttp-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8fb4fc029e135859f533025bc82047334e24b0d489e75513144f25408ecaf058"},
|
||||
{file = "aiohttp-3.10.5-cp311-cp311-win32.whl", hash = "sha256:e1ca1ef5ba129718a8fc827b0867f6aa4e893c56eb00003b7367f8a733a9b072"},
|
||||
{file = "aiohttp-3.10.5-cp311-cp311-win_amd64.whl", hash = "sha256:349ef8a73a7c5665cca65c88ab24abe75447e28aa3bc4c93ea5093474dfdf0ff"},
|
||||
{file = "aiohttp-3.10.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:305be5ff2081fa1d283a76113b8df7a14c10d75602a38d9f012935df20731487"},
|
||||
{file = "aiohttp-3.10.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3a1c32a19ee6bbde02f1cb189e13a71b321256cc1d431196a9f824050b160d5a"},
|
||||
{file = "aiohttp-3.10.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:61645818edd40cc6f455b851277a21bf420ce347baa0b86eaa41d51ef58ba23d"},
|
||||
{file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c225286f2b13bab5987425558baa5cbdb2bc925b2998038fa028245ef421e75"},
|
||||
{file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ba01ebc6175e1e6b7275c907a3a36be48a2d487549b656aa90c8a910d9f3178"},
|
||||
{file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8eaf44ccbc4e35762683078b72bf293f476561d8b68ec8a64f98cf32811c323e"},
|
||||
{file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c43eb1ab7cbf411b8e387dc169acb31f0ca0d8c09ba63f9eac67829585b44f"},
|
||||
{file = "aiohttp-3.10.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de7a5299827253023c55ea549444e058c0eb496931fa05d693b95140a947cb73"},
|
||||
{file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4790f0e15f00058f7599dab2b206d3049d7ac464dc2e5eae0e93fa18aee9e7bf"},
|
||||
{file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:44b324a6b8376a23e6ba25d368726ee3bc281e6ab306db80b5819999c737d820"},
|
||||
{file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0d277cfb304118079e7044aad0b76685d30ecb86f83a0711fc5fb257ffe832ca"},
|
||||
{file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:54d9ddea424cd19d3ff6128601a4a4d23d54a421f9b4c0fff740505813739a91"},
|
||||
{file = "aiohttp-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4f1c9866ccf48a6df2b06823e6ae80573529f2af3a0992ec4fe75b1a510df8a6"},
|
||||
{file = "aiohttp-3.10.5-cp312-cp312-win32.whl", hash = "sha256:dc4826823121783dccc0871e3f405417ac116055bf184ac04c36f98b75aacd12"},
|
||||
{file = "aiohttp-3.10.5-cp312-cp312-win_amd64.whl", hash = "sha256:22c0a23a3b3138a6bf76fc553789cb1a703836da86b0f306b6f0dc1617398abc"},
|
||||
{file = "aiohttp-3.10.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7f6b639c36734eaa80a6c152a238242bedcee9b953f23bb887e9102976343092"},
|
||||
{file = "aiohttp-3.10.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f29930bc2921cef955ba39a3ff87d2c4398a0394ae217f41cb02d5c26c8b1b77"},
|
||||
{file = "aiohttp-3.10.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f489a2c9e6455d87eabf907ac0b7d230a9786be43fbe884ad184ddf9e9c1e385"},
|
||||
{file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:123dd5b16b75b2962d0fff566effb7a065e33cd4538c1692fb31c3bda2bfb972"},
|
||||
{file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b98e698dc34966e5976e10bbca6d26d6724e6bdea853c7c10162a3235aba6e16"},
|
||||
{file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3b9162bab7e42f21243effc822652dc5bb5e8ff42a4eb62fe7782bcbcdfacf6"},
|
||||
{file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1923a5c44061bffd5eebeef58cecf68096e35003907d8201a4d0d6f6e387ccaa"},
|
||||
{file = "aiohttp-3.10.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d55f011da0a843c3d3df2c2cf4e537b8070a419f891c930245f05d329c4b0689"},
|
||||
{file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:afe16a84498441d05e9189a15900640a2d2b5e76cf4efe8cbb088ab4f112ee57"},
|
||||
{file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8112fb501b1e0567a1251a2fd0747baae60a4ab325a871e975b7bb67e59221f"},
|
||||
{file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:1e72589da4c90337837fdfe2026ae1952c0f4a6e793adbbfbdd40efed7c63599"},
|
||||
{file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4d46c7b4173415d8e583045fbc4daa48b40e31b19ce595b8d92cf639396c15d5"},
|
||||
{file = "aiohttp-3.10.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:33e6bc4bab477c772a541f76cd91e11ccb6d2efa2b8d7d7883591dfb523e5987"},
|
||||
{file = "aiohttp-3.10.5-cp313-cp313-win32.whl", hash = "sha256:c58c6837a2c2a7cf3133983e64173aec11f9c2cd8e87ec2fdc16ce727bcf1a04"},
|
||||
{file = "aiohttp-3.10.5-cp313-cp313-win_amd64.whl", hash = "sha256:38172a70005252b6893088c0f5e8a47d173df7cc2b2bd88650957eb84fcf5022"},
|
||||
{file = "aiohttp-3.10.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f6f18898ace4bcd2d41a122916475344a87f1dfdec626ecde9ee802a711bc569"},
|
||||
{file = "aiohttp-3.10.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5ede29d91a40ba22ac1b922ef510aab871652f6c88ef60b9dcdf773c6d32ad7a"},
|
||||
{file = "aiohttp-3.10.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:673f988370f5954df96cc31fd99c7312a3af0a97f09e407399f61583f30da9bc"},
|
||||
{file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58718e181c56a3c02d25b09d4115eb02aafe1a732ce5714ab70326d9776457c3"},
|
||||
{file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b38b1570242fbab8d86a84128fb5b5234a2f70c2e32f3070143a6d94bc854cf"},
|
||||
{file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:074d1bff0163e107e97bd48cad9f928fa5a3eb4b9d33366137ffce08a63e37fe"},
|
||||
{file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd31f176429cecbc1ba499d4aba31aaccfea488f418d60376b911269d3b883c5"},
|
||||
{file = "aiohttp-3.10.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7384d0b87d4635ec38db9263e6a3f1eb609e2e06087f0aa7f63b76833737b471"},
|
||||
{file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8989f46f3d7ef79585e98fa991e6ded55d2f48ae56d2c9fa5e491a6e4effb589"},
|
||||
{file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:c83f7a107abb89a227d6c454c613e7606c12a42b9a4ca9c5d7dad25d47c776ae"},
|
||||
{file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cde98f323d6bf161041e7627a5fd763f9fd829bcfcd089804a5fdce7bb6e1b7d"},
|
||||
{file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:676f94c5480d8eefd97c0c7e3953315e4d8c2b71f3b49539beb2aa676c58272f"},
|
||||
{file = "aiohttp-3.10.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:2d21ac12dc943c68135ff858c3a989f2194a709e6e10b4c8977d7fcd67dfd511"},
|
||||
{file = "aiohttp-3.10.5-cp38-cp38-win32.whl", hash = "sha256:17e997105bd1a260850272bfb50e2a328e029c941c2708170d9d978d5a30ad9a"},
|
||||
{file = "aiohttp-3.10.5-cp38-cp38-win_amd64.whl", hash = "sha256:1c19de68896747a2aa6257ae4cf6ef59d73917a36a35ee9d0a6f48cff0f94db8"},
|
||||
{file = "aiohttp-3.10.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7e2fe37ac654032db1f3499fe56e77190282534810e2a8e833141a021faaab0e"},
|
||||
{file = "aiohttp-3.10.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f5bf3ead3cb66ab990ee2561373b009db5bc0e857549b6c9ba84b20bc462e172"},
|
||||
{file = "aiohttp-3.10.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b2c16a919d936ca87a3c5f0e43af12a89a3ce7ccbce59a2d6784caba945b68b"},
|
||||
{file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad146dae5977c4dd435eb31373b3fe9b0b1bf26858c6fc452bf6af394067e10b"},
|
||||
{file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c5c6fa16412b35999320f5c9690c0f554392dc222c04e559217e0f9ae244b92"},
|
||||
{file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:95c4dc6f61d610bc0ee1edc6f29d993f10febfe5b76bb470b486d90bbece6b22"},
|
||||
{file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da452c2c322e9ce0cfef392e469a26d63d42860f829026a63374fde6b5c5876f"},
|
||||
{file = "aiohttp-3.10.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:898715cf566ec2869d5cb4d5fb4be408964704c46c96b4be267442d265390f32"},
|
||||
{file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:391cc3a9c1527e424c6865e087897e766a917f15dddb360174a70467572ac6ce"},
|
||||
{file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:380f926b51b92d02a34119d072f178d80bbda334d1a7e10fa22d467a66e494db"},
|
||||
{file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce91db90dbf37bb6fa0997f26574107e1b9d5ff939315247b7e615baa8ec313b"},
|
||||
{file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9093a81e18c45227eebe4c16124ebf3e0d893830c6aca7cc310bfca8fe59d857"},
|
||||
{file = "aiohttp-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ee40b40aa753d844162dcc80d0fe256b87cba48ca0054f64e68000453caead11"},
|
||||
{file = "aiohttp-3.10.5-cp39-cp39-win32.whl", hash = "sha256:03f2645adbe17f274444953bdea69f8327e9d278d961d85657cb0d06864814c1"},
|
||||
{file = "aiohttp-3.10.5-cp39-cp39-win_amd64.whl", hash = "sha256:d17920f18e6ee090bdd3d0bfffd769d9f2cb4c8ffde3eb203777a3895c128862"},
|
||||
{file = "aiohttp-3.10.5.tar.gz", hash = "sha256:f071854b47d39591ce9a17981c46790acb30518e2f83dfca8db2dfa091178691"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"},
|
||||
{file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"},
|
||||
{file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"},
|
||||
{file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"},
|
||||
{file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"},
|
||||
{file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"},
|
||||
{file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
aiohappyeyeballs = ">=2.3.0"
|
||||
aiosignal = ">=1.1.2"
|
||||
async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""}
|
||||
attrs = ">=17.3.0"
|
||||
@@ -121,7 +94,7 @@ multidict = ">=4.5,<7.0"
|
||||
yarl = ">=1.0,<2.0"
|
||||
|
||||
[package.extras]
|
||||
speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"]
|
||||
speedups = ["Brotli", "aiodns", "brotlicffi"]
|
||||
|
||||
[[package]]
|
||||
name = "aiosignal"
|
||||
@@ -639,7 +612,7 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.0.dev1"
|
||||
version = "0.3.0.dev5"
|
||||
description = "Building applications with LLMs through composability"
|
||||
optional = false
|
||||
python-versions = ">=3.9,<4.0"
|
||||
@@ -648,7 +621,7 @@ develop = true
|
||||
|
||||
[package.dependencies]
|
||||
jsonpatch = "^1.33"
|
||||
langsmith = "^0.1.75"
|
||||
langsmith = "^0.1.117"
|
||||
packaging = ">=23.2,<25"
|
||||
pydantic = "^2.7.4"
|
||||
PyYAML = ">=5.3"
|
||||
@@ -661,7 +634,7 @@ url = "../../core"
|
||||
|
||||
[[package]]
|
||||
name = "langchain-openai"
|
||||
version = "0.1.23"
|
||||
version = "0.2.0.dev2"
|
||||
description = "An integration package connecting OpenAI and LangChain"
|
||||
optional = false
|
||||
python-versions = ">=3.9,<4.0"
|
||||
@@ -669,7 +642,7 @@ files = []
|
||||
develop = true
|
||||
|
||||
[package.dependencies]
|
||||
langchain-core = "^0.3.0.dev"
|
||||
langchain-core = "^0.3.0.dev1"
|
||||
openai = "^1.40.0"
|
||||
tiktoken = ">=0.7,<1"
|
||||
|
||||
@@ -679,13 +652,13 @@ url = "../openai"
|
||||
|
||||
[[package]]
|
||||
name = "langsmith"
|
||||
version = "0.1.115"
|
||||
version = "0.1.118"
|
||||
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langsmith-0.1.115-py3-none-any.whl", hash = "sha256:04e35cfd4c2d4ff1ea10bb577ff43957b05ebb3d9eb4e06e200701f4a2b4ac9f"},
|
||||
{file = "langsmith-0.1.115.tar.gz", hash = "sha256:3b775377d858d32354f3ee0dd1ed637068cfe9a1f13e7b3bfa82db1615cdffc9"},
|
||||
{file = "langsmith-0.1.118-py3-none-any.whl", hash = "sha256:f017127b3efb037da5e46ff4f8583e8192e7955191737240c327f3eadc144d7c"},
|
||||
{file = "langsmith-0.1.118.tar.gz", hash = "sha256:ff1ca06c92c6081250244ebbce5d0bb347b9d898d2e9b60a13b11f0f0720f09f"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1908,4 +1881,4 @@ multidict = ">=4.0"
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = ">=3.9,<3.13"
|
||||
content-hash = "86a67d355f5c4f67f156c55dd85030ec0f4c0ce736d9122114f4dc166fdbdc80"
|
||||
content-hash = "c2f7f222f8c3c0c46e4feea598c66fc6efb1c0090353ee9bba2bd656a3854850"
|
||||
|
||||
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "langchain-pinecone"
version = "0.1.3"
version = "0.2.0.dev1"
description = "An integration package connecting Pinecone and LangChain"
authors = []
readme = "README.md"
@@ -20,9 +20,9 @@ disallow_untyped_defs = "True"

[tool.poetry.dependencies]
python = ">=3.9,<3.13"
langchain-core = "^0.3.0.dev"
langchain-core = { version = "^0.3.0.dev5", allow-prereleases = true }
pinecone-client = "^5.0.0"
aiohttp = "^3.9.5"
aiohttp = ">=3.9.5,<3.10"

[[tool.poetry.dependencies.numpy]]
version = "^1"

@@ -91,6 +91,7 @@ class TestPinecone:
)
time.sleep(DEFAULT_SLEEP) # prevent race condition
output = docsearch.similarity_search(unique_id, k=1, namespace=NAMESPACE_NAME)
output[0].id = None # overwrite ID for ease of comparison
assert output == [Document(page_content=needs)]

def test_from_texts_with_metadatas(
@@ -115,6 +116,7 @@ class TestPinecone:
time.sleep(DEFAULT_SLEEP) # prevent race condition
output = docsearch.similarity_search(needs, k=1, namespace=namespace)

output[0].id = None
# TODO: why metadata={"page": 0.0}) instead of {"page": 0}?
assert output == [Document(page_content=needs, metadata={"page": 0.0})]

@@ -140,6 +142,8 @@ class TestPinecone:
sorted_documents = sorted(docs, key=lambda x: x.metadata["page"])
print(sorted_documents) # noqa: T201

for document in sorted_documents:
document.id = None # overwrite IDs for ease of comparison
# TODO: why metadata={"page": 0.0}) instead of {"page": 0}, etc???
assert sorted_documents == [
Document(page_content="foo", metadata={"page": 0.0}),

libs/partners/qdrant/poetry.lock (generated, 12 changed lines)
@@ -620,7 +620,7 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "langchain-core"
|
||||
version = "0.3.0.dev1"
|
||||
version = "0.3.0.dev5"
|
||||
description = "Building applications with LLMs through composability"
|
||||
optional = false
|
||||
python-versions = ">=3.9,<4.0"
|
||||
@@ -629,7 +629,7 @@ develop = true
|
||||
|
||||
[package.dependencies]
|
||||
jsonpatch = "^1.33"
|
||||
langsmith = "^0.1.75"
|
||||
langsmith = "^0.1.117"
|
||||
packaging = ">=23.2,<25"
|
||||
pydantic = "^2.7.4"
|
||||
PyYAML = ">=5.3"
|
||||
@@ -642,13 +642,13 @@ url = "../../core"
|
||||
|
||||
[[package]]
|
||||
name = "langsmith"
|
||||
version = "0.1.115"
|
||||
version = "0.1.118"
|
||||
description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8.1"
|
||||
files = [
|
||||
{file = "langsmith-0.1.115-py3-none-any.whl", hash = "sha256:04e35cfd4c2d4ff1ea10bb577ff43957b05ebb3d9eb4e06e200701f4a2b4ac9f"},
|
||||
{file = "langsmith-0.1.115.tar.gz", hash = "sha256:3b775377d858d32354f3ee0dd1ed637068cfe9a1f13e7b3bfa82db1615cdffc9"},
|
||||
{file = "langsmith-0.1.118-py3-none-any.whl", hash = "sha256:f017127b3efb037da5e46ff4f8583e8192e7955191737240c327f3eadc144d7c"},
|
||||
{file = "langsmith-0.1.118.tar.gz", hash = "sha256:ff1ca06c92c6081250244ebbce5d0bb347b9d898d2e9b60a13b11f0f0720f09f"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -2056,4 +2056,4 @@ fastembed = ["fastembed"]
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = ">=3.9,<4.0"
|
||||
content-hash = "5ee9ce7ba2e1f6f2cc0ef47c529c25da7c1e67cf49768624789eeeddfe40be6d"
|
||||
content-hash = "97912fb88d52004e107efe17259b1e31e03bf5134d5b28694cfe6b65b8402c1a"
|
||||
|
||||
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"

[tool.poetry]
name = "langchain-qdrant"
version = "0.1.3"
version = "0.2.0.dev1"
description = "An integration package connecting Qdrant and LangChain"
authors = []
readme = "README.md"
@@ -23,9 +23,8 @@ disallow_untyped_defs = true

[tool.poetry.dependencies]
python = ">=3.9,<4.0"
langchain-core = "^0.3.0.dev"
langchain-core = { version = "^0.3.0.dev5", allow-prereleases = true }
qdrant-client = "^1.10.1"
pydantic = "^2.7.4"
fastembed = { version = "^0.3.3", python = ">=3.9,<3.13", optional = true }

[tool.poetry.extras]

@@ -302,6 +302,28 @@ class ChatModelIntegrationTests(ChatModelTests):
assert isinstance(chunk, dict) # for mypy
assert set(chunk.keys()) == {"setup", "punchline"}

def test_structured_output_optional_param(self, model: BaseChatModel) -> None:
"""Test to verify structured output with an optional param."""
if not self.has_tool_calling:
pytest.skip("Test requires tool calling.")

class Joke(BaseModel):
"""Joke to tell user."""

setup: str = Field(description="question to set up a joke")
punchline: Optional[str] = Field(
default=None, description="answer to resolve the joke"
)

chat = model.with_structured_output(Joke) # type: ignore[arg-type]
setup_result = chat.invoke(
"Give me the setup to a joke about cats, no punchline."
)
assert isinstance(setup_result, Joke)

joke_result = chat.invoke("Give me a joke about cats, include the punchline.")
assert isinstance(joke_result, Joke)

def test_tool_message_histories_string_content(self, model: BaseChatModel) -> None:
"""
Test that message histories are compatible with string tool contents