diff --git a/docs/docs/integrations/memory/recallio_memory.ipynb b/docs/docs/integrations/memory/recallio_memory.ipynb new file mode 100644 index 00000000000..3ecb7718ebf --- /dev/null +++ b/docs/docs/integrations/memory/recallio_memory.ipynb @@ -0,0 +1,215 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# RecallioMemory + LangChain Integration Demo\n", + "A minimal notebook to show drop-in usage of RecallioMemory in LangChain (with scoped writes and recall)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%pip install recallio langchain langchain-recallio openai" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup: API Keys & Imports" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_recallio.memory import RecallioMemory\n", + "from langchain_openai import ChatOpenAI\n", + "from langchain.prompts import ChatPromptTemplate\n", + "import os\n", + "\n", + "# Set your keys here or use environment variables\n", + "RECALLIO_API_KEY = os.getenv(\"RECALLIO_API_KEY\", \"YOUR_RECALLIO_API_KEY\")\n", + "OPENAI_API_KEY = os.getenv(\"OPENAI_API_KEY\", \"YOUR_OPENAI_API_KEY\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Initialize RecallioMemory" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "memory = RecallioMemory(\n", + " project_id=\"project_abc\",\n", + " api_key=RECALLIO_API_KEY,\n", + " session_id=\"demo-session-001\",\n", + " user_id=\"demo-user-42\",\n", + " default_tags=[\"test\", \"langchain\"],\n", + " return_messages=True,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Build a LangChain ConversationChain with RecallioMemory" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + 
"outputs": [], + "source": [ + "# You can swap in any supported LLM here\n", + "llm = ChatOpenAI(api_key=OPENAI_API_KEY, temperature=0)\n", + "prompt = ChatPromptTemplate.from_messages(\n", + " [\n", + " (\n", + " \"system\",\n", + " \"The following is a friendly conversation between a human and an AI. \"\n", + " \"The AI is talkative and provides lots of specific details from its context. \"\n", + " \"If the AI does not know the answer to a question, it truthfully says it does not know.\",\n", + " ),\n", + " (\"placeholder\", \"{history}\"), # RecallioMemory will fill this slot\n", + " (\"human\", \"{input}\"),\n", + " ]\n", + ")\n", + "\n", + "# LCEL chain that returns an AIMessage\n", + "base_chain = prompt | llm\n", + "\n", + "\n", + "# Create a stateful chain using RecallioMemory\n", + "def chat_with_memory(user_input: str):\n", + " # Load conversation history from memory\n", + " memory_vars = memory.load_memory_variables({\"input\": user_input})\n", + "\n", + " # Run the chain with history and user input\n", + " response = base_chain.invoke(\n", + " {\"input\": user_input, \"history\": memory_vars.get(\"history\", \"\")}\n", + " )\n", + "\n", + " # Save the conversation to memory\n", + " memory.save_context({\"input\": user_input}, {\"output\": response.content})\n", + "\n", + " return response" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Example: Chat with Memory" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Bot: Hello Guillaume! It's nice to meet you. How can I assist you today?\n" + ] + } + ], + "source": [ + "# First user message – note the AI remembers the name\n", + "resp1 = chat_with_memory(\"Hi! My name is Guillaume. 
Remember that.\")\n", + "print(\"Bot:\", resp1.content)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Bot: Your name is Guillaume.\n" + ] + } + ], + "source": [ + "# Second user message – AI should recall the name from memory\n", + "resp2 = chat_with_memory(\"What is my name?\")\n", + "print(\"Bot:\", resp2.content)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## See What Is Stored in Recallio\n", + "This is for debugging/demo only; in production, you wouldn't do this on every run." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Current memory variables: {'history': [HumanMessage(content='Name is Guillaume', additional_kwargs={}, response_metadata={})]}\n" + ] + } + ], + "source": [ + "print(\"Current memory variables:\", memory.load_memory_variables({}))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Clear Memory (Optional Cleanup - Requires Manager-Level Key)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# memory.clear()\n", + "# print(\"Memory cleared.\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "name": "python", + "version": "3.10" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/docs/docs/integrations/providers/recallio.ipynb b/docs/docs/integrations/providers/recallio.ipynb new file mode 100644 index 00000000000..375663b181b --- /dev/null +++ b/docs/docs/integrations/providers/recallio.ipynb @@ -0,0 +1,31 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Recallio\n", + "\n", + "[Recallio](https://recallio.ai/) is a powerful API that allows you to 
store, index, and retrieve application “memories” with built-in fact extraction, dynamic summaries, reranked recall, and a full knowledge-graph layer.\n", + "\n", + "\n", + "## Installation\n", + "\n", + "```bash\n", + "pip install langchain-recallio\n", + "```\n", + "\n", + "```python\n", + "from langchain_recallio.memory import RecallioMemory\n", + "```" + ] + } + ], + "metadata": { + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/libs/packages.yml b/libs/packages.yml index 312d9efae6f..49eff9d7b58 100644 --- a/libs/packages.yml +++ b/libs/packages.yml @@ -700,6 +700,9 @@ packages: - name: langchain-tensorlake path: . repo: tensorlakeai/langchain-tensorlake downloads: 781 downloads_updated_at: '2025-08-10T21:38:36.795416+00:00' +- name: langchain-recallio + path: . + repo: recallio/langchain-recallio - name: langchain-gradient diff --git a/uv.lock b/uv.lock index 28ffabb41d2..273d0668026 100644 --- a/uv.lock +++ b/uv.lock @@ -2814,7 +2814,7 @@ typing = [] [[package]] name = "langchain-openai" -version = "0.3.29" +version = "0.3.30" source = { editable = "libs/partners/openai" } dependencies = [ { name = "langchain-core" }, @@ -2825,14 +2825,14 @@ dependencies = [ [package.metadata] requires-dist = [ { name = "langchain-core", editable = "libs/core" }, - { name = "openai", specifier = ">=1.86.0,<2.0.0" }, + { name = "openai", specifier = ">=1.99.9,<2.0.0" }, { name = "tiktoken", specifier = ">=0.7,<1" }, ] + [package.metadata.requires-dev] codespell = [{ name = "codespell", specifier = ">=2.2.0,<3.0.0" }] dev = [{ name = "langchain-core", editable = "libs/core" }] -lint = [{ name = "ruff", specifier = ">=0.12.2,<0.13" }] +lint = [{ name = "ruff", specifier = ">=0.12.8,<0.13" }] test = [ { name = "freezegun", specifier = ">=1.2.2,<2.0.0" }, { name = "langchain-core", editable = "libs/core" }, @@ -2858,7 +2858,7 @@ test-integration = [ ] typing = [ { name = "langchain-core", editable = "libs/core" }, - { name = 
"mypy", specifier = ">=1.10,<2.0" }, + { name = "mypy", specifier = ">=1.17.1,<2.0" }, { name = "types-tqdm", specifier = ">=4.66.0.5,<5.0.0.0" }, ] @@ -2922,7 +2922,7 @@ test-integration = [ ] typing = [ { name = "lxml-stubs", specifier = ">=0.5.1,<1.0.0" }, - { name = "mypy", specifier = ">=1.15,<2.0" }, + { name = "mypy", specifier = ">=1.17.1,<1.18" }, { name = "tiktoken", specifier = ">=0.8.0,<1.0.0" }, { name = "types-requests", specifier = ">=2.31.0.20240218,<3.0.0.0" }, ] @@ -3971,7 +3971,7 @@ wheels = [ [[package]] name = "openai" -version = "1.88.0" +version = "1.99.9" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -3983,9 +3983,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5a/ea/bbeef604d1fe0f7e9111745bb8a81362973a95713b28855beb9a9832ab12/openai-1.88.0.tar.gz", hash = "sha256:122d35e42998255cf1fc84560f6ee49a844e65c054cd05d3e42fda506b832bb1", size = 470963, upload-time = "2025-06-17T05:04:45.856Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8a/d2/ef89c6f3f36b13b06e271d3cc984ddd2f62508a0972c1cbcc8485a6644ff/openai-1.99.9.tar.gz", hash = "sha256:f2082d155b1ad22e83247c3de3958eb4255b20ccf4a1de2e6681b6957b554e92", size = 506992, upload-time = "2025-08-12T02:31:10.054Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/03/ef68d77a38dd383cbed7fc898857d394d5a8b0520a35f054e7fe05dc3ac1/openai-1.88.0-py3-none-any.whl", hash = "sha256:7edd7826b3b83f5846562a6f310f040c79576278bf8e3687b30ba05bb5dff978", size = 734293, upload-time = "2025-06-17T05:04:43.858Z" }, + { url = "https://files.pythonhosted.org/packages/e8/fb/df274ca10698ee77b07bff952f302ea627cc12dac6b85289485dd77db6de/openai-1.99.9-py3-none-any.whl", hash = "sha256:9dbcdb425553bae1ac5d947147bebbd630d91bbfc7788394d4c4f3a35682ab3a", size = 786816, upload-time = "2025-08-12T02:31:08.34Z" }, ] [[package]]