various templates improvements (#12500)

This commit is contained in:
Harrison Chase
2023-10-28 22:13:22 -07:00
committed by GitHub
parent d85d4d7822
commit 9e0ae56287
50 changed files with 462 additions and 282 deletions

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2023 LangChain, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,14 @@
# Extraction with Anthropic Function Calling
This template enables [Anthropic function calling](https://python.langchain.com/docs/integrations/chat/extraction_anthropic_functions).
This is a wrapper around Anthropic's API that uses prompting and output parsing to replicate the OpenAI functions experience.
Specify the information you want to extract in `chain.py`.
By default, it will extract the title and author of papers.
## LLM
This template uses `Claude2` by default.
Be sure that `ANTHROPIC_API_KEY` is set in your environment.
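
To extract different information, adjust the pydantic schema in `chain.py`. A minimal sketch of adding one extra field (the `published_year` field is purely illustrative and not part of the template):

```python
# Sketch only: extends the template's default schema with a hypothetical field.
from typing import List, Optional

from langchain.pydantic_v1 import BaseModel


class Paper(BaseModel):
    """Information about papers mentioned."""

    title: str
    author: Optional[str]
    published_year: Optional[int]  # hypothetical extra field, for illustration


class Info(BaseModel):
    """Information to extract."""

    papers: List[Paper]
```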

View File

@@ -0,0 +1,83 @@
{
"cells": [
{
"cell_type": "markdown",
"id": "4ae4b789",
"metadata": {},
"source": [
"## Document Loading\n",
"\n",
"Load a blog post on agents."
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "5d6bd62e",
"metadata": {},
"outputs": [],
"source": [
"from langchain.document_loaders import WebBaseLoader\n",
"loader = WebBaseLoader(\"https://lilianweng.github.io/posts/2023-06-23-agent/\")\n",
"text = loader.load()"
]
},
{
"cell_type": "markdown",
"id": "8e21575d",
"metadata": {},
"source": [
"## Run Template\n",
"\n",
"As shown in the README, add template and start server:\n",
"```\n",
"langchain serve add extraction-anthropic-functions\n",
"langchain start\n",
"```\n",
"\n",
"We can now look at the endpoints:\n",
"\n",
"http://127.0.0.1:8000/docs#\n",
"\n",
"And specifically at our loaded template:\n",
"\n",
"http://127.0.0.1:8000/docs#/default/invoke_extraction-anthropic-functions_invoke_post\n",
" \n",
"We can also use remote runnable to call it:"
]
},
{
"cell_type": "code",
"execution_count": 4,
"id": "92edba86",
"metadata": {},
"outputs": [],
"source": [
"from langserve.client import RemoteRunnable\n",
"anthropic_function_model = RemoteRunnable('http://localhost:8000/extraction-anthropic-functions')\n",
"anthropic_function_model.invoke(text[0].page_content[0:1500])"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "langserve",
"language": "python",
"name": "langserve"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.16"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
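
Beyond `RemoteRunnable`, the served template can be called over plain HTTP. A rough sketch with `requests`, assuming langserve's usual `{"input": ...}` request and `{"output": ...}` response shapes:

```python
# Sketch: calling the running server over HTTP instead of RemoteRunnable.
# The payload and response shapes below are assumptions about langserve's
# default /invoke route, not something shown in this template.
import requests

article = "..."  # e.g. text[0].page_content[0:1500] from the notebook above

response = requests.post(
    "http://localhost:8000/extraction-anthropic-functions/invoke",
    json={"input": article},
)
response.raise_for_status()
print(response.json()["output"])
```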

View File

@@ -0,0 +1,3 @@
from extraction_anthropic_functions.chain import chain

__all__ = ["chain"]

View File

@@ -0,0 +1,38 @@
from typing import List, Optional

from langchain.output_parsers.openai_functions import JsonKeyOutputFunctionsParser
from langchain.prompts import ChatPromptTemplate
from langchain.pydantic_v1 import BaseModel
from langchain.utils.openai_functions import convert_pydantic_to_openai_function
from langchain_experimental.llms.anthropic_functions import AnthropicFunctions

template = """An article will be passed to you. Extract from it all papers that are mentioned by this article.

Do not extract the name of the article itself. If no papers are mentioned that's fine - you don't need to extract any! Just return an empty list.

Do not make up or guess ANY extra information. Only extract what exactly is in the text."""  # noqa: E501

prompt = ChatPromptTemplate.from_messages([("system", template), ("human", "{input}")])


# Function output schema
class Paper(BaseModel):
    """Information about papers mentioned."""

    title: str
    author: Optional[str]


class Info(BaseModel):
    """Information to extract."""

    papers: List[Paper]


# Function definition: bind the Info schema as an OpenAI-style function and
# parse the "papers" key out of the returned function-call arguments.
model = AnthropicFunctions()
function = [convert_pydantic_to_openai_function(Info)]
chain = prompt | model.bind(
    functions=function, function_call={"name": "Info"}
) | JsonKeyOutputFunctionsParser(key_name="papers")
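
For local testing outside of langserve, the chain can also be invoked directly. A small sketch, assuming `ANTHROPIC_API_KEY` is set and using an invented snippet of text:

```python
# Sketch: direct invocation of the chain, outside the langserve app.
# Assumes ANTHROPIC_API_KEY is set; the sample text below is invented.
from extraction_anthropic_functions import chain

sample = (
    "In this post we build on ReAct (Yao et al.) and Reflexion "
    "(Shinn and Labash) to design an autonomous agent."
)
papers = chain.invoke({"input": sample})
print(papers)  # expected: a list of dicts with "title" and "author" keys
```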

File diff suppressed because it is too large

View File

@@ -0,0 +1,21 @@
[tool.poetry]
name = "extraction-anthropic-functions"
version = "0.1.0"
description = ""
authors = ["Lance Martin <lance@langchain.dev>"]
readme = "README.md"

[tool.poetry.dependencies]
python = ">=3.8.1,<4.0"
langchain = ">=0.0.322"
anthropic = ">=0.5.0"
langchainhub = ">=0.1.13"
langchain-experimental = "^0.0.36"

[tool.langserve]
export_module = "extraction_anthropic_functions"
export_attr = "chain"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
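
The `[tool.langserve]` table points the LangChain CLI at the module and attribute to serve. Conceptually, the generated app mounts that attribute as routes, roughly along these lines (a sketch of the idea, not the actual generated server code):

```python
# Illustrative only: what the [tool.langserve] export amounts to when served.
from fastapi import FastAPI
from langserve import add_routes

from extraction_anthropic_functions import chain

app = FastAPI()
add_routes(app, chain, path="/extraction-anthropic-functions")
```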