diff --git a/libs/cli/langchain_cli/namespaces/serve.py b/libs/cli/langchain_cli/namespaces/serve.py index 94c794eaeb5..22c5c030cb2 100644 --- a/libs/cli/langchain_cli/namespaces/serve.py +++ b/libs/cli/langchain_cli/namespaces/serve.py @@ -95,7 +95,6 @@ def add( e.g.: langchain serve add extraction-openai-functions langchain serve add git+ssh://git@github.com/efriis/simple-pirate.git - langchain serve add git+https://github.com/efriis/hub.git#devbranch#subdirectory=mypackage """ parsed_deps = parse_dependencies(dependencies, repo, branch, api_path) @@ -200,7 +199,8 @@ def add( ] lines = ( - ["", "Great! Add the following to your app:", ""] + imports + [""] + routes + ["", "Great! Add the following to your app:\n\n```", ""] + + imports + [""] + routes + ["```"] ) typer.echo("\n".join(lines)) diff --git a/templates/anthropic-functions/README.md b/templates/anthropic-functions/README.md deleted file mode 100644 index bc0f29e8c24..00000000000 --- a/templates/anthropic-functions/README.md +++ /dev/null @@ -1,39 +0,0 @@ -# Function calling with Anthropic - -This template enables [Anthropic function calling](https://python.langchain.com/docs/integrations/chat/anthropic_functions). - -Function calling can be used for various tasks, such as extraction or tagging. - -Specify the function you want to use in `chain.py` - -By default, it will tag the input text using the following fields: - -* sentiment -* aggressiveness -* language - -## LLM - -This template will use `Claude2` by default. - -Be sure that `ANTHROPIC_API_KEY` is set in your enviorment. - -## Adding the template - -Create your LangServe app: -``` -langchain serve new my-app -cd my-app -``` - -Add template: -``` -langchain serve add anthropic-functions -``` - -Start server: -``` -langchain start -``` - -See Jupyter notebook `anthropic_functions` for various way to connect to the template. 
\ No newline at end of file diff --git a/templates/anthropic-functions/anthropic_functions/__init__.py b/templates/anthropic-functions/anthropic_functions/__init__.py deleted file mode 100644 index e3c87e24a8d..00000000000 --- a/templates/anthropic-functions/anthropic_functions/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from anthropic_functions.chain import chain - -__all__ = ["chain"] diff --git a/templates/anthropic-functions/anthropic_functions/chain.py b/templates/anthropic-functions/anthropic_functions/chain.py deleted file mode 100644 index 3d51bed0603..00000000000 --- a/templates/anthropic-functions/anthropic_functions/chain.py +++ /dev/null @@ -1,15 +0,0 @@ -from langchain.chains import create_tagging_chain -from langchain_experimental.llms.anthropic_functions import AnthropicFunctions - -model = AnthropicFunctions(model='claude-2') - -schema = { - "properties": { - "sentiment": {"type": "string"}, - "aggressiveness": {"type": "integer"}, - "language": {"type": "string"}, - } -} - -# This is LLMChain, which implements invoke -chain = create_tagging_chain(schema, model) \ No newline at end of file diff --git a/templates/anthropic-iterative-search/anthropic_iterative_search/chain.py b/templates/anthropic-iterative-search/anthropic_iterative_search/chain.py index ba2a7d2bcd2..7b29a932b59 100644 --- a/templates/anthropic-iterative-search/anthropic_iterative_search/chain.py +++ b/templates/anthropic-iterative-search/anthropic_iterative_search/chain.py @@ -1,5 +1,6 @@ from langchain.chat_models import ChatAnthropic from langchain.prompts import ChatPromptTemplate +from langchain.pydantic_v1 import BaseModel from langchain.schema.output_parser import StrOutputParser from .prompts import answer_prompt @@ -13,3 +14,12 @@ chain = { "query": lambda x: x["query"], "information": executor | (lambda x: x["output"]) } | prompt | model | StrOutputParser() + +# Add typing for the inputs to be used in the playground + + +class Inputs(BaseModel): + query: str + + +chain = chain.with_types(input_type=Inputs) diff --git a/templates/anthropic-iterative-search/pyproject.toml b/templates/anthropic-iterative-search/pyproject.toml index 34057727c98..45a82a468ff 100644 --- a/templates/anthropic-iterative-search/pyproject.toml +++ b/templates/anthropic-iterative-search/pyproject.toml @@ -1,5 +1,5 @@ [tool.poetry] -name = "anthropic_iterative_search" +name = "anthropic-iterative-search" version = "0.0.1" description = "" authors = [] diff --git a/templates/cassandra-entomology-rag/pyproject.toml b/templates/cassandra-entomology-rag/pyproject.toml index 83ef07c4410..b84377ab5bf 100644 --- a/templates/cassandra-entomology-rag/pyproject.toml +++ b/templates/cassandra-entomology-rag/pyproject.toml @@ -1,5 +1,5 @@ [tool.poetry] -name = "cassandra_entomology_rag" +name = "cassandra-entomology-rag" version = "0.0.1" description = "" authors = ["Stefano Lottini "] diff --git a/templates/cassandra-synonym-caching/pyproject.toml b/templates/cassandra-synonym-caching/pyproject.toml index 838417b5218..b416ab58389 100644 --- a/templates/cassandra-synonym-caching/pyproject.toml +++ b/templates/cassandra-synonym-caching/pyproject.toml @@ -1,5 +1,5 @@ [tool.poetry] -name = "cassandra_synonym_caching" +name = "cassandra-synonym-caching" version = "0.0.1" description = "" authors = ["Stefano Lottini "] diff --git a/templates/csv-agent/csv_agent/agent.py b/templates/csv-agent/csv_agent/agent.py index 9d04f36694f..6927cd78060 100644 --- a/templates/csv-agent/csv_agent/agent.py +++ b/templates/csv-agent/csv_agent/agent.py @@ -5,10 
+5,10 @@ from langchain.agents import AgentExecutor, OpenAIFunctionsAgent from langchain.chat_models import ChatOpenAI from langchain.embeddings import OpenAIEmbeddings from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder +from langchain.pydantic_v1 import BaseModel, Field from langchain.tools.retriever import create_retriever_tool from langchain.vectorstores import FAISS from langchain_experimental.tools import PythonAstREPLTool -from pydantic import BaseModel, Field MAIN_DIR = Path(__file__).parents[1] @@ -49,7 +49,7 @@ class PythonInputs(BaseModel): query: str = Field(description="code snippet to run") -df = pd.read_csv("titanic.csv") +df = pd.read_csv(MAIN_DIR / "titanic.csv") template = TEMPLATE.format(dhead=df.head().to_markdown()) prompt = ChatPromptTemplate.from_messages( @@ -72,4 +72,13 @@ agent = OpenAIFunctionsAgent( ) agent_executor = AgentExecutor( agent=agent, tools=tools, max_iterations=5, early_stopping_method="generate" -) +)| (lambda x: x["output"]) + +# Typing for playground inputs + + +class AgentInputs(BaseModel): + input: str + + +agent_executor = agent_executor.with_types(input_type=AgentInputs) diff --git a/templates/csv-agent/pyproject.toml b/templates/csv-agent/pyproject.toml index b121d4d1481..b318ac72bb9 100644 --- a/templates/csv-agent/pyproject.toml +++ b/templates/csv-agent/pyproject.toml @@ -1,5 +1,5 @@ [tool.poetry] -name = "csv_agent" +name = "csv-agent" version = "0.0.1" description = "" authors = [] @@ -15,11 +15,11 @@ pandas = "^2.1.1" setuptools = "^68.2.2" tabulate = "^0.9.0" pydantic = "<2" -langchain-experimental = "^0.0.32" +langchain-experimental = "^0.0.36" [tool.langserve] -export_module = "csv_agent.chain" -export_attr = "chain" +export_module = "csv_agent.agent" +export_attr = "agent_executor" [build-system] diff --git a/templates/csv-agent/titanic_data/index.faiss b/templates/csv-agent/titanic_data/index.faiss index ef979cf6620..b7bfe99b6c1 100644 Binary files a/templates/csv-agent/titanic_data/index.faiss and b/templates/csv-agent/titanic_data/index.faiss differ diff --git a/templates/csv-agent/titanic_data/index.pkl b/templates/csv-agent/titanic_data/index.pkl new file mode 100644 index 00000000000..4b8073486a2 Binary files /dev/null and b/templates/csv-agent/titanic_data/index.pkl differ diff --git a/templates/docs/CONTRIBUTING.md b/templates/docs/CONTRIBUTING.md index 2d4f0d1f49e..c352e780627 100644 --- a/templates/docs/CONTRIBUTING.md +++ b/templates/docs/CONTRIBUTING.md @@ -6,7 +6,7 @@ If you need any help at all, please reach out! To contribute a new template, first fork this repository. Then clone that fork and pull it down locally. -Set up an appropriate dev environment, and make sure you are in this `template` directory. +Set up an appropriate dev environment, and make sure you are in this `templates` directory. Make sure you have `langchain-cli` installed. diff --git a/templates/elastic-query-generator/README.md b/templates/elastic-query-generator/README.md index de5fedcc4b5..02cdd8ffbfe 100644 --- a/templates/elastic-query-generator/README.md +++ b/templates/elastic-query-generator/README.md @@ -20,9 +20,12 @@ Create a free trial account on [Elastic Cloud](https://cloud.elastic.co/registra With a deployment, update the connection string. -Password and connection (elasticsearch url) can be found on the deployment console. +Password and connection (elasticsearch url) can be found on the deployment console. 
Th -```bash -> export ELASTIC_SEARCH_SERVER="https://elastic:@" +## Populating with data If you want to populate the DB with some example info, you can run `python ingest.py`. + +This will create a `customers` index. +In the chain, we specify indexes to generate queries against, and we specify `["customers"]`. +This is specific to setting up your Elastic index in this diff --git a/templates/elastic-query-generator/elastic_query_generator/chain.py b/templates/elastic-query-generator/elastic_query_generator/chain.py index aa4575ed027..0e29b359674 100644 --- a/templates/elastic-query-generator/elastic_query_generator/chain.py +++ b/templates/elastic-query-generator/elastic_query_generator/chain.py @@ -1,26 +1,47 @@ -import os -from pathlib import Path - from elasticsearch import Elasticsearch from langchain.chat_models import ChatOpenAI from langchain.output_parsers.json import SimpleJsonOutputParser +from langchain.pydantic_v1 import BaseModel from .elastic_index_info import get_indices_infos from .prompts import DSL_PROMPT -es_host = os.environ["ELASTIC_SEARCH_SERVER"] -es_password = os.environ["ELASTIC_PASSWORD"] +# Setup Elasticsearch +# This shows how to set it up for a cloud hosted version +# Password for the 'elastic' user generated by Elasticsearch +ELASTIC_PASSWORD = "..." + +# Found in the 'Manage Deployment' page +CLOUD_ID = "..." + +# Create the client instance db = Elasticsearch( - es_host, - http_auth=('elastic', es_password), - ca_certs=Path(__file__).parents[1] / 'http_ca.crt' # Replace with your actual path + cloud_id=CLOUD_ID, + basic_auth=("elastic", ELASTIC_PASSWORD) ) +# Specify indices to include +# If you want to use your own indices, you will need to change this. +INCLUDE_INDICES = ["customers"] + +# With the Elasticsearch connection created, we can now move on to the chain + _model = ChatOpenAI(temperature=0, model="gpt-4") chain = { "input": lambda x: x["input"], - "indices_info": lambda _: get_indices_infos(db), + # This line only gets index info for the "customers" index. + # If you are running this on your own data, you will want to change this.
+ "indices_info": lambda _: get_indices_infos(db, include_indices=INCLUDE_INDICES), "top_k": lambda x: x.get("top_k", 5), } | DSL_PROMPT | _model | SimpleJsonOutputParser() + + +# Nicely typed inputs for playground +class ChainInputs(BaseModel): + input: str + top_k: int = 5 + + +chain = chain.with_types(input_type=ChainInputs) diff --git a/templates/elastic-query-generator/elastic_query_generator/elastic_index_info.py b/templates/elastic-query-generator/elastic_query_generator/elastic_index_info.py index db328389eac..91c33ca8df2 100644 --- a/templates/elastic-query-generator/elastic_query_generator/elastic_index_info.py +++ b/templates/elastic-query-generator/elastic_query_generator/elastic_index_info.py @@ -13,8 +13,18 @@ def _list_indices(database, include_indices=None, ignore_indices=None) -> List[s return all_indices -def get_indices_infos(database, sample_documents_in_index_info=5) -> str: - indices = _list_indices(database) + +def get_indices_infos( + database, + sample_documents_in_index_info=5, + include_indices=None, + ignore_indices=None +) -> str: + indices = _list_indices( + database, + include_indices=include_indices, + ignore_indices=ignore_indices + ) mappings = database.indices.get_mapping(index=",".join(indices)) if sample_documents_in_index_info > 0: for k, v in mappings.items(): diff --git a/templates/elastic-query-generator/ingest.py b/templates/elastic-query-generator/ingest.py index 757a2c6a1ba..a3b64938c74 100644 --- a/templates/elastic-query-generator/ingest.py +++ b/templates/elastic-query-generator/ingest.py @@ -1,14 +1,18 @@ -import os - from elasticsearch import Elasticsearch -es_host = os.environ["ELASTIC_SEARCH_SERVER"] -es_password = os.environ["ELASTIC_PASSWORD"] +# Setup Elasticsearch +# This shows how to set it up for a cloud hosted version +# Password for the 'elastic' user generated by Elasticsearch +ELASTIC_PASSWORD = "..." + +# Found in the 'Manage Deployment' page +CLOUD_ID = "..." + +# Create the client instance db = Elasticsearch( - es_host, - http_auth=('elastic', es_password), - ca_certs='http_ca.crt' # Replace with your actual path + cloud_id=CLOUD_ID, + basic_auth=("elastic", ELASTIC_PASSWORD) ) customers = [ diff --git a/templates/elastic-query-generator/pyproject.toml b/templates/elastic-query-generator/pyproject.toml index 5b4bc4d45b3..09bf1a0cf6f 100644 --- a/templates/elastic-query-generator/pyproject.toml +++ b/templates/elastic-query-generator/pyproject.toml @@ -1,5 +1,5 @@ [tool.poetry] -name = "elastic_query_generator" +name = "elastic-query-generator" version = "0.0.1" description = "" authors = [] @@ -9,6 +9,7 @@ readme = "README.md" python = ">=3.8.1,<4.0" langchain = ">=0.0.313" elasticsearch = "^8.10.1" +openai = ">=0.27.9" [tool.langserve] export_module = "elastic_query_generator.chain" diff --git a/templates/anthropic-functions/LICENSE b/templates/extraction-anthropic-functions/LICENSE similarity index 100% rename from templates/anthropic-functions/LICENSE rename to templates/extraction-anthropic-functions/LICENSE diff --git a/templates/extraction-anthropic-functions/README.md b/templates/extraction-anthropic-functions/README.md new file mode 100644 index 00000000000..f735f183a28 --- /dev/null +++ b/templates/extraction-anthropic-functions/README.md @@ -0,0 +1,14 @@ +# Extraction with Anthropic Function Calling + +This template enables [Anthropic function calling](https://python.langchain.com/docs/integrations/chat/extraction_anthropic_functions). 
+This is a wrapper around Anthropic's API that uses prompting and output parsing to replicate the OpenAI functions experience. + +Specify the information you want to extract in `chain.py` + +By default, it will extract the title and author of papers. + +## LLM + +This template will use `Claude2` by default. + +Be sure that `ANTHROPIC_API_KEY` is set in your environment. diff --git a/templates/anthropic-functions/anthropic_functions.ipynb b/templates/extraction-anthropic-functions/extraction_anthropic_functions.ipynb similarity index 89% rename from templates/anthropic-functions/anthropic_functions.ipynb rename to templates/extraction-anthropic-functions/extraction_anthropic_functions.ipynb index 938a22a19d9..2204b91ff6d 100644 --- a/templates/anthropic-functions/anthropic_functions.ipynb +++ b/templates/extraction-anthropic-functions/extraction_anthropic_functions.ipynb @@ -31,7 +31,7 @@ "\n", "As shown in the README, add template and start server:\n", "```\n", - "langchain serve add anthropic-functions\n", + "langchain serve add extraction-anthropic-functions\n", "langchain start\n", "```\n", "\n", @@ -41,7 +41,7 @@ "\n", "And specifically at our loaded template:\n", "\n", - "http://127.0.0.1:8000/docs#/default/invoke_anthropic-functions_invoke_post\n", + "http://127.0.0.1:8000/docs#/default/invoke_extraction-anthropic-functions_invoke_post\n", " \n", "We can also use remote runnable to call it:" ] @@ -54,7 +54,7 @@ "outputs": [], "source": [ "from langserve.client import RemoteRunnable\n", - "anthropic_function_model = RemoteRunnable('http://localhost:8000/anthropic-functions')\n", + "anthropic_function_model = RemoteRunnable('http://localhost:8000/extraction-anthropic-functions')\n", "anthropic_function_model.invoke(text[0].page_content[0:1500])" ] } diff --git a/templates/extraction-anthropic-functions/extraction_anthropic_functions/__init__.py b/templates/extraction-anthropic-functions/extraction_anthropic_functions/__init__.py new file mode 100644 index 00000000000..558da85cce7 --- /dev/null +++ b/templates/extraction-anthropic-functions/extraction_anthropic_functions/__init__.py @@ -0,0 +1,3 @@ +from extraction_anthropic_functions.chain import chain + +__all__ = ["chain"] diff --git a/templates/extraction-anthropic-functions/extraction_anthropic_functions/chain.py b/templates/extraction-anthropic-functions/extraction_anthropic_functions/chain.py new file mode 100644 index 00000000000..ff43134630b --- /dev/null +++ b/templates/extraction-anthropic-functions/extraction_anthropic_functions/chain.py @@ -0,0 +1,38 @@ +from typing import List, Optional + +from langchain.output_parsers.openai_functions import JsonKeyOutputFunctionsParser +from langchain.prompts import ChatPromptTemplate +from langchain.pydantic_v1 import BaseModel +from langchain.utils.openai_functions import convert_pydantic_to_openai_function +from langchain_experimental.llms.anthropic_functions import AnthropicFunctions + +template = """An article will be passed to you. Extract from it all papers that are mentioned by this article. + +Do not extract the name of the article itself. If no papers are mentioned that's fine - you don't need to extract any! Just return an empty list. + +Do not make up or guess ANY extra information.
Only extract what exactly is in the text.""" # noqa: E501 + +prompt = ChatPromptTemplate.from_messages([("system", template), ("human", "{input}")]) + + +# Function output schema +class Paper(BaseModel): + """Information about papers mentioned.""" + + title: str + author: Optional[str] + + +class Info(BaseModel): + """Information to extract""" + + papers: List[Paper] + + +# Function definition +model = AnthropicFunctions() +function = [convert_pydantic_to_openai_function(Info)] + +chain = prompt | model.bind( + functions=function, function_call={"name": "Info"} +) | JsonKeyOutputFunctionsParser(key_name="papers") diff --git a/templates/anthropic-functions/poetry.lock b/templates/extraction-anthropic-functions/poetry.lock similarity index 100% rename from templates/anthropic-functions/poetry.lock rename to templates/extraction-anthropic-functions/poetry.lock diff --git a/templates/anthropic-functions/pyproject.toml b/templates/extraction-anthropic-functions/pyproject.toml similarity index 74% rename from templates/anthropic-functions/pyproject.toml rename to templates/extraction-anthropic-functions/pyproject.toml index e37a4f0004c..d6077b1ef3f 100644 --- a/templates/anthropic-functions/pyproject.toml +++ b/templates/extraction-anthropic-functions/pyproject.toml @@ -1,5 +1,5 @@ [tool.poetry] -name = "anthropic-function" +name = "extraction-anthropic-functions" version = "0.1.0" description = "" authors = ["Lance Martin "] @@ -10,10 +10,10 @@ python = ">=3.8.1,<4.0" langchain = ">=0.0.322" anthropic = ">=0.5.0" langchainhub = ">=0.1.13" -langchain-experimental = "^0.0.33" +langchain-experimental = "^0.0.36" [tool.langserve] -export_module = "anthropic_functions" +export_module = "extraction_anthropic_functions" export_attr = "chain" [build-system] diff --git a/templates/anthropic-functions/tests/__init__.py b/templates/extraction-anthropic-functions/tests/__init__.py similarity index 100% rename from templates/anthropic-functions/tests/__init__.py rename to templates/extraction-anthropic-functions/tests/__init__.py diff --git a/templates/hyde/hyde/chain.py b/templates/hyde/hyde/chain.py index 915c994d6c3..a6503c9447f 100644 --- a/templates/hyde/hyde/chain.py +++ b/templates/hyde/hyde/chain.py @@ -1,8 +1,9 @@ from langchain.chat_models import ChatOpenAI from langchain.embeddings import OpenAIEmbeddings from langchain.prompts import ChatPromptTemplate +from langchain.pydantic_v1 import BaseModel from langchain.schema.output_parser import StrOutputParser -from langchain.schema.runnable import RunnableParallel, RunnablePassthrough +from langchain.schema.runnable import RunnableParallel from langchain.vectorstores import Chroma from hyde.prompts import hyde_prompt @@ -47,21 +48,28 @@ prompt = ChatPromptTemplate.from_template(template) # LLM model = ChatOpenAI() +# Query transformation chain +# This transforms the query into the hypothetical document +hyde_chain = hyde_prompt | model | StrOutputParser() + # RAG chain chain = ( RunnableParallel( { - # Configure the input, pass it the prompt, pass that to the model, - # and then the result to the retriever - "context": {"input": RunnablePassthrough()} - | hyde_prompt - | model - | StrOutputParser() - | retriever, - "question": RunnablePassthrough(), + # Generate a hypothetical document and then pass it to the retriever + "context": hyde_chain | retriever, + "question": lambda x: x["question"], } ) | prompt | model | StrOutputParser() ) + + +# Add input types for playground +class ChainInput(BaseModel): + question: str + + +chain = 
chain.with_types(input_type=ChainInput) diff --git a/templates/hyde/hyde/prompts.py b/templates/hyde/hyde/prompts.py index b6d2f0881b0..8da1a88c59c 100644 --- a/templates/hyde/hyde/prompts.py +++ b/templates/hyde/hyde/prompts.py @@ -3,16 +3,16 @@ from langchain.prompts.prompt import PromptTemplate # There are a few different templates to choose from # These are just different ways to generate hypothetical documents web_search_template = """Please write a passage to answer the question -Question: {input} +Question: {question} Passage:""" sci_fact_template = """Please write a scientific paper passage to support/refute the claim -Claim: {input} +Claim: {question} Passage:""" # noqa: E501 fiqa_template = """Please write a financial article passage to answer the question -Question: {input} +Question: {question} Passage:""" trec_news_template = """Please write a news passage about the topic. -Topic: {input} +Topic: {question} Passage:""" # For the sake of this example we will use the web search template diff --git a/templates/hyde/pyproject.toml b/templates/hyde/pyproject.toml index a678d8fe1f4..c184ae12daf 100644 --- a/templates/hyde/pyproject.toml +++ b/templates/hyde/pyproject.toml @@ -9,6 +9,8 @@ readme = "README.md" python = ">=3.8.1,<4.0" langchain = ">=0.0.313, <0.1" openai = "^0.28.1" +chromadb = "^0.4.15" +tiktoken = "^0.5.1" [tool.poetry.group.dev.dependencies] poethepoet = "^0.24.1" diff --git a/templates/llama2-functions/llama2_functions/chain.py b/templates/llama2-functions/llama2_functions/chain.py index 80537540d56..0a377f8a089 100644 --- a/templates/llama2-functions/llama2_functions/chain.py +++ b/templates/llama2-functions/llama2_functions/chain.py @@ -1,4 +1,3 @@ -from langchain.chat_models import ChatOpenAI from langchain.llms import Replicate from langchain.prompts import ChatPromptTemplate @@ -39,8 +38,7 @@ Respond with json that adheres to the following jsonschema: prompt = ChatPromptTemplate.from_messages([("system", template), ("human", "{input}")]) -# Chain -model = ChatOpenAI() +# Chain chain = ( prompt | model diff --git a/templates/neo4j-cypher-ft/pyproject.toml b/templates/neo4j-cypher-ft/pyproject.toml index 570f4088a0f..f7107215276 100644 --- a/templates/neo4j-cypher-ft/pyproject.toml +++ b/templates/neo4j-cypher-ft/pyproject.toml @@ -1,5 +1,5 @@ [tool.poetry] -name = "neo4j_cypher" +name = "neo4j-cypher-ft" version = "0.1.0" description = "" authors = ["Tomaz Bratanic "] @@ -12,7 +12,7 @@ neo4j = ">5.12" openai = "^0.28.1" [tool.langserve] -export_module = "neo4j.cypher" +export_module = "neo4j_cypher_ft" export_attr = "chain" [build-system] diff --git a/templates/neo4j-cypher/pyproject.toml b/templates/neo4j-cypher/pyproject.toml index 570f4088a0f..a3cf3cc8eea 100644 --- a/templates/neo4j-cypher/pyproject.toml +++ b/templates/neo4j-cypher/pyproject.toml @@ -12,7 +12,7 @@ neo4j = ">5.12" openai = "^0.28.1" [tool.langserve] -export_module = "neo4j.cypher" +export_module = "neo4j_cypher" export_attr = "chain" [build-system] diff --git a/templates/neo4j-generation/pyproject.toml b/templates/neo4j-generation/pyproject.toml index f49693f37dc..b26d853e1e6 100644 --- a/templates/neo4j-generation/pyproject.toml +++ b/templates/neo4j-generation/pyproject.toml @@ -1,5 +1,5 @@ [tool.poetry] -name = "neo4j_generation" +name = "neo4j-generation" version = "0.0.1" description = "" authors = ["Tomaz Bratanic "] diff --git a/templates/neo4j-parent/ingest.py b/templates/neo4j-parent/ingest.py index f6a3f27e4ae..e1b4d05ac63 100644 --- a/templates/neo4j-parent/ingest.py +++ 
b/templates/neo4j-parent/ingest.py @@ -11,7 +11,7 @@ txt_path = Path(__file__).parent / "dune.txt" graph = Neo4jGraph() # Load the text file -loader = TextLoader(txt_path) +loader = TextLoader(str(txt_path)) documents = loader.load() # Define chunking strategy diff --git a/templates/neo4j-parent/neo4j_parent/chain.py b/templates/neo4j-parent/neo4j_parent/chain.py index 59acbd662f8..60ed5fed535 100644 --- a/templates/neo4j-parent/neo4j_parent/chain.py +++ b/templates/neo4j-parent/neo4j_parent/chain.py @@ -1,6 +1,7 @@ from langchain.chat_models import ChatOpenAI from langchain.embeddings import OpenAIEmbeddings from langchain.prompts import ChatPromptTemplate +from langchain.pydantic_v1 import BaseModel from langchain.schema.output_parser import StrOutputParser from langchain.schema.runnable import RunnableParallel, RunnablePassthrough from langchain.vectorstores import Neo4jVector @@ -34,4 +35,10 @@ chain = ( | prompt | model | StrOutputParser() -) \ No newline at end of file +) + +# Add typing for input +class Question(BaseModel): + __root__: str + +chain = chain.with_types(input_type=Question) diff --git a/templates/neo4j-parent/poetry.lock b/templates/neo4j-parent/poetry.lock index 46acd442ce9..c2d81d69e3a 100644 --- a/templates/neo4j-parent/poetry.lock +++ b/templates/neo4j-parent/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "aiohttp" version = "3.8.6" description = "Async http client/server framework (asyncio)" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -113,7 +112,6 @@ speedups = ["Brotli", "aiodns", "cchardet"] name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -128,7 +126,6 @@ frozenlist = ">=1.1.0" name = "annotated-types" version = "0.6.0" description = "Reusable constraint types to use with typing.Annotated" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -143,7 +140,6 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} name = "anyio" version = "3.7.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -165,7 +161,6 @@ trio = ["trio (<0.22)"] name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -177,7 +172,6 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -196,7 +190,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -208,7 +201,6 @@ files = [ name = "charset-normalizer" version = "3.3.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -304,11 +296,21 @@ files = [ {file = "charset_normalizer-3.3.1-py3-none-any.whl", hash = "sha256:800561453acdecedaac137bf09cd719c7a440b6800ec182f077bb8e7025fb708"}, ] +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + [[package]] name = "dataclasses-json" version = "0.6.1" description = "Easily serialize dataclasses to and from JSON." -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -324,7 +326,6 @@ typing-inspect = ">=0.4.0,<1" name = "exceptiongroup" version = "1.1.3" description = "Backport of PEP 654 (exception groups)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -339,7 +340,6 @@ test = ["pytest (>=6)"] name = "frozenlist" version = "1.4.0" description = "A list-like structure which implements collections.abc.MutableSequence" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -410,7 +410,6 @@ files = [ name = "greenlet" version = "3.0.0" description = "Lightweight in-process concurrent programming" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -486,7 +485,6 @@ test = ["objgraph", "psutil"] name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -498,7 +496,6 @@ files = [ name = "jsonpatch" version = "1.33" description = "Apply JSON-Patches (RFC 6902)" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ @@ -513,7 +510,6 @@ jsonpointer = ">=1.9" name = "jsonpointer" version = "2.4" description = "Identify specific nodes in a JSON document (RFC 6901)" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ @@ -525,7 +521,6 @@ files = [ name = "langchain" version = "0.0.320" description = "Building applications with LLMs through composability" -category = "main" optional = false python-versions = ">=3.8.1,<4.0" files = [ @@ -566,7 +561,6 @@ text-helpers = ["chardet (>=5.1.0,<6.0.0)"] name = "langsmith" version = "0.0.49" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." -category = "main" optional = false python-versions = ">=3.8.1,<4.0" files = [ @@ -582,7 +576,6 @@ requests = ">=2,<3" name = "marshmallow" version = "3.20.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -603,7 +596,6 @@ tests = ["pytest", "pytz", "simplejson"] name = "multidict" version = "6.0.4" description = "multidict implementation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -687,7 +679,6 @@ files = [ name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
-category = "main" optional = false python-versions = ">=3.5" files = [ @@ -695,11 +686,28 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "neo4j" +version = "5.14.0" +description = "Neo4j Bolt driver for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "neo4j-5.14.0.tar.gz", hash = "sha256:6040efca47126c01385f09e550fb7d7671b1853a1e1c34908aa3713cebd285da"}, +] + +[package.dependencies] +pytz = "*" + +[package.extras] +numpy = ["numpy (>=1.7.0,<2.0.0)"] +pandas = ["numpy (>=1.7.0,<2.0.0)", "pandas (>=1.1.0,<3.0.0)"] +pyarrow = ["pyarrow (>=1.0.0)"] + [[package]] name = "numpy" version = "1.24.4" description = "Fundamental package for array computing in Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -733,11 +741,32 @@ files = [ {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, ] +[[package]] +name = "openai" +version = "0.28.1" +description = "Python client library for the OpenAI API" +optional = false +python-versions = ">=3.7.1" +files = [ + {file = "openai-0.28.1-py3-none-any.whl", hash = "sha256:d18690f9e3d31eedb66b57b88c2165d760b24ea0a01f150dd3f068155088ce68"}, + {file = "openai-0.28.1.tar.gz", hash = "sha256:4be1dad329a65b4ce1a660fe6d5431b438f429b5855c883435f0f7fcb6d2dcc8"}, +] + +[package.dependencies] +aiohttp = "*" +requests = ">=2.20" +tqdm = "*" + +[package.extras] +datalib = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +dev = ["black (>=21.6b0,<22.0)", "pytest (==6.*)", "pytest-asyncio", "pytest-mock"] +embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"] +wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"] + [[package]] name = "packaging" version = "23.2" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -749,7 +778,6 @@ files = [ name = "pydantic" version = "2.4.2" description = "Data validation using Python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -769,7 +797,6 @@ email = ["email-validator (>=2.0.0)"] name = "pydantic-core" version = "2.10.1" description = "" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -884,11 +911,21 @@ files = [ [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" +[[package]] +name = "pytz" +version = "2023.3.post1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, + {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, +] + [[package]] name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -897,6 +934,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -904,8 +942,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -922,6 +967,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -929,6 +975,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -938,7 +985,6 @@ files = [ name = "regex" version = "2023.10.3" description = "Alternative regular expression module, to replace re." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1036,7 +1082,6 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1058,7 +1103,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1070,7 +1114,6 @@ files = [ name = "sqlalchemy" version = "2.0.22" description = "Database Abstraction Library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1157,7 +1200,6 @@ sqlcipher = ["sqlcipher3-binary"] name = "tenacity" version = "8.2.3" description = "Retry code until it succeeds" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1172,7 +1214,6 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"] name = "tiktoken" version = "0.5.1" description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1214,11 +1255,30 @@ requests = ">=2.26.0" [package.extras] blobfile = ["blobfile (>=2)"] +[[package]] +name = "tqdm" +version = "4.66.1" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, + {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + [[package]] name = "typing-extensions" version = "4.8.0" description = "Backported and Experimental Type Hints for Python 3.8+" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1230,7 +1290,6 @@ files = [ name = "typing-inspect" version = "0.9.0" description = "Runtime inspection utilities for typing module." -category = "main" optional = false python-versions = "*" files = [ @@ -1246,7 +1305,6 @@ typing-extensions = ">=3.7.4" name = "urllib3" version = "2.0.7" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1264,7 +1322,6 @@ zstd = ["zstandard (>=0.18.0)"] name = "yarl" version = "1.9.2" description = "Yet another URL library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1351,4 +1408,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "5f7ec22b74a2fbd7fd55bad55b08e19a23632286ed3ccba147ef05dd3276175b" +content-hash = "e28c9b86457ac18538b8019550fd52a3a39dd128033caedbd8436c1099918765" diff --git a/templates/neo4j-parent/pyproject.toml b/templates/neo4j-parent/pyproject.toml index 00cb10c3a62..7e05d7b0d4a 100644 --- a/templates/neo4j-parent/pyproject.toml +++ b/templates/neo4j-parent/pyproject.toml @@ -1,5 +1,5 @@ [tool.poetry] -name = "neo4j_parent" +name = "neo4j-parent" version = "0.1.0" description = "" authors = ["Tomaz Bratanic "] @@ -9,6 +9,8 @@ readme = "README.md" python = ">=3.8.1,<4.0" langchain = ">=0.0.320" tiktoken = "^0.5.1" +openai = "^0.28.1" +neo4j = "^5.14.0" [tool.langserve] export_module = "neo4j_parent" diff --git a/templates/rag-aws-bedrock/rag_aws_bedrock/chain.py b/templates/rag-aws-bedrock/rag_aws_bedrock/chain.py index eb5f7e051e7..d2a5e02ffa8 100644 --- a/templates/rag-aws-bedrock/rag_aws_bedrock/chain.py +++ b/templates/rag-aws-bedrock/rag_aws_bedrock/chain.py @@ -3,6 +3,7 @@ import os from langchain.embeddings import BedrockEmbeddings from langchain.llms.bedrock import Bedrock from langchain.prompts import ChatPromptTemplate +from langchain.pydantic_v1 import BaseModel from langchain.schema.output_parser import StrOutputParser from langchain.schema.runnable import RunnableParallel, RunnablePassthrough from langchain.vectorstores import FAISS @@ -46,4 +47,10 @@ chain = ( | prompt | model | StrOutputParser() -) \ No newline at end of file +) + +# Add typing for input +class Question(BaseModel): + __root__: str + +chain = chain.with_types(input_type=Question) diff --git a/templates/rag-aws-kendra/rag_aws_kendra/chain.py b/templates/rag-aws-kendra/rag_aws_kendra/chain.py index d407e425555..f4be90a6d11 100644 --- a/templates/rag-aws-kendra/rag_aws_kendra/chain.py +++ b/templates/rag-aws-kendra/rag_aws_kendra/chain.py @@ -2,6 +2,7 @@ import os from langchain.llms.bedrock import Bedrock from langchain.prompts import ChatPromptTemplate +from langchain.pydantic_v1 import BaseModel from langchain.retrievers import AmazonKendraRetriever from langchain.schema.output_parser import StrOutputParser from langchain.schema.runnable import RunnableParallel, RunnablePassthrough @@ -46,4 +47,10 @@ chain = ( | prompt | model | StrOutputParser() -) \ No newline at end of file +) + +# Add typing for input +class Question(BaseModel): + __root__: str + +chain = chain.with_types(input_type=Question) diff --git a/templates/rag-chroma-private/README.md b/templates/rag-chroma-private/README.md index e7f97795f04..a3b855a2fec 100644 --- a/templates/rag-chroma-private/README.md +++ b/templates/rag-chroma-private/README.md @@ -11,7 +11,7 @@ Follow instructions [here](https://python.langchain.com/docs/integrations/chat/o The instructions also show how to download your LLM of interest with Ollama: -* This template uses `llama2:13b-chat` +* This template uses `llama2:7b-chat` * But you can pick from many [here](https://ollama.ai/library) ## Set up local embeddings diff --git a/templates/rag-chroma-private/rag_chroma_private/chain.py b/templates/rag-chroma-private/rag_chroma_private/chain.py index 397a1589b67..b207d06033d 100644 --- 
a/templates/rag-chroma-private/rag_chroma_private/chain.py +++ b/templates/rag-chroma-private/rag_chroma_private/chain.py @@ -4,6 +4,7 @@ from langchain.chat_models import ChatOllama from langchain.document_loaders import WebBaseLoader from langchain.embeddings import GPT4AllEmbeddings from langchain.prompts import ChatPromptTemplate +from langchain.pydantic_v1 import BaseModel from langchain.schema.output_parser import StrOutputParser from langchain.schema.runnable import RunnableParallel, RunnablePassthrough from langchain.text_splitter import RecursiveCharacterTextSplitter @@ -39,7 +40,7 @@ prompt = ChatPromptTemplate.from_template(template) # LLM # Select the LLM that you downloaded -ollama_llm = "llama2:13b-chat" +ollama_llm = "llama2:7b-chat" model = ChatOllama(model=ollama_llm) # RAG chain @@ -49,3 +50,9 @@ chain = ( | model | StrOutputParser() ) + +# Add typing for input +class Question(BaseModel): + __root__: str + +chain = chain.with_types(input_type=Question) diff --git a/templates/rag-chroma/rag_chroma/chain.py b/templates/rag-chroma/rag_chroma/chain.py index 96a46fd4c68..5e4b0aa8e13 100644 --- a/templates/rag-chroma/rag_chroma/chain.py +++ b/templates/rag-chroma/rag_chroma/chain.py @@ -1,6 +1,7 @@ from langchain.chat_models import ChatOpenAI from langchain.embeddings import OpenAIEmbeddings from langchain.prompts import ChatPromptTemplate +from langchain.pydantic_v1 import BaseModel from langchain.schema.output_parser import StrOutputParser from langchain.schema.runnable import RunnableParallel, RunnablePassthrough from langchain.vectorstores import Chroma @@ -52,3 +53,10 @@ chain = ( | model | StrOutputParser() ) + +# Add typing for input +class Question(BaseModel): + __root__: str + +chain = chain.with_types(input_type=Question) + diff --git a/templates/rag-conversation/rag_conversation/chain.py b/templates/rag-conversation/rag_conversation/chain.py index 3ed98288b4c..301c7d8263a 100644 --- a/templates/rag-conversation/rag_conversation/chain.py +++ b/templates/rag-conversation/rag_conversation/chain.py @@ -26,7 +26,7 @@ if os.environ.get("PINECONE_ENVIRONMENT", None) is None: PINECONE_INDEX_NAME = os.environ.get("PINECONE_INDEX", "langchain-test") ### Ingest code - you may need to run this the first time -# Load +# # Load # from langchain.document_loaders import WebBaseLoader # loader = WebBaseLoader("https://lilianweng.github.io/posts/2023-06-23-agent/") # data = loader.load() diff --git a/templates/rag-fusion/ingest.py b/templates/rag-fusion/ingest.py index 9604658e4da..769d2465577 100644 --- a/templates/rag-fusion/ingest.py +++ b/templates/rag-fusion/ingest.py @@ -1,9 +1,6 @@ -import pinecone from langchain.embeddings import OpenAIEmbeddings from langchain.vectorstores import Pinecone -pinecone.init(api_key="...", environment="...") - all_documents = { "doc1": "Climate change and economic impact.", "doc2": "Public health concerns due to climate change.", diff --git a/templates/rag-fusion/rag_fusion/chain.py b/templates/rag-fusion/rag_fusion/chain.py index 6d8f2892d2c..8d1830c74ff 100644 --- a/templates/rag-fusion/rag_fusion/chain.py +++ b/templates/rag-fusion/rag_fusion/chain.py @@ -1,8 +1,8 @@ -import pinecone from langchain import hub from langchain.chat_models import ChatOpenAI from langchain.embeddings import OpenAIEmbeddings from langchain.load import dumps, loads +from langchain.pydantic_v1 import BaseModel from langchain.schema.output_parser import StrOutputParser from langchain.vectorstores import Pinecone @@ -24,8 +24,6 @@ def reciprocal_rank_fusion(results: 
list[list], k=60): return reranked_results -pinecone.init(api_key="...", environment="...") - prompt = hub.pull("langchain-ai/rag-fusion-query-generation") generate_queries = ( @@ -41,3 +39,12 @@ chain = ( | retriever.map() | reciprocal_rank_fusion ) + +# Add typed inputs to chain for playground + + +class Question(BaseModel): + __root__: str + + +chain = chain.with_types(input_type=Question) diff --git a/templates/rewrite-retrieve-read/poetry.lock b/templates/rewrite-retrieve-read/poetry.lock index f83643f444a..0e48bdb0f0b 100644 --- a/templates/rewrite-retrieve-read/poetry.lock +++ b/templates/rewrite-retrieve-read/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "aiofiles" version = "23.2.1" description = "File support for asyncio." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -16,7 +15,6 @@ files = [ name = "aiohttp" version = "3.8.6" description = "Async http client/server framework (asyncio)" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -125,7 +123,6 @@ speedups = ["Brotli", "aiodns", "cchardet"] name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -140,7 +137,6 @@ frozenlist = ">=1.1.0" name = "annotated-types" version = "0.6.0" description = "Reusable constraint types to use with typing.Annotated" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -155,7 +151,6 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} name = "anyio" version = "3.7.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -177,7 +172,6 @@ trio = ["trio (<0.22)"] name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -189,7 +183,6 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -208,7 +201,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "brotli" version = "1.1.0" description = "Python bindings for the Brotli compression library" -category = "main" optional = false python-versions = "*" files = [ @@ -301,7 +293,6 @@ files = [ name = "brotlicffi" version = "1.1.0.0" description = "Python CFFI bindings to the Brotli library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -341,7 +332,6 @@ cffi = ">=1.0.0" name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -353,7 +343,6 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -418,7 +407,6 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -518,7 +506,6 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -533,7 +520,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -545,7 +531,6 @@ files = [ name = "dataclasses-json" version = "0.6.1" description = "Easily serialize dataclasses to and from JSON." -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -561,7 +546,6 @@ typing-inspect = ">=0.4.0,<1" name = "duckduckgo-search" version = "3.9.3" description = "Search for words, documents, images, news, maps and text translation using the DuckDuckGo.com search engine." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -582,7 +566,6 @@ dev = ["black (>=23.9.1)", "isort (>=5.12.0)", "pytest (>=7.4.2)", "pytest-async name = "exceptiongroup" version = "1.1.3" description = "Backport of PEP 654 (exception groups)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -597,7 +580,6 @@ test = ["pytest (>=6)"] name = "frozenlist" version = "1.4.0" description = "A list-like structure which implements collections.abc.MutableSequence" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -668,7 +650,6 @@ files = [ name = "greenlet" version = "3.0.0" description = "Lightweight in-process concurrent programming" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -744,7 +725,6 @@ test = ["objgraph", "psutil"] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -756,7 +736,6 @@ files = [ name = "h2" version = "4.1.0" description = "HTTP/2 State-Machine based protocol implementation" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -772,7 +751,6 @@ hyperframe = ">=6.0,<7" name = "hpack" version = "4.0.0" description = "Pure-Python HPACK header compression" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -784,7 +762,6 @@ files = [ name = "httpcore" version = "0.18.0" description = "A minimal low-level HTTP client." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -796,17 +773,16 @@ files = [ anyio = ">=3.0,<5.0" certifi = "*" h11 = ">=0.13,<0.15" -sniffio = ">=1.0.0,<2.0.0" +sniffio = "==1.*" [package.extras] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "httpx" version = "0.25.0" description = "The next generation HTTP client." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -822,19 +798,18 @@ h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""} httpcore = ">=0.18.0,<0.19.0" idna = "*" sniffio = "*" -socksio = {version = ">=1.0.0,<2.0.0", optional = true, markers = "extra == \"socks\""} +socksio = {version = "==1.*", optional = true, markers = "extra == \"socks\""} [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "hyperframe" version = "6.0.1" description = "HTTP/2 framing layer for Python" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -846,7 +821,6 @@ files = [ name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -858,7 +832,6 @@ files = [ name = "jsonpatch" version = "1.33" description = "Apply JSON-Patches (RFC 6902)" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ @@ -873,18 +846,17 @@ jsonpointer = ">=1.9" name = "jsonpointer" version = "2.4" description = "Identify specific nodes in a JSON document (RFC 6901)" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, ] [[package]] name = "langchain" version = "0.0.316" description = "Building applications with LLMs through composability" -category = "main" optional = false python-versions = ">=3.8.1,<4.0" files = [ @@ -925,7 +897,6 @@ text-helpers = ["chardet (>=5.1.0,<6.0.0)"] name = "langsmith" version = "0.0.44" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." -category = "main" optional = false python-versions = ">=3.8.1,<4.0" files = [ @@ -941,7 +912,6 @@ requests = ">=2,<3" name = "lxml" version = "4.9.3" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" files = [ @@ -1049,7 +1019,6 @@ source = ["Cython (>=0.29.35)"] name = "marshmallow" version = "3.20.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1070,7 +1039,6 @@ tests = ["pytest", "pytz", "simplejson"] name = "multidict" version = "6.0.4" description = "multidict implementation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1154,7 +1122,6 @@ files = [ name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
-category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1166,7 +1133,6 @@ files = [ name = "numpy" version = "1.24.4" description = "Fundamental package for array computing in Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1200,11 +1166,32 @@ files = [ {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, ] +[[package]] +name = "openai" +version = "0.28.1" +description = "Python client library for the OpenAI API" +optional = false +python-versions = ">=3.7.1" +files = [ + {file = "openai-0.28.1-py3-none-any.whl", hash = "sha256:d18690f9e3d31eedb66b57b88c2165d760b24ea0a01f150dd3f068155088ce68"}, + {file = "openai-0.28.1.tar.gz", hash = "sha256:4be1dad329a65b4ce1a660fe6d5431b438f429b5855c883435f0f7fcb6d2dcc8"}, +] + +[package.dependencies] +aiohttp = "*" +requests = ">=2.20" +tqdm = "*" + +[package.extras] +datalib = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +dev = ["black (>=21.6b0,<22.0)", "pytest (==6.*)", "pytest-asyncio", "pytest-mock"] +embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"] +wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"] + [[package]] name = "packaging" version = "23.2" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1216,7 +1203,6 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1228,7 +1214,6 @@ files = [ name = "pydantic" version = "2.4.2" description = "Data validation using Python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1248,7 +1233,6 @@ email = ["email-validator (>=2.0.0)"] name = "pydantic-core" version = "2.10.1" description = "" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1367,7 +1351,6 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1376,6 +1359,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1383,8 +1367,15 @@ files = [ {file = 
"PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1401,6 +1392,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1408,6 +1400,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = 
"PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1417,7 +1410,6 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1439,7 +1431,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1451,7 +1442,6 @@ files = [ name = "socksio" version = "1.0.0" description = "Sans-I/O implementation of SOCKS4, SOCKS4A, and SOCKS5." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1463,7 +1453,6 @@ files = [ name = "sqlalchemy" version = "2.0.22" description = "Database Abstraction Library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1550,7 +1539,6 @@ sqlcipher = ["sqlcipher3-binary"] name = "tenacity" version = "8.2.3" description = "Retry code until it succeeds" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1561,11 +1549,30 @@ files = [ [package.extras] doc = ["reno", "sphinx", "tornado (>=4.5)"] +[[package]] +name = "tqdm" +version = "4.66.1" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, + {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + [[package]] name = "typing-extensions" version = "4.8.0" description = "Backported and Experimental Type Hints for Python 3.8+" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1577,7 +1584,6 @@ files = [ name = "typing-inspect" version = "0.9.0" description = "Runtime inspection utilities for typing module." -category = "main" optional = false python-versions = "*" files = [ @@ -1593,7 +1599,6 @@ typing-extensions = ">=3.7.4" name = "urllib3" version = "2.0.7" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1611,7 +1616,6 @@ zstd = ["zstandard (>=0.18.0)"] name = "yarl" version = "1.9.2" description = "Yet another URL library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1698,4 +1702,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "0d7e52bfe3bb46c0af623eb3d498430a2b943ee8a68887e9274b7b4b7b044320" +content-hash = "34120d3b80ea28a1a3afdc497bda645809b2e9d3ea00a5b00cfab970f7a7ba07" diff --git a/templates/rewrite-retrieve-read/pyproject.toml b/templates/rewrite-retrieve-read/pyproject.toml index 23961b5d67d..5717f79a793 100644 --- a/templates/rewrite-retrieve-read/pyproject.toml +++ b/templates/rewrite-retrieve-read/pyproject.toml @@ -9,6 +9,7 @@ readme = "README.md" python = ">=3.8.1,<4.0" langchain = ">=0.0.313" duckduckgo-search = "^3.9.3" +openai = "^0.28.1" [tool.langserve] export_module = "rewrite_retrieve_read.chain" diff --git a/templates/rewrite-retrieve-read/rewrite_retrieve_read/chain.py b/templates/rewrite-retrieve-read/rewrite_retrieve_read/chain.py index 456db4204d3..4b8d06d16ce 100644 --- a/templates/rewrite-retrieve-read/rewrite_retrieve_read/chain.py +++ b/templates/rewrite-retrieve-read/rewrite_retrieve_read/chain.py @@ -1,6 +1,6 @@ - from langchain.chat_models import ChatOpenAI from langchain.prompts import ChatPromptTemplate +from langchain.pydantic_v1 import BaseModel from langchain.schema.output_parser import StrOutputParser from langchain.schema.runnable import RunnablePassthrough from langchain.utilities import DuckDuckGoSearchAPIWrapper @@ -40,3 +40,10 @@ chain = { "context": {"x": RunnablePassthrough()} | rewriter | retriever, "question": RunnablePassthrough() } | prompt | model | StrOutputParser() + +# Add input type for playground + +class Question(BaseModel): + __root__: str + +chain = chain.with_types(input_type=Question) diff --git a/templates/stepback-qa-prompting/poetry.lock b/templates/stepback-qa-prompting/poetry.lock index f83643f444a..0e48bdb0f0b 100644 --- a/templates/stepback-qa-prompting/poetry.lock +++ b/templates/stepback-qa-prompting/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "aiofiles" version = "23.2.1" description = "File support for asyncio." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -16,7 +15,6 @@ files = [ name = "aiohttp" version = "3.8.6" description = "Async http client/server framework (asyncio)" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -125,7 +123,6 @@ speedups = ["Brotli", "aiodns", "cchardet"] name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -140,7 +137,6 @@ frozenlist = ">=1.1.0" name = "annotated-types" version = "0.6.0" description = "Reusable constraint types to use with typing.Annotated" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -155,7 +151,6 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} name = "anyio" version = "3.7.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -177,7 +172,6 @@ trio = ["trio (<0.22)"] name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -189,7 +183,6 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -208,7 +201,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "brotli" version = "1.1.0" description = "Python bindings for the Brotli compression library" -category = "main" optional = false python-versions = "*" files = [ @@ -301,7 +293,6 @@ files = [ name = "brotlicffi" version = "1.1.0.0" description = "Python CFFI bindings to the Brotli library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -341,7 +332,6 @@ cffi = ">=1.0.0" name = "certifi" version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -353,7 +343,6 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -418,7 +407,6 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -518,7 +506,6 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -533,7 +520,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -545,7 +531,6 @@ files = [ name = "dataclasses-json" version = "0.6.1" description = "Easily serialize dataclasses to and from JSON." -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -561,7 +546,6 @@ typing-inspect = ">=0.4.0,<1" name = "duckduckgo-search" version = "3.9.3" description = "Search for words, documents, images, news, maps and text translation using the DuckDuckGo.com search engine." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -582,7 +566,6 @@ dev = ["black (>=23.9.1)", "isort (>=5.12.0)", "pytest (>=7.4.2)", "pytest-async name = "exceptiongroup" version = "1.1.3" description = "Backport of PEP 654 (exception groups)" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -597,7 +580,6 @@ test = ["pytest (>=6)"] name = "frozenlist" version = "1.4.0" description = "A list-like structure which implements collections.abc.MutableSequence" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -668,7 +650,6 @@ files = [ name = "greenlet" version = "3.0.0" description = "Lightweight in-process concurrent programming" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -744,7 +725,6 @@ test = ["objgraph", "psutil"] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -756,7 +736,6 @@ files = [ name = "h2" version = "4.1.0" description = "HTTP/2 State-Machine based protocol implementation" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -772,7 +751,6 @@ hyperframe = ">=6.0,<7" name = "hpack" version = "4.0.0" description = "Pure-Python HPACK header compression" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -784,7 +762,6 @@ files = [ name = "httpcore" version = "0.18.0" description = "A minimal low-level HTTP client." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -796,17 +773,16 @@ files = [ anyio = ">=3.0,<5.0" certifi = "*" h11 = ">=0.13,<0.15" -sniffio = ">=1.0.0,<2.0.0" +sniffio = "==1.*" [package.extras] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "httpx" version = "0.25.0" description = "The next generation HTTP client." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -822,19 +798,18 @@ h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""} httpcore = ">=0.18.0,<0.19.0" idna = "*" sniffio = "*" -socksio = {version = ">=1.0.0,<2.0.0", optional = true, markers = "extra == \"socks\""} +socksio = {version = "==1.*", optional = true, markers = "extra == \"socks\""} [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "hyperframe" version = "6.0.1" description = "HTTP/2 framing layer for Python" -category = "main" optional = false python-versions = ">=3.6.1" files = [ @@ -846,7 +821,6 @@ files = [ name = "idna" version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -858,7 +832,6 @@ files = [ name = "jsonpatch" version = "1.33" description = "Apply JSON-Patches (RFC 6902)" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ @@ -873,18 +846,17 @@ jsonpointer = ">=1.9" name = "jsonpointer" version = "2.4" description = "Identify specific nodes in a JSON document (RFC 6901)" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, ] [[package]] name = "langchain" version = "0.0.316" description = "Building applications with LLMs through composability" -category = "main" optional = false python-versions = ">=3.8.1,<4.0" files = [ @@ -925,7 +897,6 @@ text-helpers = ["chardet (>=5.1.0,<6.0.0)"] name = "langsmith" version = "0.0.44" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." -category = "main" optional = false python-versions = ">=3.8.1,<4.0" files = [ @@ -941,7 +912,6 @@ requests = ">=2,<3" name = "lxml" version = "4.9.3" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" files = [ @@ -1049,7 +1019,6 @@ source = ["Cython (>=0.29.35)"] name = "marshmallow" version = "3.20.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1070,7 +1039,6 @@ tests = ["pytest", "pytz", "simplejson"] name = "multidict" version = "6.0.4" description = "multidict implementation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1154,7 +1122,6 @@ files = [ name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
-category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1166,7 +1133,6 @@ files = [ name = "numpy" version = "1.24.4" description = "Fundamental package for array computing in Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1200,11 +1166,32 @@ files = [ {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, ] +[[package]] +name = "openai" +version = "0.28.1" +description = "Python client library for the OpenAI API" +optional = false +python-versions = ">=3.7.1" +files = [ + {file = "openai-0.28.1-py3-none-any.whl", hash = "sha256:d18690f9e3d31eedb66b57b88c2165d760b24ea0a01f150dd3f068155088ce68"}, + {file = "openai-0.28.1.tar.gz", hash = "sha256:4be1dad329a65b4ce1a660fe6d5431b438f429b5855c883435f0f7fcb6d2dcc8"}, +] + +[package.dependencies] +aiohttp = "*" +requests = ">=2.20" +tqdm = "*" + +[package.extras] +datalib = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +dev = ["black (>=21.6b0,<22.0)", "pytest (==6.*)", "pytest-asyncio", "pytest-mock"] +embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"] +wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"] + [[package]] name = "packaging" version = "23.2" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1216,7 +1203,6 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1228,7 +1214,6 @@ files = [ name = "pydantic" version = "2.4.2" description = "Data validation using Python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1248,7 +1233,6 @@ email = ["email-validator (>=2.0.0)"] name = "pydantic-core" version = "2.10.1" description = "" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1367,7 +1351,6 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1376,6 +1359,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1383,8 +1367,15 @@ files = [ {file = 
"PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1401,6 +1392,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1408,6 +1400,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = 
"PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1417,7 +1410,6 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1439,7 +1431,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1451,7 +1442,6 @@ files = [ name = "socksio" version = "1.0.0" description = "Sans-I/O implementation of SOCKS4, SOCKS4A, and SOCKS5." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1463,7 +1453,6 @@ files = [ name = "sqlalchemy" version = "2.0.22" description = "Database Abstraction Library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1550,7 +1539,6 @@ sqlcipher = ["sqlcipher3-binary"] name = "tenacity" version = "8.2.3" description = "Retry code until it succeeds" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1561,11 +1549,30 @@ files = [ [package.extras] doc = ["reno", "sphinx", "tornado (>=4.5)"] +[[package]] +name = "tqdm" +version = "4.66.1" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, + {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + [[package]] name = "typing-extensions" version = "4.8.0" description = "Backported and Experimental Type Hints for Python 3.8+" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1577,7 +1584,6 @@ files = [ name = "typing-inspect" version = "0.9.0" description = "Runtime inspection utilities for typing module." -category = "main" optional = false python-versions = "*" files = [ @@ -1593,7 +1599,6 @@ typing-extensions = ">=3.7.4" name = "urllib3" version = "2.0.7" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1611,7 +1616,6 @@ zstd = ["zstandard (>=0.18.0)"] name = "yarl" version = "1.9.2" description = "Yet another URL library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1698,4 +1702,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "0d7e52bfe3bb46c0af623eb3d498430a2b943ee8a68887e9274b7b4b7b044320" +content-hash = "34120d3b80ea28a1a3afdc497bda645809b2e9d3ea00a5b00cfab970f7a7ba07" diff --git a/templates/stepback-qa-prompting/pyproject.toml b/templates/stepback-qa-prompting/pyproject.toml index 9a5ce82c541..18b97762859 100644 --- a/templates/stepback-qa-prompting/pyproject.toml +++ b/templates/stepback-qa-prompting/pyproject.toml @@ -9,6 +9,7 @@ readme = "README.md" python = ">=3.8.1,<4.0" langchain = ">=0.0.313" duckduckgo-search = "^3.9.3" +openai = "^0.28.1" [tool.langserve] export_module = "stepback_qa_prompting.chain"
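A note on the typed-input change in rewrite_retrieve_read/chain.py above: because `Question` is declared with `__root__: str`, the chain now accepts a bare string, which is also what the LangServe playground form will present. The `openai = "^0.28.1"` additions to both templates' pyproject.toml files (and the matching lock entries) back the `ChatOpenAI` model these chains import. A minimal sketch of invoking the typed chain directly, not part of the diff itself (assumes the template package is importable, e.g. after `langchain serve add rewrite-retrieve-read`, and that `OPENAI_API_KEY` is set; the question text is illustrative):

```python
# Sketch only: exercises the input typing added in
# templates/rewrite-retrieve-read/rewrite_retrieve_read/chain.py.
from rewrite_retrieve_read.chain import chain  # export_module from [tool.langserve]

# Question declares `__root__: str`, so the chain takes a plain string;
# the same shape is what the playground sends to the /invoke endpoint.
answer = chain.invoke("What are the health benefits of green tea?")
print(answer)
```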