Format Templates (#12396)

Erick Friis authored on 2023-10-26 19:44:30 -07:00, committed by GitHub
parent 25c98dbba9
commit 4b16601d33
59 changed files with 800 additions and 441 deletions


@@ -1,8 +1,9 @@
+import os
+
 from langchain.document_loaders import JSONLoader
 from langchain.embeddings import HuggingFaceEmbeddings
-from langchain.vectorstores.elasticsearch import ElasticsearchStore
 from langchain.text_splitter import RecursiveCharacterTextSplitter
-import os
+from langchain.vectorstores.elasticsearch import ElasticsearchStore
 
 ELASTIC_CLOUD_ID = os.getenv("ELASTIC_CLOUD_ID")
 ELASTIC_USERNAME = os.getenv("ELASTIC_USERNAME", "elastic")


@@ -23,7 +23,9 @@ if __name__ == "__main__":
"question": follow_up_question,
"chat_history": [
"What is the nasa sales team?",
"The sales team of NASA consists of Laura Martinez, the Area Vice-President of North America, and Gary Johnson, the Area Vice-President of South America. (Sales Organization Overview)",
"The sales team of NASA consists of Laura Martinez, the Area "
"Vice-President of North America, and Gary Johnson, the Area "
"Vice-President of South America. (Sales Organization Overview)",
],
}
)
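Aside, not part of the commit: the rewrapped entry relies on Python's implicit concatenation of adjacent string literals, so the chat_history message the chain receives is exactly the same string as before; only the source line lengths change. A minimal illustrative check:

# Adjacent string literals are merged by the parser at compile time, so
# splitting a long message across source lines does not change its value.
assert ("sales team of " "NASA") == "sales team of NASA"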


@@ -1,13 +1,15 @@
-from langchain.chat_models import ChatOpenAI
-from langchain.schema.output_parser import StrOutputParser
-from langchain.schema.runnable import RunnablePassthrough, RunnableMap
-from langchain.embeddings import HuggingFaceEmbeddings
-from langchain.vectorstores.elasticsearch import ElasticsearchStore
-from langchain.schema import format_document
-from typing import Tuple, List
 from operator import itemgetter
-from .prompts import CONDENSE_QUESTION_PROMPT, LLM_CONTEXT_PROMPT, DOCUMENT_PROMPT
+from typing import List, Tuple
+
+from langchain.chat_models import ChatOpenAI
+from langchain.embeddings import HuggingFaceEmbeddings
+from langchain.schema import format_document
+from langchain.schema.output_parser import StrOutputParser
+from langchain.schema.runnable import RunnableMap, RunnablePassthrough
+from langchain.vectorstores.elasticsearch import ElasticsearchStore
+
 from .connection import es_connection_details
+from .prompts import CONDENSE_QUESTION_PROMPT, DOCUMENT_PROMPT, LLM_CONTEXT_PROMPT
 
 # Setup connecting to Elasticsearch
 vectorstore = ElasticsearchStore(
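The reordering above is the usual isort-style layout that formatting passes like this one enforce: standard-library imports first, then third-party packages, then local modules, each group alphabetized and separated by a blank line. Below is the same new import block annotated with the group each line falls into; the repo's exact lint configuration is not shown in this diff, so treat the labels as a reading aid rather than a statement of its settings.

# Standard library
from operator import itemgetter
from typing import List, Tuple

# Third-party packages
from langchain.chat_models import ChatOpenAI
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.schema import format_document
from langchain.schema.output_parser import StrOutputParser
from langchain.schema.runnable import RunnableMap, RunnablePassthrough
from langchain.vectorstores.elasticsearch import ElasticsearchStore

# Local package modules
from .connection import es_connection_details
from .prompts import CONDENSE_QUESTION_PROMPT, DOCUMENT_PROMPT, LLM_CONTEXT_PROMPT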


@@ -6,7 +6,7 @@ condense_question_prompt_template = """Given the following conversation and a fo
 Chat History:
 {chat_history}
 Follow Up Input: {question}
-"""
+"""  # noqa: E501
 CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(
     condense_question_prompt_template
 )
@@ -23,7 +23,7 @@ If you don't know the answer, just say that you don't know, don't try to make up
 {context}
 ----
 Question: {question}
-"""
+"""  # noqa: E501
 LLM_CONTEXT_PROMPT = ChatPromptTemplate.from_template(llm_context_prompt_template)
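Aside: E501 is the "line too long" check in flake8 and ruff. The trailing # noqa: E501 added after the closing quotes tells the linter to skip that check rather than forcing the long natural-language prompt lines to be hard-wrapped. A small illustrative sketch with a hypothetical variable, not taken from the repo:

# The noqa marker silences only the line-length rule (E501) on this one line.
template = "Answer the question using only the provided context; if the context does not contain the answer, say that you don't know."  # noqa: E501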