mirror of https://github.com/hwchase17/langchain.git (synced 2025-09-06 05:25:04 +00:00)
Format Templates (#12396)
@@ -1,8 +1,9 @@
+import os
+
 from langchain.document_loaders import JSONLoader
 from langchain.embeddings import HuggingFaceEmbeddings
-from langchain.vectorstores.elasticsearch import ElasticsearchStore
 from langchain.text_splitter import RecursiveCharacterTextSplitter
-import os
+from langchain.vectorstores.elasticsearch import ElasticsearchStore
 
 ELASTIC_CLOUD_ID = os.getenv("ELASTIC_CLOUD_ID")
 ELASTIC_USERNAME = os.getenv("ELASTIC_USERNAME", "elastic")
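These are only import and configuration lines, but for orientation, here is a minimal, hypothetical sketch of the kind of ingest script they typically belong to. The file path, jq schema, chunk sizes, and index name below are illustrative assumptions, not values from this commit.

import os

from langchain.document_loaders import JSONLoader
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.vectorstores.elasticsearch import ElasticsearchStore

ELASTIC_CLOUD_ID = os.getenv("ELASTIC_CLOUD_ID")
ELASTIC_USERNAME = os.getenv("ELASTIC_USERNAME", "elastic")
ELASTIC_PASSWORD = os.getenv("ELASTIC_PASSWORD")  # assumed variable; not shown in the diff

# Load a JSON file of documents, split them into chunks, and index them.
docs = JSONLoader(
    file_path="./data/documents.json",  # hypothetical path
    jq_schema=".[]",                    # hypothetical jq expression selecting each record
    content_key="content",              # hypothetical field holding the text
).load()
chunks = RecursiveCharacterTextSplitter(chunk_size=800, chunk_overlap=100).split_documents(docs)

ElasticsearchStore.from_documents(
    chunks,
    embedding=HuggingFaceEmbeddings(),  # default sentence-transformers model
    index_name="workplace-docs",        # hypothetical index name
    es_cloud_id=ELASTIC_CLOUD_ID,
    es_user=ELASTIC_USERNAME,
    es_password=ELASTIC_PASSWORD,
)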
@@ -23,7 +23,9 @@ if __name__ == "__main__":
         "question": follow_up_question,
         "chat_history": [
             "What is the nasa sales team?",
-            "The sales team of NASA consists of Laura Martinez, the Area Vice-President of North America, and Gary Johnson, the Area Vice-President of South America. (Sales Organization Overview)",
+            "The sales team of NASA consists of Laura Martinez, the Area "
+            "Vice-President of North America, and Gary Johnson, the Area "
+            "Vice-President of South America. (Sales Organization Overview)",
         ],
     }
 )
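The only substantive question this hunk raises is whether splitting the long chat-history string changes its value. It does not: adjacent Python string literals are concatenated at parse time, so the reformatted version is the same message byte for byte. A quick check:

message = (
    "The sales team of NASA consists of Laura Martinez, the Area "
    "Vice-President of North America, and Gary Johnson, the Area "
    "Vice-President of South America. (Sales Organization Overview)"
)
# Implicit concatenation of adjacent literals yields a single string.
assert "Area Vice-President of North America, and Gary Johnson" in message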
@@ -1,13 +1,15 @@
-from langchain.chat_models import ChatOpenAI
-from langchain.schema.output_parser import StrOutputParser
-from langchain.schema.runnable import RunnablePassthrough, RunnableMap
-from langchain.embeddings import HuggingFaceEmbeddings
-from langchain.vectorstores.elasticsearch import ElasticsearchStore
-from langchain.schema import format_document
-from typing import Tuple, List
-from operator import itemgetter
-from .prompts import CONDENSE_QUESTION_PROMPT, LLM_CONTEXT_PROMPT, DOCUMENT_PROMPT
+from typing import List, Tuple
+
+from langchain.chat_models import ChatOpenAI
+from langchain.embeddings import HuggingFaceEmbeddings
+from langchain.schema import format_document
+from langchain.schema.output_parser import StrOutputParser
+from langchain.schema.runnable import RunnableMap, RunnablePassthrough
+from langchain.vectorstores.elasticsearch import ElasticsearchStore
+
+from .connection import es_connection_details
+from .prompts import CONDENSE_QUESTION_PROMPT, DOCUMENT_PROMPT, LLM_CONTEXT_PROMPT
 
 # Setup connecting to Elasticsearch
 vectorstore = ElasticsearchStore(
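The hunk ends mid-statement at `vectorstore = ElasticsearchStore(`, so the constructor arguments are not part of this diff. As a rough, hypothetical sketch of how the imported names usually fit together in a chain module like this one (the connection keys, index name, and helper are assumptions, not this template's code):

vectorstore = ElasticsearchStore(
    **es_connection_details,            # assumed to expand to es_cloud_id / es_user / es_password
    index_name="workplace-docs",        # hypothetical index name
    embedding=HuggingFaceEmbeddings(),
)
retriever = vectorstore.as_retriever()

def _combine_documents(docs, separator="\n\n"):
    # Render each retrieved document through DOCUMENT_PROMPT and join the results
    # so they can fill the {context} slot of LLM_CONTEXT_PROMPT.
    return separator.join(format_document(doc, DOCUMENT_PROMPT) for doc in docs)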
@@ -6,7 +6,7 @@ condense_question_prompt_template = """Given the following conversation and a fo
 Chat History:
 {chat_history}
 Follow Up Input: {question}
-"""
+""" # noqa: E501
 CONDENSE_QUESTION_PROMPT = PromptTemplate.from_template(
     condense_question_prompt_template
 )
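For reference on how this template is consumed: `PromptTemplate.from_template` infers `{chat_history}` and `{question}` as input variables, so condensing a follow-up question is just a formatting call (the values below are illustrative):

standalone_prompt = CONDENSE_QUESTION_PROMPT.format(
    chat_history="Human: What is the nasa sales team?\nAssistant: Laura Martinez and Gary Johnson lead it.",
    question="Who covers North America?",
)
# standalone_prompt is the filled-in text sent to the LLM to rewrite the
# follow-up as a standalone question.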
@@ -23,7 +23,7 @@ If you don't know the answer, just say that you don't know, don't try to make up
 {context}
 ----
 Question: {question}
-"""
+""" # noqa: E501
 
 LLM_CONTEXT_PROMPT = ChatPromptTemplate.from_template(llm_context_prompt_template)
 
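Likewise, the `# noqa: E501` here only silences the line-length warning for the template string; the prompt itself is unchanged. A small usage sketch, with illustrative values, of how a `ChatPromptTemplate` built this way is typically rendered:

messages = LLM_CONTEXT_PROMPT.format_messages(
    context="Laura Martinez is the Area Vice-President of North America. (Sales Organization Overview)",
    question="Who is the Area Vice-President of North America?",
)
# format_messages returns a list of chat messages ready to pass to a chat model
# such as ChatOpenAI.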