Mirror of https://github.com/hwchase17/langchain.git (synced 2025-09-04 20:46:45 +00:00)
notebook fmt (#12498)
@@ -19,6 +19,7 @@
    "outputs": [],
    "source": [
     "from langchain.document_loaders import WebBaseLoader\n",
+    "\n",
     "loader = WebBaseLoader(\"https://lilianweng.github.io/posts/2023-06-23-agent/\")\n",
     "text = loader.load()"
    ]
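For context, a minimal sketch of what this first notebook cell does when run as a plain script; the loader and URL are taken from the cell above, and the printed fields are standard LangChain Document attributes:

from langchain.document_loaders import WebBaseLoader

# WebBaseLoader fetches the page over HTTP and returns a list of Document objects.
loader = WebBaseLoader("https://lilianweng.github.io/posts/2023-06-23-agent/")
text = loader.load()

# Each Document exposes the scraped text as `page_content`; the second cell
# below slices the first 1500 characters of it.
print(len(text))                    # number of documents returned
print(text[0].page_content[:200])   # preview of the scraped text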
@@ -45,8 +46,9 @@
    "outputs": [],
    "source": [
     "from langserve.client import RemoteRunnable\n",
-    "llama2_function = RemoteRunnable('http://0.0.0.0:8001/llama2_functions')\n",
-    "llama2_function.invoke({\"input\":text[0].page_content[0:1500]})"
+    "\n",
+    "llama2_function = RemoteRunnable(\"http://0.0.0.0:8001/llama2_functions\")\n",
+    "llama2_function.invoke({\"input\": text[0].page_content[0:1500]})"
    ]
   }
  ],
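The client side of that cell, written out as a self-contained script. This is a sketch under the assumption that the llama2_functions LangServe app from this template is already running locally on port 8001; the endpoint URL and the {"input": ...} payload come straight from the cell:

from langchain.document_loaders import WebBaseLoader
from langserve.client import RemoteRunnable

# Re-load the blog post (same as the first cell).
text = WebBaseLoader("https://lilianweng.github.io/posts/2023-06-23-agent/").load()

# RemoteRunnable proxies invoke() calls to the running LangServe endpoint.
llama2_function = RemoteRunnable("http://0.0.0.0:8001/llama2_functions")

# Pass the first 1500 characters of the scraped post to the served chain.
result = llama2_function.invoke({"input": text[0].page_content[0:1500]})
print(result)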
@@ -1,3 +1,3 @@
 from llama2_functions.chain import chain

-__all__ = ["chain"]
+__all__ = ["chain"]
@@ -5,9 +5,7 @@ from langchain.prompts import ChatPromptTemplate
 replicate_id = "andreasjansson/llama-2-13b-chat-gguf:60ec5dda9ff9ee0b6f786c9d1157842e6ab3cc931139ad98fe99e08a35c5d4d4"  # noqa: E501
 model = Replicate(
     model=replicate_id,
-    model_kwargs={"temperature": 0.8,
-                  "max_length": 500,
-                  "top_p": 0.95},
+    model_kwargs={"temperature": 0.8, "max_length": 500, "top_p": 0.95},
 )

 # Prompt with output schema specification
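A sketch of exercising this Replicate-hosted model on its own, outside the chain. It assumes a REPLICATE_API_TOKEN is available in the environment (the wrapper reads it from there); the model id and model_kwargs are the ones configured above:

import os

from langchain.llms import Replicate

# Assumption: the token is supplied by the user; the Replicate wrapper
# picks it up from the environment.
os.environ["REPLICATE_API_TOKEN"] = "<your-replicate-token>"

replicate_id = "andreasjansson/llama-2-13b-chat-gguf:60ec5dda9ff9ee0b6f786c9d1157842e6ab3cc931139ad98fe99e08a35c5d4d4"  # noqa: E501
model = Replicate(
    model=replicate_id,
    model_kwargs={"temperature": 0.8, "max_length": 500, "top_p": 0.95},
)

# LLMs are Runnables, so the model can be invoked directly with a prompt string.
print(model.invoke("Summarize what an LLM-powered autonomous agent is in one sentence."))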
@@ -39,7 +37,4 @@ Respond with json that adheres to the following jsonschema:
 prompt = ChatPromptTemplate.from_messages([("system", template), ("human", "{input}")])

 # Chain
-chain = (
-    prompt
-    | model
-)
+chain = prompt | model
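Finally, a sketch of running the assembled chain directly in Python rather than through LangServe, again assuming REPLICATE_API_TOKEN is set. The import path matches the `from llama2_functions.chain import chain` export shown above, and the {"input": ...} key matches the prompt's {input} variable:

from llama2_functions.chain import chain  # chain = prompt | model, as defined above

# Any document text works here; the template prompts the model to respond
# with JSON that follows the schema embedded in the system message.
snippet = "LLM-powered autonomous agents combine planning, memory, and tool use ..."

output = chain.invoke({"input": snippet})
print(output)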