Callbacks Refactor [base] (#3256)

Co-authored-by: Nuno Campos <nuno@boringbits.io>
Co-authored-by: Davis Chase <130488702+dev2049@users.noreply.github.com>
Co-authored-by: Zander Chase <130414180+vowelparrot@users.noreply.github.com>
Co-authored-by: Harrison Chase <hw.chase.17@gmail.com>
Ankush Gola
2023-04-30 11:14:09 -07:00
committed by GitHub
parent 18ec22fe56
commit d3ec00b566
208 changed files with 6394 additions and 3353 deletions


@@ -10,7 +10,7 @@
},
{
"cell_type": "code",
"execution_count": 1,
"execution_count": 9,
"metadata": {},
"outputs": [
{
@@ -37,7 +37,7 @@
"'Hello World\\n'"
]
},
"execution_count": 1,
"execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
@@ -50,7 +50,7 @@
"\n",
"text = \"Please write a bash script that prints 'Hello World' to the console.\"\n",
"\n",
"bash_chain = LLMBashChain(llm=llm, verbose=True)\n",
"bash_chain = LLMBashChain.from_llm(llm, verbose=True)\n",
"\n",
"bash_chain.run(text)"
]
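
This hunk switches the notebook from calling the `LLMBashChain(llm=llm, verbose=True)` constructor directly to the `LLMBashChain.from_llm(...)` classmethod introduced with the callbacks refactor. A minimal sketch of the updated cell, assuming the `llm = OpenAI(temperature=0)` setup from the notebook's earlier, unchanged cells:

```python
from langchain.chains import LLMBashChain
from langchain.llms import OpenAI

# Assumed setup from an earlier notebook cell (not part of this diff).
llm = OpenAI(temperature=0)

# New style: build the chain via the from_llm classmethod instead of the
# direct constructor.
bash_chain = LLMBashChain.from_llm(llm, verbose=True)

text = "Please write a bash script that prints 'Hello World' to the console."
bash_chain.run(text)
```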
@@ -65,11 +65,12 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
"from langchain.prompts.prompt import PromptTemplate\n",
"from langchain.chains.llm_bash.prompt import BashOutputParser\n",
"\n",
"_PROMPT_TEMPLATE = \"\"\"If someone asks you to perform a task, your job is to come up with a series of bash commands that will perform the task. There is no need to put \"#!/bin/bash\" in your answer. Make sure to reason step by step, using this format:\n",
"Question: \"copy the files in the directory named 'target' into a new directory at the same level as target called 'myNewDirectory'\"\n",
@@ -88,12 +89,12 @@
"That is the format. Begin!\n",
"Question: {question}\"\"\"\n",
"\n",
"PROMPT = PromptTemplate(input_variables=[\"question\"], template=_PROMPT_TEMPLATE)"
"PROMPT = PromptTemplate(input_variables=[\"question\"], template=_PROMPT_TEMPLATE, output_parser=BashOutputParser())"
]
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 11,
"metadata": {},
"outputs": [
{
@@ -120,13 +121,13 @@
"'Hello World\\n'"
]
},
"execution_count": 3,
"execution_count": 11,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"bash_chain = LLMBashChain(llm=llm, prompt=PROMPT, verbose=True)\n",
"bash_chain = LLMBashChain.from_llm(llm, prompt=PROMPT, verbose=True)\n",
"\n",
"text = \"Please write a bash script that prints 'Hello World' to the console.\"\n",
"\n",
@@ -134,7 +135,6 @@
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
@@ -145,7 +145,7 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 12,
"metadata": {},
"outputs": [
{
@@ -177,7 +177,7 @@
"'api.ipynb\\t\\t\\tllm_summarization_checker.ipynb\\r\\nconstitutional_chain.ipynb\\tmoderation.ipynb\\r\\nllm_bash.ipynb\\t\\t\\topenai_openapi.yaml\\r\\nllm_checker.ipynb\\t\\topenapi.ipynb\\r\\nllm_math.ipynb\\t\\t\\tpal.ipynb\\r\\nllm_requests.ipynb\\t\\tsqlite.ipynb'"
]
},
"execution_count": 4,
"execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
@@ -187,7 +187,7 @@
"\n",
"\n",
"persistent_process = BashProcess(persistent=True)\n",
"bash_chain = LLMBashChain.from_bash_process(llm=llm, bash_process=persistent_process, verbose=True)\n",
"bash_chain = LLMBashChain.from_llm(llm, bash_process=persistent_process, verbose=True)\n",
"\n",
"text = \"List the current directory then move up a level.\"\n",
"\n",
@@ -196,7 +196,7 @@
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": 13,
"metadata": {},
"outputs": [
{
@@ -224,7 +224,7 @@
"'examples\\t\\tgetting_started.ipynb\\tindex_examples\\r\\ngeneric\\t\\t\\thow_to_guides.rst'"
]
},
"execution_count": 5,
"execution_count": 13,
"metadata": {},
"output_type": "execute_result"
}
@@ -258,7 +258,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.16"
"version": "3.9.1"
}
},
"nbformat": 4,