Mirror of https://github.com/hwchase17/langchain.git, synced 2026-01-29 21:30:18 +00:00
add few shot example (#148)
@@ -2,14 +2,14 @@
 import pytest

 from langchain.chains.llm import LLMChain
-from langchain.prompts.prompt import Prompt
+from langchain.prompts.prompt import PromptTemplate
 from tests.unit_tests.llms.fake_llm import FakeLLM


 @pytest.fixture
 def fake_llm_chain() -> LLMChain:
     """Fake LLM chain for testing purposes."""
-    prompt = Prompt(input_variables=["bar"], template="This is a {bar}:")
+    prompt = PromptTemplate(input_variables=["bar"], template="This is a {bar}:")
     return LLMChain(prompt=prompt, llm=FakeLLM(), output_key="text1")

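For context, the fixture above now builds the renamed PromptTemplate class. A minimal sketch of how that template behaves on its own; the import path and constructor arguments are taken from the diff, while the expected output string is an assumption based on plain variable substitution:

from langchain.prompts.prompt import PromptTemplate

# Same template that the fake_llm_chain fixture constructs above.
prompt = PromptTemplate(input_variables=["bar"], template="This is a {bar}:")

# format() substitutes each input variable into the template string,
# so the fake chain would receive "This is a test:" as its prompt text.
assert prompt.format(bar="test") == "This is a test:"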
@@ -4,7 +4,7 @@ import pytest

 from langchain.chains.mrkl.base import ChainConfig, MRKLChain, get_action_and_input
 from langchain.chains.mrkl.prompt import BASE_TEMPLATE
-from langchain.prompts import Prompt
+from langchain.prompts import PromptTemplate
 from tests.unit_tests.llms.fake_llm import FakeLLM

@@ -66,5 +66,5 @@ def test_from_chains() -> None:
         tools=expected_tools_prompt, tool_names=expected_tool_names
     )
     prompt = mrkl_chain.prompt
-    assert isinstance(prompt, Prompt)
+    assert isinstance(prompt, PromptTemplate)
     assert prompt.template == expected_template
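Note that the MRKL test imports the class from the package root (langchain.prompts), while the other test files import it from langchain.prompts.prompt. A small sketch of the relationship, assuming the package __init__ simply re-exports the class defined in langchain.prompts.prompt, which is what the identical rename under both import styles suggests:

from langchain.prompts import PromptTemplate
from langchain.prompts.prompt import PromptTemplate as _PromptTemplate

# Assumption: the top-level package re-exports the class from
# langchain.prompts.prompt, so both import paths name the same object.
assert PromptTemplate is _PromptTemplate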
@@ -9,7 +9,7 @@ from langchain.chains.react.base import ReActChain, predict_until_observation
 from langchain.docstore.base import Docstore
 from langchain.docstore.document import Document
 from langchain.llms.base import LLM
-from langchain.prompts.prompt import Prompt
+from langchain.prompts.prompt import PromptTemplate

 _PAGE_CONTENT = """This is a page about LangChain.

@@ -19,7 +19,7 @@ What isn't there to love about langchain?

 Made in 2022."""

-_FAKE_PROMPT = Prompt(input_variables=["input"], template="{input}")
+_FAKE_PROMPT = PromptTemplate(input_variables=["input"], template="{input}")


 class FakeListLLM(LLM):