From ef07308c304a90a8d2a4742cf499092a24658cc3 Mon Sep 17 00:00:00 2001
From: Srijan Dubey <87586713+Srijan-D@users.noreply.github.com>
Date: Wed, 17 Jul 2024 01:37:36 +0530
Subject: [PATCH] Upgraded shaleprotocol to use langchain v0.2, removed
 deprecated classes (#24320)

Description: Added support for langchain v0.2 for Shale Protocol. Replaced the
deprecated LLMChain with the Runnable interface, which allows any two Runnables
to be 'chained' together into sequences. Also added StreamingStdOutCallbackHandler,
a callback handler for streaming.
Issue: None
Dependencies: None.
---
 docs/docs/integrations/providers/shaleprotocol.md | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/docs/docs/integrations/providers/shaleprotocol.md b/docs/docs/integrations/providers/shaleprotocol.md
index eaafb4a3fd2..553778cedea 100644
--- a/docs/docs/integrations/providers/shaleprotocol.md
+++ b/docs/docs/integrations/providers/shaleprotocol.md
@@ -21,7 +21,7 @@ For example
 ```python
 from langchain_openai import OpenAI
 from langchain_core.prompts import PromptTemplate
-from langchain.chains import LLMChain
+from langchain_core.output_parsers import StrOutputParser
 
 import os
 os.environ['OPENAI_API_BASE'] = "https://shale.live/v1"
@@ -35,10 +35,11 @@ template = """Question: {question}
 
 prompt = PromptTemplate.from_template(template)
 
-llm_chain = LLMChain(prompt=prompt, llm=llm)
+
+llm_chain = prompt | llm | StrOutputParser()
 
 question = "What NFL team won the Super Bowl in the year Justin Beiber was born?"
 
-llm_chain.run(question)
+llm_chain.invoke(question)
 
 ```
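
Note: the description mentions StreamingStdOutCallbackHandler, but the hunks above only swap LLMChain for the Runnable pipeline. The snippet below is a minimal, illustrative sketch (not part of the patch) of how that streaming callback could be combined with the new `prompt | llm | StrOutputParser()` chain against the Shale Protocol endpoint, assuming the langchain v0.2 imports shown in the diff; the template body and the streaming wiring are assumptions rather than code from this commit.

```python
import os

from langchain_core.callbacks import StreamingStdOutCallbackHandler
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate
from langchain_openai import OpenAI

# Shale Protocol's OpenAI-compatible endpoint, as configured in the patched docs.
os.environ["OPENAI_API_BASE"] = "https://shale.live/v1"
os.environ["OPENAI_API_KEY"] = "ENTER YOUR API KEY"

# streaming=True makes the LLM emit tokens as they are generated; the callback
# prints each token to stdout.
llm = OpenAI(streaming=True, callbacks=[StreamingStdOutCallbackHandler()])

# Template assumed from the surrounding docs example (only its first line is
# visible in the hunk header above).
template = """Question: {question}

Answer: Let's think step by step."""
prompt = PromptTemplate.from_template(template)

# Three Runnables composed into a sequence; this replaces the deprecated
# LLMChain(prompt=prompt, llm=llm).
llm_chain = prompt | llm | StrOutputParser()

question = "What NFL team won the Super Bowl in the year Justin Beiber was born?"

# invoke() returns the full string; with the callback attached, tokens are
# printed to stdout while the call runs. llm_chain.stream(question) would
# yield the chunks directly instead.
print(llm_chain.invoke(question))
```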