From fd7f129f1092cf37c7529957b983ddf4540a75ed Mon Sep 17 00:00:00 2001
From: unifyh <18213435+unifyh@users.noreply.github.com>
Date: Tue, 10 Oct 2023 06:40:27 +0800
Subject: [PATCH] Docs: Fix broken line breaks in snippets (#11523)

**Description:** This PR fixes some code snippets that have raw `\n`'s instead of actual line breaks.

**Issue:** Currently some snippets look like this:

![image](https://github.com/langchain-ai/langchain/assets/18213435/355b4911-38e9-4ba4-8570-f928557b6c13)

Affected pages:
- https://python.langchain.com/docs/integrations/providers/predictionguard#example-usage
- https://python.langchain.com/docs/modules/agents/how_to/custom_llm_agent#set-up-environment
- https://python.langchain.com/docs/modules/chains/foundational/llm_chain#get-started
- https://python.langchain.com/docs/integrations/providers/shaleprotocol#how-to

**Tag maintainer:** @hwchase17
---
 .../docs/integrations/providers/predictionguard.mdx      | 6 ++++--
 .../docs/integrations/providers/shaleprotocol.md         | 3 ++-
 docs/snippets/modules/agents/how_to/custom_llm_agent.mdx | 4 +++-
 docs/snippets/modules/chains/foundational/llm_chain.mdx  | 4 +++-
 4 files changed, 12 insertions(+), 5 deletions(-)

diff --git a/docs/docs_skeleton/docs/integrations/providers/predictionguard.mdx b/docs/docs_skeleton/docs/integrations/providers/predictionguard.mdx
index f953e37effe..0dfb744585c 100644
--- a/docs/docs_skeleton/docs/integrations/providers/predictionguard.mdx
+++ b/docs/docs_skeleton/docs/integrations/providers/predictionguard.mdx
@@ -37,7 +37,8 @@ import os
 import predictionguard as pg
 
 from langchain.llms import PredictionGuard
-from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain
+from langchain.prompts import PromptTemplate
+from langchain.chains import LLMChain
 
 # Your Prediction Guard API key. Get one at predictionguard.com
 os.environ["PREDICTIONGUARD_TOKEN"] = ""
@@ -76,7 +77,8 @@ Basic LLM Chaining with the Prediction Guard wrapper:
 ```python
 import os
 
-from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain
+from langchain.prompts import PromptTemplate
+from langchain.chains import LLMChain
 from langchain.llms import PredictionGuard
 
 # Optional, add your OpenAI API Key. This is optional, as Prediction Guard allows
diff --git a/docs/docs_skeleton/docs/integrations/providers/shaleprotocol.md b/docs/docs_skeleton/docs/integrations/providers/shaleprotocol.md
index 332963050e3..d2b1bfe3681 100644
--- a/docs/docs_skeleton/docs/integrations/providers/shaleprotocol.md
+++ b/docs/docs_skeleton/docs/integrations/providers/shaleprotocol.md
@@ -20,7 +20,8 @@ As of June 2023, the API supports Vicuna-13B by default. We are going to support
 For example
 ```python
 from langchain.llms import OpenAI
-from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain
+from langchain.prompts import PromptTemplate
+from langchain.chains import LLMChain
 import os
 
 os.environ['OPENAI_API_BASE'] = "https://shale.live/v1"
diff --git a/docs/snippets/modules/agents/how_to/custom_llm_agent.mdx b/docs/snippets/modules/agents/how_to/custom_llm_agent.mdx
index 073a571f208..d3b048f8d8c 100644
--- a/docs/snippets/modules/agents/how_to/custom_llm_agent.mdx
+++ b/docs/snippets/modules/agents/how_to/custom_llm_agent.mdx
@@ -20,7 +20,9 @@ Do necessary imports, etc.
 ```python
 from langchain.agents import Tool, AgentExecutor, LLMSingleActionAgent, AgentOutputParser
 from langchain.prompts import StringPromptTemplate
-from langchain.llms import OpenAI\nfrom langchain.utilities import SerpAPIWrapper\nfrom langchain.chains import LLMChain
+from langchain.llms import OpenAI
+from langchain.utilities import SerpAPIWrapper
+from langchain.chains import LLMChain
 from typing import List, Union
 from langchain.schema import AgentAction, AgentFinish, OutputParserException
 import re
diff --git a/docs/snippets/modules/chains/foundational/llm_chain.mdx b/docs/snippets/modules/chains/foundational/llm_chain.mdx
index ac680d9b5bb..4c8fb035d02 100644
--- a/docs/snippets/modules/chains/foundational/llm_chain.mdx
+++ b/docs/snippets/modules/chains/foundational/llm_chain.mdx
@@ -1,5 +1,7 @@
 ```python
-from langchain.prompts import PromptTemplate\nfrom langchain.llms import OpenAI\nfrom langchain.chains import LLMChain
+from langchain.prompts import PromptTemplate
+from langchain.llms import OpenAI
+from langchain.chains import LLMChain
 
 prompt_template = "What is a good name for a company that makes {product}?"
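Not part of the patch above: as a rough way to double-check that no other snippets still contain a literal `\n` inside a fenced code block, a minimal sketch along these lines could be run from the repository root. The `docs` directory, the `.md`/`.mdx` glob, and the naive fence matching are assumptions, not something this PR ships.

```python
# Hypothetical helper (not part of this PR): flag fenced code blocks in the docs
# tree that still contain a literal backslash-n instead of a real line break.
import pathlib
import re

DOCS_ROOT = pathlib.Path("docs")  # assumed location of the docs sources

FENCE_RE = re.compile(r"```.*?```", re.DOTALL)  # naive fenced-code-block matcher

for path in sorted(DOCS_ROOT.rglob("*.md*")):  # covers both .md and .mdx files
    text = path.read_text(encoding="utf-8")
    for block in FENCE_RE.findall(text):
        if "\\n" in block:  # the two characters '\' and 'n', not a newline
            print(f"literal \\n inside a code block: {path}")
            break
```

The fence regex is deliberately crude; it only needs to surface candidate files for manual review, not parse MDX.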