Compare commits

...

8 Commits

Author SHA1 Message Date
Bagatur
e9e26b24b0 Merge branch 'bagatur/rfc_smithify_docs' of github.com:langchain-ai/langchain into bagatur/rfc_smithify_docs 2024-06-04 14:48:45 -07:00
Bagatur
d3329cf2a3 fmt 2024-06-04 14:48:35 -07:00
Erick Friis
b559ef5e72 x 2024-05-20 12:53:48 -07:00
Erick Friis
c38b2107c8 Merge branch 'master' into bagatur/rfc_smithify_docs 2024-05-20 12:51:09 -07:00
Bagatur
591e37e2d0 fmt 2024-05-20 12:17:34 -07:00
Bagatur
9748825cec fmt 2024-05-20 12:08:58 -07:00
Bagatur
30c7049d52 fmt 2024-05-20 12:04:14 -07:00
Bagatur
ff074fdd23 docs: use langsmith style 2024-05-20 11:49:31 -07:00
15 changed files with 15247 additions and 14306 deletions

View File

@@ -26,12 +26,21 @@
"@docusaurus/preset-classic": "2.4.3",
"@docusaurus/remark-plugin-npm2yarn": "^2.4.3",
"@docusaurus/theme-mermaid": "2.4.3",
"@emotion/react": "^11.11.0",
"@emotion/styled": "^11.11.0",
"@mdx-js/react": "^1.6.22",
"@mui/icons-material": "^5.11.16",
"@mui/joy": "^5.0.0-alpha.81",
"@scalar/docusaurus": "^0.2.26",
"@supabase/supabase-js": "^2.39.7",
"clsx": "^1.2.1",
"cookie": "^0.6.0",
"isomorphic-dompurify": "^2.7.0",
"dedent": "^1.5.3",
"dompurify": "^3.1.0",
"isomorphic-dompurify": "^2.9.0",
"json-loader": "^0.5.7",
"marked": "^12.0.1",
"prettier": "^2.7.1",
"process": "^0.11.10",
"react": "^17.0.2",
"react-dom": "^17.0.2",
@@ -40,7 +49,10 @@
},
"devDependencies": {
"@babel/eslint-parser": "^7.18.2",
"@docusaurus/module-type-aliases": "^2.4.1",
"@langchain/scripts": "^0.0.10",
"@tsconfig/docusaurus": "^1.0.7",
"@types/jest": "^29.5.3",
"docusaurus-plugin-typedoc": "next",
"dotenv": "^16.4.5",
"eslint": "^8.19.0",
@@ -51,14 +63,19 @@
"eslint-plugin-jsx-a11y": "^6.6.0",
"eslint-plugin-react": "^7.30.1",
"eslint-plugin-react-hooks": "^4.6.0",
"jest": "^29.6.2",
"marked": "^12.0.1",
"langchain": "^0.0.179",
"langsmith": "^0.0.47",
"prettier": "^2.7.1",
"supabase": "^1.148.6",
"ts-jest": "^29.1.1",
"typedoc": "^0.24.4",
"typedoc-plugin-markdown": "next",
"typescript": "^5.1.6",
"yaml-loader": "^0.8.0"
},
"resolutions": {
"resolutions": {
"cytoscape": "3.28.1"
},
"browserslist": {

View File

@@ -0,0 +1,36 @@
import React from "react";
import { CodeTabs } from "./InstructionsWithCode";
export function ClientInstallationCodeTabs() {
return (
<CodeTabs
groupId="client-language"
tabs={[
{
value: "python",
label: "pip",
language: "bash",
content: `pip install -U langsmith`,
},
{
value: "typescript",
label: "yarn",
language: "bash",
content: `yarn add langsmith`,
},
{
value: "npm",
label: "npm",
language: "bash",
content: `npm install -S langsmith`,
},
{
value: "pnpm",
label: "pnpm",
language: "bash",
content: `pnpm add langsmith`,
},
]}
/>
);
}

128
docs/src/components/Hub.js Normal file
View File

@@ -0,0 +1,128 @@
import React from "react";
import Tabs from "@theme/Tabs";
import TabItem from "@theme/TabItem";
import CodeBlock from "@theme/CodeBlock";
import { CodeTabs } from "./InstructionsWithCode";
export function HubInstallationCodeTabs() {
return (
<CodeTabs
groupId="client-language"
tabs={[
{
value: "python",
label: "pip",
language: "bash",
content: `pip install -U langchain langchainhub langchain-openai`,
},
{
value: "typescript",
label: "yarn",
language: "bash",
content: `yarn add langchain`,
},
{
value: "npm",
label: "npm",
language: "bash",
content: `npm install -S langchain`,
},
]}
/>
);
}
/**
 * Python/TypeScript tabs showing how to pull a prompt from the LangChain Hub
 * and use it in a runnable chained with a ChatOpenAI model.
 * Tab selection is synced via the "client-language" group.
 */
export function HubPullCodeTabs() {
// Python example: hub.pull, then compose prompt | model and invoke.
const pyBlock = `from langchain import hub
# pull a chat prompt
prompt = hub.pull("efriis/my-first-prompt")
# create a model to use it with
from langchain_openai import ChatOpenAI
model = ChatOpenAI()
# use it in a runnable
runnable = prompt | model
response = runnable.invoke({
"profession": "biologist",
"question": "What is special about parrots?",
})
print(response)
`;
// TypeScript example: same flow using prompt.pipe(model).
const jsBlock = `// import
import * as hub from "langchain/hub";
import { ChatPromptTemplate } from "@langchain/core/prompts";
import { ChatOpenAI } from "@langchain/openai";
// pull a chat prompt
const prompt = await hub.pull<ChatPromptTemplate>("efriis/my-first-prompt");
// create a model to use it with
const model = new ChatOpenAI();
// use it in a runnable
const runnable = prompt.pipe(model);
const result = await runnable.invoke({
"profession": "biologist",
"question": "What is special about parrots?",
});
console.log(result);`;
return (
<Tabs groupId="client-language">
<TabItem key="python" value="python" label="Python">
<CodeBlock className="python" language="python">
{pyBlock}
</CodeBlock>
</TabItem>
<TabItem key="typescript" value="typescript" label="TypeScript">
<CodeBlock className="typescript" language="typescript">
{jsBlock}
</CodeBlock>
</TabItem>
</Tabs>
);
}
/**
 * Python/TypeScript tabs showing how to push a prompt to the LangChain Hub
 * (hub.push) with the new repo kept private.
 * Tab selection is synced via the "client-language" group.
 */
export function HubPushCodeTabs() {
// Python example: build a prompt and push it under "<handle>/...".
const pyBlock = `from langchain import hub
from langchain.prompts.chat import ChatPromptTemplate
prompt = ChatPromptTemplate.from_template("tell me a joke about {topic}")
hub.push("<handle>/topic-joke-generator", prompt, new_repo_is_public=False)`;
// TypeScript example: equivalent push using the langsmith hub client.
const jsBlock = `import * as hub from "langchain/hub";
import {
ChatPromptTemplate,
HumanMessagePromptTemplate,
} from '@langchain/core/prompts';
const message = HumanMessagePromptTemplate.fromTemplate(
'tell me a joke about {topic}'
);
const prompt = ChatPromptTemplate.fromMessages([message]);
await hub.push("<handle>/my-first-prompt", prompt, { newRepoIsPublic: false });`;
return (
<Tabs groupId="client-language">
<TabItem key="python" value="python" label="Python">
<CodeBlock className="python" language="python">
{pyBlock}
</CodeBlock>
</TabItem>
<TabItem key="typescript" value="typescript" label="TypeScript">
<CodeBlock className="typescript" language="typescript">
{jsBlock}
</CodeBlock>
</TabItem>
</Tabs>
);
}

View File

@@ -0,0 +1,202 @@
import React from "react";
import Tabs from "@theme/Tabs";
import TabItem from "@theme/TabItem";
import CodeBlock from "@theme/CodeBlock";
import { marked } from "marked";
import DOMPurify from "isomorphic-dompurify";
import prettier from "prettier";
import dedent from "dedent";
import parserTypeScript from "prettier/parser-typescript";
/**
 * Build a CodeTabs entry describing a LangChain (Python) snippet.
 * @param {string} content - Raw Python source to display.
 * @returns {{value: string, label: string, content: string, language: string}}
 */
export function LangChainPyBlock(content) {
  const tab = {
    value: "langchain-py",
    label: "LangChain (Python)",
    language: "python",
    content,
  };
  return tab;
}
/**
 * Build a CodeTabs entry describing a LangChain (JS) snippet.
 * @param {string} content - Raw TypeScript source to display.
 * @returns {{value: string, label: string, content: string, language: string}}
 */
export function LangChainJSBlock(content) {
  const tab = {
    value: "langchain-js",
    label: "LangChain (JS)",
    language: "typescript",
    content,
  };
  return tab;
}
/**
 * Build a CodeTabs entry for a TypeScript snippet.
 * @param {string} content - Raw TypeScript source to display.
 * @param {string} [caption] - Optional markdown caption shown above the code.
 * @param {string} [label] - Tab label; defaults to "TypeScript".
 * @returns {{value: string, label: string, content: string, caption: string, language: string}}
 */
export function TypeScriptBlock(content, caption = "", label = "TypeScript") {
  const tab = {
    value: "typescript",
    language: "typescript",
    label,
    caption,
    content,
  };
  return tab;
}
/**
 * Build a CodeTabs entry for a Python snippet.
 * @param {string} content - Raw Python source to display.
 * @param {string} [caption] - Optional markdown caption shown above the code.
 * @param {string} [label] - Tab label; defaults to "Python".
 * @returns {{value: string, label: string, content: string, caption: string, language: string}}
 */
export function PythonBlock(content, caption = "", label = "Python") {
  const tab = {
    value: "python",
    language: "python",
    label,
    caption,
    content,
  };
  return tab;
}
/**
 * Build a CodeTabs entry for a raw-API example (Python + requests).
 * @param {string} content - Raw Python source to display.
 * @param {string} [caption] - Optional markdown caption shown above the code.
 * @returns {{value: string, label: string, content: string, caption: string, language: string}}
 */
export function APIBlock(content, caption = "") {
  const tab = {
    value: "api",
    label: "API (Using Python Requests)",
    language: "python",
    caption,
    content,
  };
  return tab;
}
/**
 * Build a CodeTabs entry for a shell snippet.
 * @param {string} content - Raw shell commands to display.
 * @param {string} [value] - Tab value; defaults to "shell".
 * @param {string} [label] - Tab label; defaults to "Shell".
 * @returns {{value: string, label: string, content: string, language: string}}
 */
export function ShellBlock(content, value = "shell", label = "Shell") {
  const tab = {
    language: "shell",
    value,
    label,
    content,
  };
  return tab;
}
/**
 * Format a code snippet with Prettier where possible.
 * Python is intentionally returned untouched; any other language is run
 * through Prettier with the TypeScript parser plugin, falling back to the
 * raw input whenever Prettier cannot parse it.
 * @param {string} code - The snippet to format.
 * @param {"typescript" | "python"} language - Snippet language.
 * @returns {string} The formatted code (or the original on failure).
 */
function formatCode(code, language) {
  const normalized = language.toLowerCase();
  // Python formatting is deliberately skipped at this time.
  if (normalized === "python") return code;
  try {
    return prettier.format(code, {
      parser: normalized,
      plugins: [parserTypeScript],
    });
  } catch (_) {
    // Prettier failed to parse — show the snippet unformatted.
    return code;
  }
}
export function CodeTabs({ tabs, groupId }) {
return (
<Tabs groupId={groupId} className="code-tabs">
{tabs.map((tab, index) => {
const key = `${groupId}-${index}`;
return (
<TabItem key={key} value={tab.value} label={tab.label}>
{tab.caption && (
<div
className="code-caption"
// eslint-disable-next-line react/no-danger
dangerouslySetInnerHTML={{
__html: DOMPurify.sanitize(marked.parse(tab.caption)),
}}
/>
)}
<CodeBlock
className={tab.value}
language={tab.language ?? tab.value}
>
{formatCode(tab.content, tab.language ?? tab.value)}
</CodeBlock>
</TabItem>
);
})}
</Tabs>
);
}
/**
 * Tagged-template helper producing a TypeScriptBlock.
 * Two call shapes are supported:
 *   typescript`code`                       — plain tag.
 *   typescript({ caption, label })`code`   — options form returning a tag.
 * The interpolated string is dedented before being wrapped.
 */
export const typescript = (strings, ...values) => {
  const interpolate = (parts, vals) =>
    parts.reduce((acc, part, i) => acc + part + String(vals[i] ?? ""), "");
  const isOptionsObject =
    values.length === 0 &&
    typeof strings === "object" &&
    strings != null &&
    !Array.isArray(strings);
  if (isOptionsObject) {
    const { caption, label } = strings;
    // Options form: return a tag function that closes over caption/label.
    return (innerStrings, ...innerValues) =>
      TypeScriptBlock(
        dedent(interpolate(innerStrings, innerValues)),
        caption ?? undefined,
        label ?? undefined
      );
  }
  return TypeScriptBlock(dedent(interpolate(strings, values)));
};
/**
 * Tagged-template helper producing a PythonBlock.
 * Two call shapes are supported:
 *   python`code`                       — plain tag.
 *   python({ caption, label })`code`   — options form returning a tag.
 * The interpolated string is dedented before being wrapped.
 */
export const python = (strings, ...values) => {
  const interpolate = (parts, vals) =>
    parts.reduce((acc, part, i) => acc + part + String(vals[i] ?? ""), "");
  const isOptionsObject =
    values.length === 0 &&
    typeof strings === "object" &&
    strings != null &&
    !Array.isArray(strings);
  if (isOptionsObject) {
    const { caption, label } = strings;
    // Options form: return a tag function that closes over caption/label.
    return (innerStrings, ...innerValues) =>
      PythonBlock(
        dedent(interpolate(innerStrings, innerValues)),
        caption ?? undefined,
        label ?? undefined
      );
  }
  return PythonBlock(dedent(interpolate(strings, values)));
};
/**
 * Tagged-template helper producing a ShellBlock.
 * Two call shapes are supported:
 *   shell`code`                     — plain tag.
 *   shell({ value, label })`code`   — options form returning a tag.
 * The interpolated string is dedented before being wrapped.
 */
export const shell = (strings, ...values) => {
  const interpolate = (parts, vals) =>
    parts.reduce((acc, part, i) => acc + part + String(vals[i] ?? ""), "");
  const isOptionsObject =
    values.length === 0 &&
    typeof strings === "object" &&
    strings != null &&
    !Array.isArray(strings);
  if (isOptionsObject) {
    const { value, label } = strings;
    // Options form: return a tag function that closes over value/label.
    return (innerStrings, ...innerValues) =>
      ShellBlock(
        dedent(interpolate(innerStrings, innerValues)),
        value ?? undefined,
        label ?? undefined
      );
  }
  return ShellBlock(dedent(interpolate(strings, values)));
};

View File

@@ -0,0 +1,439 @@
import CodeBlock from "@theme/CodeBlock";
import React from "react";
import TabItem from "@theme/TabItem";
import Tabs from "@theme/Tabs";
import {
CodeTabs,
PythonBlock,
ShellBlock,
TypeScriptBlock,
} from "./InstructionsWithCode";
// Returns the TypeScript snippet demonstrating auto-tracing with the
// langsmith SDK: wrapOpenAI for LLM calls plus traceable for a pipeline fn.
export const TypeScriptSDKTracingCode = () =>
`import { OpenAI } from "openai";
import { traceable } from "langsmith/traceable";
import { wrapOpenAI } from "langsmith/wrappers";\n
// Auto-trace LLM calls in-context
const client = wrapOpenAI(new OpenAI());
// Auto-trace this function
const pipeline = traceable(async (user_input) => {
const result = await client.chat.completions.create({
messages: [{ role: "user", content: user_input }],
model: "gpt-3.5-turbo",
});
return result.choices[0].message.content;
});
await pipeline("Hello, world!")
// Out: Hello there! How can I assist you today?`;
/** Renders the TypeScript SDK tracing snippet in a highlighted code block. */
export function TypeScriptSDKTracingCodeBlock() {
return (
<CodeBlock language="typescript">{TypeScriptSDKTracingCode()}</CodeBlock>
);
}
/**
 * Renders a Python snippet that traces runs via the raw LangSmith REST API
 * (POST/PATCH to api.smith.langchain.com) instead of the SDK: it creates a
 * parent "chain" run and a child "llm" run, then patches both with outputs.
 */
export function PythonAPITracingCodeBlock() {
return (
<CodeBlock language="python">
{`import openai
import requests
from datetime import datetime
from uuid import uuid4
def post_run(run_id, name, run_type, inputs, parent_id=None):
"""Function to post a new run to the API."""
data = {
"id": run_id.hex,
"name": name,
"run_type": run_type,
"inputs": inputs,
"start_time": datetime.utcnow().isoformat(),
}
if parent_id:
data["parent_run_id"] = parent_id.hex
requests.post(
"https://api.smith.langchain.com/runs",
json=data,
headers=headers
)
def patch_run(run_id, outputs):
"""Function to patch a run with outputs."""
requests.patch(
f"https://api.smith.langchain.com/runs/{run_id}",
json={
"outputs": outputs,
"end_time": datetime.utcnow().isoformat(),
},
headers=headers,
)
# Send your API Key in the request headers
headers = {"x-api-key": "<YOUR API KEY>"}
# This can be a user input to your app
question = "Can you summarize this morning's meetings?"
# This can be retrieved in a retrieval step
context = "During this morning's meeting, we solved all world conflict."
messages = [
{"role": "system", "content": "You are a helpful assistant. Please respond to the user's request only based on the given context."},
{"role": "user", "content": f"Question: {question}\\nContext: {context}"}
]
# Create parent run
parent_run_id = uuid4()
post_run(parent_run_id, "Chat Pipeline", "chain", {"question": question})
# Create child run
child_run_id = uuid4()
post_run(child_run_id, "OpenAI Call", "llm", {"messages": messages}, parent_run_id)
# Generate a completion
client = openai.Client()
chat_completion = client.chat.completions.create(model="gpt-3.5-turbo", messages=messages)
# End runs
patch_run(child_run_id, chat_completion.dict())
patch_run(parent_run_id, {"answer": chat_completion.choices[0].message.content})`}
</CodeBlock>
);
}
// Returns the Python snippet demonstrating auto-tracing with the langsmith
// SDK: wrap_openai for LLM calls plus the @traceable decorator.
export const PythonSDKTracingCode = () =>
`import openai
from langsmith.wrappers import wrap_openai
from langsmith import traceable\n
# Auto-trace LLM calls in-context
client = wrap_openai(openai.Client())\n
@traceable # Auto-trace this function
def pipeline(user_input: str):
result = client.chat.completions.create(
messages=[{"role": "user", "content": user_input}],
model="gpt-3.5-turbo"
)
return result.choices[0].message.content\n
pipeline("Hello, world!")
# Out: Hello there! How can I assist you today?`;
/** Renders the Python SDK tracing snippet in a highlighted code block. */
export function PythonSDKTracingCodeBlock() {
return <CodeBlock language="python">{PythonSDKTracingCode()}</CodeBlock>;
}
export function LangChainInstallationCodeTabs() {
return (
<CodeTabs
groupId="client-language"
tabs={[
{
value: "python",
label: "pip",
language: "bash",
content: `pip install langchain_openai langchain_core`,
},
{
value: "typescript",
label: "yarn",
language: "bash",
content: `yarn add @langchain/openai @langchain/core`,
},
{
value: "npm",
label: "npm",
language: "bash",
content: `npm install @langchain/openai @langchain/core`,
},
{
value: "pnpm",
label: "pnpm",
language: "bash",
content: `pnpm add @langchain/openai @langchain/core`,
},
]}
/>
);
}
/**
 * Shell tab showing the environment variables needed to enable tracing
 * (LANGCHAIN_TRACING_V2, LANGCHAIN_API_KEY, OPENAI_API_KEY).
 * NOTE(review): body is identical to ConfigureEnvironmentCodeTabs — consider
 * consolidating the two exports.
 */
export function ConfigureSDKEnvironmentCodeTabs({}) {
return (
<CodeTabs
tabs={[
ShellBlock(`export LANGCHAIN_TRACING_V2=true
export LANGCHAIN_API_KEY=<your-api-key>
# The below examples use the OpenAI API, though it's not necessary in general
export OPENAI_API_KEY=<your-openai-api-key>`),
]}
groupId="client-language"
/>
);
}
/**
 * Shell tab showing the environment variables needed to enable tracing
 * (LANGCHAIN_TRACING_V2, LANGCHAIN_API_KEY, OPENAI_API_KEY).
 */
export function ConfigureEnvironmentCodeTabs({}) {
return (
<CodeTabs
tabs={[
ShellBlock(`export LANGCHAIN_TRACING_V2=true
export LANGCHAIN_API_KEY=<your-api-key>
# The below examples use the OpenAI API, though it's not necessary in general
export OPENAI_API_KEY=<your-openai-api-key>`),
]}
groupId="client-language"
/>
);
}
/**
 * Quick-start tabs for tracing a simple LangChain LCEL chain
 * (prompt | model | output parser) in Python and TypeScript.
 * Tab selection is synced via the "client-language" group.
 */
export function LangChainQuickStartCodeTabs({}) {
// TypeScript variant of the LCEL chain shown in the Python tab below.
const simpleTSBlock = `import { ChatOpenAI } from "@langchain/openai";
import { ChatPromptTemplate } from "@langchain/core/prompts";
import { StringOutputParser } from "@langchain/core/output_parsers";
const prompt = ChatPromptTemplate.fromMessages([
["system", "You are a helpful assistant. Please respond to the user's request only based on the given context."],
["user", "Question: {question}\\nContext: {context}"],
]);
const model = new ChatOpenAI({ modelName: "gpt-3.5-turbo" });
const outputParser = new StringOutputParser();
const chain = prompt.pipe(model).pipe(outputParser);
const question = "Can you summarize this morning's meetings?"
const context = "During this morning's meeting, we solved all world conflict."
await chain.invoke({ question: question, context: context });`;
// Alternative shown with an explicit LangChainTracer; currently unused in
// the rendered tabs below — kept for reference. TODO confirm intent.
const alternativeTSBlock = `import { Client } from "langsmith";
import { LangChainTracer } from "langchain/callbacks";
const client = new Client({
apiUrl: "https://api.smith.langchain.com",
apiKey: "YOUR_API_KEY"
});
const tracer = new LangChainTracer({
projectName: "YOUR_PROJECT_NAME",
client
});
const model = new ChatOpenAI({
openAIApiKey: "YOUR_OPENAI_API_KEY"
});
await model.invoke("Hello, world!", { callbacks: [tracer] })`;
return (
<Tabs groupId="client-language" className="code-tabs">
<TabItem key="python" value="python" label="Python">
<CodeBlock className="python" language="python">
{`from langchain_openai import ChatOpenAI
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
prompt = ChatPromptTemplate.from_messages([
("system", "You are a helpful assistant. Please respond to the user's request only based on the given context."),
("user", "Question: {question}\\nContext: {context}")
])
model = ChatOpenAI(model="gpt-3.5-turbo")
output_parser = StrOutputParser()
chain = prompt | model | output_parser
question = "Can you summarize this morning's meetings?"
context = "During this morning's meeting, we solved all world conflict."
chain.invoke({"question": question, "context": context})`}
</CodeBlock>
</TabItem>
<TabItem key="typescript" value="typescript" label="TypeScript">
<CodeBlock className="typescript" language="typescript">
{simpleTSBlock}
</CodeBlock>
</TabItem>
</Tabs>
);
}
// Shared Python tab (module-level) demonstrating @traceable with a tool and
// a streaming chat bot; reused by several code-block components below.
const TraceableQuickStart = PythonBlock(`from typing import Any, Iterable\n
import openai
from langsmith import traceable
from langsmith.wrappers import wrap_openai\n
# Optional: wrap the openai client to add tracing directly
client = wrap_openai(openai.Client())\n\n
@traceable(run_type="tool")
def my_tool() -> str:
return "In the meeting, we solved all world conflict."\n\n
@traceable
def my_chat_bot(prompt: str) -> Iterable[str]:
tool_response = my_tool()
messages = [
{
"role": "system",
"content": f"You are an AI Assistant.\\n\\nTool response: {tool_response}",
},
{"role": "user", "content": prompt},
]
chunks = client.chat.completions.create(
model="gpt-3.5-turbo", messages=messages, stream=True
)
for chunk in chunks:
yield chunk.choices[0].delta.content\n\n
for tok in my_chat_bot("Summarize this morning's meetings."):
print(tok, end="")
# See an example run at: https://smith.langchain.com/public/3e853ad8-77ce-404d-ad4c-05726851ad0f/r`);
export function TraceableQuickStartCodeBlock({}) {
return (
<CodeBlock
className={TraceableQuickStart.value}
language={TraceableQuickStart.language ?? TraceableQuickStart.value}
>
{TraceableQuickStart.content}
</CodeBlock>
);
}
/**
 * Renders a Python snippet showing how @traceable nesting interacts with
 * ThreadPoolExecutor: the run_tree is passed explicitly via langsmith_extra
 * so child runs created in worker threads attach to the right parent.
 */
export function TraceableThreadingCodeBlock({}) {
return (
<CodeBlock
className={TraceableQuickStart.value}
language={TraceableQuickStart.language ?? TraceableQuickStart.value}
>
{`import asyncio
import datetime
from concurrent.futures import ThreadPoolExecutor
from typing import Any, Dict, List\n
import openai
from langsmith.wrappers import wrap_openai
from langsmith import traceable, RunTree\n\n
# Optional: wrap the openai client to add tracing directly
client = wrap_openai(openai.Client())\n
def call_llm(prompt: str, temperature: float = 0.0, **kwargs: Any):
"""Call a completion model."""
\n\n
@traceable(run_type="chain")
def llm_chain(user_input: str, **kwargs: Any) -> str:
"""Select the text from the openai call."""
return client.completions.create(
model="gpt-3.5-turbo-instruct", prompt=user_input, temperature=1.0, **kwargs
).choices[0].text\n\n
@traceable(run_type="llm")
def my_chat_model(messages: List[Dict], temperature: float = 0.0, **kwargs: Any):
"""Call a chat model."""
return client.chat.completions.create(
model="gpt-3.5-turbo", messages=messages, temperature=temperature, **kwargs
)\n\n
@traceable(run_type="chain")
def llm_chat_chain(user_input: str, **kwargs: Any) -> str:
"""Prepare prompt & select first choice response."""
messages = [
{
"role": "system",
"content": "You are an AI Assistant. The time is "
+ str(datetime.datetime.now()),
},
{"role": "user", "content": user_input},
]
return my_chat_model(messages=messages, **kwargs).choices[0].message.content\n\n
@traceable(run_type="chain")
# highlight-next-line
async def nested_chain(text: str, run_tree: RunTree, **kwargs: Any) -> str:
"""Example with nesting and thread pools."""
futures = []
with ThreadPoolExecutor() as thread_pool:
for i in range(2):
futures.append(
thread_pool.submit(
llm_chain,
f"Completion gather {i}: {text}",
# highlight-next-line
langsmith_extra={"run_tree": run_tree},
**kwargs,
)
)
for i in range(2):
futures.append(
thread_pool.submit(
llm_chat_chain,
f"Chat gather {i}: {text}",
# highlight-next-line
langsmith_extra={"run_tree": run_tree},
**kwargs,
)
)
return "\\n".join([future.result() for future in futures])\n\n
asyncio.run(nested_chain("Summarize meeting"))`}
</CodeBlock>
);
}
/**
 * Quick-start tabs for manual run-tree construction: the shared @traceable
 * Python tab, a "Python (Run Tree)" tab, and a TypeScript RunTree tab.
 * Tab selection is synced via the "client-language" group.
 */
export function RunTreeQuickStartCodeTabs({}) {
return (
<CodeTabs
tabs={[
TraceableQuickStart,
{
value: "python-run-tree",
label: "Python (Run Tree)",
language: "python",
content: `from langsmith.run_trees import RunTree\n
parent_run = RunTree(
name="My Chat Bot",
run_type="chain",
inputs={"text": "Summarize this morning's meetings."},
serialized={}
)\n
child_llm_run = parent_run.create_child(
name="My Proprietary LLM",
run_type="llm",
inputs={
"prompts": [
"You are an AI Assistant. Summarize this morning's meetings."
]
},
)\n
child_llm_run.end(outputs={"generations": ["Summary of the meeting..."]})
parent_run.end(outputs={"output": ["The meeting notes are as follows:..."]})\n
res = parent_run.post(exclude_child_runs=False)
res.result()`,
},
TypeScriptBlock(`import { RunTree, RunTreeConfig } from "langsmith";\n
const parentRunConfig: RunTreeConfig = {
name: "My Chat Bot",
run_type: "chain",
inputs: {
text: "Summarize this morning's meetings.",
},
serialized: {}
};\n
const parentRun = new RunTree(parentRunConfig);\n
const childLlmRun = await parentRun.createChild({
name: "My Proprietary LLM",
run_type: "llm",
inputs: {
prompts: [
"You are an AI Assistant. Summarize this morning's meetings.",
],
},
});\n
await childLlmRun.end({
outputs: {
generations: [
"Summary of the meeting...",
],
},
});\n
await parentRun.end({
outputs: {
output: ["The meeting notes are as follows:..."],
},
});\n
// False means post all nested runs as a batch
// (don't exclude child runs)
await parentRun.postRun(false);
`),
]}
groupId="client-language"
/>
);
}

View File

@@ -0,0 +1,135 @@
import React from "react";
import { CodeTabs, PythonBlock, TypeScriptBlock } from "./InstructionsWithCode";
/**
 * Extended RunTree example tabs (Python and TypeScript) showing a parent
 * chain run with LLM/tool/failing-component children, including error
 * reporting via end(error=...) and batched posting of nested runs.
 */
export function RunTreeExampleCodeTabs() {
return (
<CodeTabs
tabs={[
PythonBlock(`from langsmith.run_trees import RunTree\n
parent_run = RunTree(
name="My Chat Bot",
run_type="chain",
inputs={"text": "Summarize this morning's meetings."},
serialized={}, # Serialized representation of this chain
# project_name= "Defaults to the LANGCHAIN_PROJECT env var"
# api_url= "Defaults to the LANGCHAIN_ENDPOINT env var"
# api_key= "Defaults to the LANGCHAIN_API_KEY env var"
)
# .. My Chat Bot calls an LLM
child_llm_run = parent_run.create_child(
name="My Proprietary LLM",
run_type="llm",
inputs={
"prompts": [
"You are an AI Assistant. The time is XYZ."
" Summarize this morning's meetings."
]
},
)
child_llm_run.end(
outputs={
"generations": [
"I should use the transcript_loader tool"
" to fetch meeting_transcripts from XYZ"
]
}
)
# .. My Chat Bot takes the LLM output and calls
# a tool / function for fetching transcripts ..
child_tool_run = parent_run.create_child(
name="transcript_loader",
run_type="tool",
inputs={"date": "XYZ", "content_type": "meeting_transcripts"},
)
# The tool returns meeting notes to the chat bot
child_tool_run.end(outputs={"meetings": ["Meeting1 notes.."]})\n
child_chain_run = parent_run.create_child(
name="Unreliable Component",
run_type="tool",
inputs={"input": "Summarize these notes..."},
)\n
try:
# .... the component does work
raise ValueError("Something went wrong")
except Exception as e:
child_chain_run.end(error=f"I errored again {e}")
pass
# .. The chat agent recovers\n
parent_run.end(outputs={"output": ["The meeting notes are as follows:..."]})\n
# This posts all nested runs as a batch
res = parent_run.post(exclude_child_runs=False)
res.result()
`),
TypeScriptBlock(`import { RunTree, RunTreeConfig } from "langsmith";\n
const parentRunConfig: RunTreeConfig = {
name: "My Chat Bot",
run_type: "chain",
inputs: {
text: "Summarize this morning's meetings.",
},
serialized: {}, // Serialized representation of this chain
// session_name: "Defaults to the LANGCHAIN_PROJECT env var"
// apiUrl: "Defaults to the LANGCHAIN_ENDPOINT env var"
// apiKey: "Defaults to the LANGCHAIN_API_KEY env var"
};\n
const parentRun = new RunTree(parentRunConfig);\n
const childLlmRun = await parentRun.createChild({
name: "My Proprietary LLM",
run_type: "llm",
inputs: {
prompts: [
"You are an AI Assistant. The time is XYZ." +
" Summarize this morning's meetings.",
],
},
});\n
await childLlmRun.end({
outputs: {
generations: [
"I should use the transcript_loader tool" +
" to fetch meeting_transcripts from XYZ",
],
},
});\n
const childToolRun = await parentRun.createChild({
name: "transcript_loader",
run_type: "tool",
inputs: {
date: "XYZ",
content_type: "meeting_transcripts",
},
});\n
await childToolRun.end({
outputs: {
meetings: ["Meeting1 notes.."],
},
});\n
const childChainRun = await parentRun.createChild({
name: "Unreliable Component",
run_type: "tool",
inputs: {
input: "Summarize these notes...",
},
});\n
try {
// .... the component does work
throw new Error("Something went wrong");
} catch (e) {
await childChainRun.end({
error: \`I errored again $\{e.message}\`,
});
}\n
await parentRun.end({
outputs: {
output: ["The meeting notes are as follows:..."],
},
});\n
// False means post all nested runs as a batch
// (don't exclude child runs)
await parentRun.postRun(false);
`),
]}
groupId="client-language"
/>
);
}

View File

@@ -12,47 +12,62 @@
* bundles Infima by default. Infima is a CSS framework designed to
* work well for content-centric websites.
*/
@font-face {
font-family: 'Manrope';
src: url('/fonts/Manrope-VariableFont_wght.ttf') format('truetype');
@font-face {
font-family: "Manrope";
src: url("/fonts/Manrope-VariableFont_wght.ttf") format("truetype");
}
@font-face {
font-family: 'Public Sans';
src: url('/fonts/PublicSans-VariableFont_wght.ttf') format('truetype');
font-family: "Public Sans";
src: url("/fonts/PublicSans-VariableFont_wght.ttf") format("truetype");
}
/* You can override the default Infima variables here. */
:root {
--ifm-color-primary: #2e8555;
--ifm-color-primary-dark: #29784c;
--ifm-color-primary-darker: #277148;
--ifm-color-primary-darkest: #205d3b;
--ifm-color-primary-light: #33925d;
--ifm-color-primary-lighter: #359962;
--ifm-color-primary-lightest: #3cad6e;
--ifm-font-weight-bold: 600;
--ifm-color-primary: var(--joy-palette-primary-600);
--ifm-color-primary-dark: var(--joy-palette-primary-600);
--ifm-color-primary-darker: var(--joy-palette-primary-700);
--ifm-color-primary-darkest: var(--joy-palette-primary-800);
--ifm-color-primary-light: var(--joy-palette-primary-400);
--ifm-color-primary-lighter: var(--joy-palette-primary-200);
--ifm-color-primary-lightest: var(--joy-palette-primary-100);
--ifm-code-font-size: 95%;
--ifm-font-family-base: 'Public Sans';
--ifm-menu-link-padding-horizontal: 0.5rem;
--ifm-menu-link-padding-vertical: 0.5rem;
--doc-sidebar-width: 275px !important;
--ifm-font-family-base: "Public Sans", sans-serif;
--docusaurus-highlighted-code-line-bg: var(--joy-palette-primary-100);
}
/* For readability concerns, you should choose a lighter palette in dark mode. */
[data-theme='dark'] {
--ifm-color-primary: #25c2a0;
--ifm-color-primary-dark: #21af90;
--ifm-color-primary-darker: #1fa588;
--ifm-color-primary-darkest: #1a8870;
--ifm-color-primary-light: #29d5b0;
--ifm-color-primary-lighter: #32d8b4;
--ifm-color-primary-lightest: #4fddbf;
[data-theme="dark"] {
--ifm-color-primary: var(--joy-palette-primary-400);
--ifm-color-primary-dark: var(--joy-palette-primary-500);
--ifm-color-primary-darker: var(--joy-palette-primary-600);
--ifm-color-primary-darkest: var(--joy-palette-primary-700);
--ifm-color-primary-light: var(--joy-palette-primary-300);
--ifm-color-primary-lighter: var(--joy-palette-primary-100);
--ifm-color-primary-lightest: var(--joy-palette-primary-50);
--docusaurus-highlighted-code-line-bg: var(--joy-palette-primary-800);
}
nav, h1, h2, h3, h4 {
font-family: 'Manrope';
nav,
h1,
h2,
h3,
h4 {
font-family: "Manrope";
}
html[data-theme="dark"] {
--ifm-background-color: var(--joy-palette-background-body);
}
.navbar {
outline: 1px solid var(--ifm-color-gray-200);
}
[data-theme="dark"] .navbar {
background-color: var(--joy-palette-common-black);
outline: 1px solid var(--ifm-color-gray-800);
}
.footer__links {
@@ -68,61 +83,13 @@ nav, h1, h2, h3, h4 {
opacity: 0.6;
}
.node-only {
position: relative;
}
.node-only::after {
position: absolute;
right: 0.35rem;
top: 5px;
content: "Node.js";
background: #026e00;
color: #fff;
border-radius: 0.25rem;
padding: 0 0.5rem;
pointer-events: none;
font-size: 0.85rem;
}
.node-only-category {
position: relative;
}
.node-only-category::after {
position: absolute;
right: 3rem;
top: 5px;
content: "Node.js";
background: #026e00;
color: #fff;
border-radius: 0.25rem;
padding: 0 0.5rem;
pointer-events: none;
font-size: 0.85rem;
}
/* .theme-code-block.language-python::before {
content: "";
padding: 2px 12px;
background-color: var(--ifm-color-primary-light);
color: #ffffff;
font-weight: bold;
border-top-right-radius: 4px;
border-top-left-radius: 4px;
display: block;
margin-bottom: 12px;
font-size: 0.6em;
width: 100%;
box-sizing: border-box;
position: relative;
} */
.theme-code-block.language-python,
.theme-code-block.language-javascript,
.theme-code-block.language-js,
.theme-code-block.language-typescript,
.theme-code-block.language-ts {
.theme-code-block.language-ts,
.theme-code-block.language-shell,
.theme-code-block.language-bash {
position: relative; /* Ensure this is set so the ::before pseudo-element is positioned relative to this element */
padding-left: 4px;
border: 1px solid var(--ifm-color-primary-darkest);
@@ -132,7 +99,9 @@ nav, h1, h2, h3, h4 {
.theme-code-block.language-javascript::before,
.theme-code-block.language-js::before,
.theme-code-block.language-typescript::before,
.theme-code-block.language-ts::before {
.theme-code-block.language-ts::before,
.theme-code-block.language-shell::before,
.theme-code-block.language-bash::before {
content: "";
position: absolute;
top: 0;
@@ -145,10 +114,45 @@ nav, h1, h2, h3, h4 {
z-index: 1;
}
.menu__list {
display: flex !important;
flex-direction: column;
gap: 0.125rem;
position: relative;
}
.menu__list:not(.theme-doc-sidebar-menu) {
padding-left: 1.125rem !important;
}
.menu__list::before {
content: "";
width: 1px;
left: 0.75rem;
top: 0.125rem;
bottom: 0.125rem;
position: absolute;
background-color: var(--joy-palette-background-level2);
}
.theme-doc-sidebar-menu.menu__list::before {
display: none;
}
.menu__link {
padding-left: 0.5rem;
}
.theme-doc-sidebar-menu > .theme-doc-sidebar-item-category:not(:first-of-type),
.theme-doc-sidebar-menu > .theme-doc-sidebar-item-link,
.theme-doc-sidebar-menu > .theme-doc-sidebar-item-link.theme-doc-sidebar-item-link-level-1:not(:first-of-type) {
margin-top: 1rem;
.theme-doc-sidebar-menu
> .theme-doc-sidebar-item-link.theme-doc-sidebar-item-link-level-1:not(
:first-of-type
) {
margin-top: 0.125rem;
}
.theme-doc-sidebar-menu .theme-doc-sidebar-item-link,
@@ -167,9 +171,10 @@ nav, h1, h2, h3, h4 {
margin-top: 0;
}
.theme-doc-sidebar-item-category, .theme-doc-sidebar-menu > .theme-doc-sidebar-item-link {
font-size: 1rem;
font-weight: 700;
.theme-doc-sidebar-item-category,
.theme-doc-sidebar-item-link {
font-size: 0.9rem !important;
font-weight: 500;
}
.theme-doc-sidebar-item-category button:before {
@@ -177,8 +182,9 @@ nav, h1, h2, h3, h4 {
width: 1.25rem;
}
.theme-doc-sidebar-item-link, .theme-doc-sidebar-item-category .theme-doc-sidebar-item-category {
font-size: .9rem;
.theme-doc-sidebar-item-link,
.theme-doc-sidebar-item-category .theme-doc-sidebar-item-category {
font-size: 0.9rem;
font-weight: 500;
}
@@ -197,23 +203,25 @@ nav, h1, h2, h3, h4 {
opacity: 0.5;
}
.markdown {
line-height: 2em;
.markdown > h1 {
margin-top: 1rem;
margin-bottom: 1rem !important;
font-size: 2.5rem !important;
}
.markdown > h2 {
margin-top: 2rem;
margin-top: 2.5rem;
border-bottom-color: var(--ifm-color-primary);
border-bottom-width: 2px;
padding-bottom: 1rem;
font-size: 1.8rem !important;
}
.markdown > :not(h2) + h3 {
margin-top: 1rem;
.markdown > :not(h2) + h3 {
margin-top: 2.5rem;
}
.markdown > h4 {
margin-bottom: 0.2rem;
margin-bottom: 1rem;
font-weight: 600;
}
@@ -226,10 +234,110 @@ nav, h1, h2, h3, h4 {
font-weight: 600;
}
/* Config search: constrain the Algolia DocSearch button width in the navbar. */
.DocSearch {
width: 250px;
}
/* Flatten cards to match the LangSmith-style docs look. */
.card {
padding: 1.5rem !important;
box-shadow: none !important;
}
/* Code blocks pick up the Joy divider color instead of the Docusaurus default. */
.theme-code-block {
border-color: var(--joy-palette-divider) !important;
padding-left: 0 !important;
}
/* Hide the decorative ::before strip Docusaurus adds to code blocks. */
.theme-code-block::before {
display: none !important;
}
/* The panel following the code-tabs strip: remove its own spacing/shadow so the
   tab strip and code block read as one widget. */
.tabs-container > .code-tabs + div {
margin-top: 0 !important;
box-shadow: none !important;
}
/* Tab strip: bordered on top/sides only; the code block below supplies the
   bottom edge. Top corners rounded to match the code block radius. */
.tabs-container > .code-tabs {
border: 1px solid var(--joy-palette-divider);
border-bottom: none;
border-top-left-radius: var(--ifm-code-border-radius);
border-top-right-radius: var(--ifm-code-border-radius);
}
/* Dark-mode (OS preference): give the tab strip and caption the same dark
   background as the code block theme. */
@media (prefers-color-scheme: dark) {
.tabs-container > .code-tabs {
background: #1e1e1e;
}
.tabs-container .code-caption {
background: #1e1e1e;
}
}
/* Individual tab buttons: square corners so they sit flush in the strip. */
.tabs-container > .code-tabs > [role="tab"] {
padding: 0.5rem 1rem !important;
border-radius: 0;
}
/* Code block under a tab strip: square its top corners so it joins the strip. */
.tabs-container > .code-tabs + div .theme-code-block {
box-shadow: none !important;
border-top-left-radius: 0px !important;
border-top-right-radius: 0px !important;
}
/* Caption between the tab strip and the code block shares the same border. */
.tabs-container > .code-tabs + div .code-caption {
border: 1px solid var(--joy-palette-divider);
border-bottom: none;
padding: 1rem;
}
/* Avoid extra gap below the last caption paragraph. */
.tabs-container > .code-tabs + div .code-caption p:last-child {
margin-bottom: 0;
}
/* Outer container of a tabbed code block carries the single shared shadow.
   NOTE(review): :has() requires a reasonably modern browser — confirm the
   supported-browser matrix tolerates it (it degrades to no shadow otherwise). */
.tabs-container:has(> .code-tabs) {
box-shadow: var(--ifm-global-shadow-lw);
border-radius: var(--ifm-code-border-radius);
}
/* Animate sidebar category expand/collapse (fade + slight scale). */
.menu__list-item > .menu__list {
transition: all 0.2s ease-in-out !important;
}
.menu__list-item:not(.menu__list-item--collapsed) > .menu__list {
opacity: 1;
transform: scale(1);
}
.menu__list-item.menu__list-item--collapsed > .menu__list {
opacity: 0;
transform: scale(0.95);
}
/* Respect prefers-reduced-motion: disable the sidebar animation entirely. */
@media (prefers-reduced-motion: reduce) {
.menu__list-item > .menu__list {
transition: none !important;
opacity: 1 !important;
}
}
/* Hide the keyboard-shortcut hint badges on the DocSearch button. */
.DocSearch .DocSearch-Button-Keys {
display: none;
}
/* Horizontal rules use the Joy background palette for a softer divider. */
hr {
background-color: var(--joy-palette-background-level2) !important;
}
/* for hiding subitems in autogenerated categories in sidebar */
.hidden {
display: none !important;
}
/* for github link in navbar: dim on hover */
.header-github-link:hover {
opacity: 0.6;
}
@@ -248,6 +356,7 @@ nav, h1, h2, h3, h4 {
no-repeat;
}
/* for announcement bar style */
div[class^=announcementBar_] {
height:40px !important;
font-size: 20px !important;

View File

@@ -0,0 +1,14 @@
import { useColorScheme as useColorSchemeMui } from "@mui/joy/styles";
// Same theme logic as in smith-frontend
/**
 * Same theme logic as in smith-frontend.
 *
 * Wraps MUI Joy's useColorScheme and exposes the resolved system mode plus
 * convenience boolean flags.
 *
 * NOTE(review): this derives the flags from `systemMode`, which MUI Joy
 * populates from the OS preference — confirm that explicitly toggled (non-
 * system) modes are handled as intended upstream.
 *
 * @returns {{ mode: string | undefined, isDarkMode: boolean, isLightMode: boolean, setMode: Function }}
 */
export function useColorScheme() {
  const { systemMode, setMode } = useColorSchemeMui();
  const isDark = systemMode === "dark";

  return {
    mode: systemMode,
    isDarkMode: isDark,
    isLightMode: !isDark,
    setMode,
  };
}

View File

@@ -29,25 +29,14 @@ function Imports({ imports }) {
}
export default function CodeBlockWrapper({ children, ...props }) {
// Initialize imports as an empty array
let imports = [];
// Check if children is a string
if (typeof children === "string") {
// Search for an IMPORTS comment in the code
const match = /<!--IMPORTS:(.*?)-->\n/.exec(children);
if (match) {
imports = JSON.parse(match[1]);
children = children.replace(match[0], "");
}
} else if (children.imports) {
imports = children.imports;
return <CodeBlock {...props}>{children}</CodeBlock>;
}
return (
<>
<CodeBlock {...props}>{children}</CodeBlock>
{imports.length > 0 && <Imports imports={imports} />}
<CodeBlock {...props}>{children.content}</CodeBlock>
<Imports imports={children.imports} />
</>
);
}
}

View File

@@ -0,0 +1,18 @@
import React, { useEffect } from "react";
import ColorModeToggle from "@theme-original/ColorModeToggle";
import { useColorScheme } from "../../hooks/useColorScheme";
export default function ColorModeToggleWrapper(props) {
const { setMode } = useColorScheme();
// "value" holds the color theme. Either "light" or "dark"
const { value } = props;
// change mode based on "value" prop
// "dark" or "light" are also used for MUI
useEffect(() => {
setMode(value);
}, [value]);
return <ColorModeToggle {...props} />;
}

View File

@@ -1,21 +1,20 @@
// Swizzled class to show custom text for canary version.
// Should be removed in favor of the stock implementation.
import React from 'react';
import clsx from 'clsx';
import useDocusaurusContext from '@docusaurus/useDocusaurusContext';
import Link from '@docusaurus/Link';
import Translate from '@docusaurus/Translate';
import React from "react";
import clsx from "clsx";
import useDocusaurusContext from "@docusaurus/useDocusaurusContext";
import Link from "@docusaurus/Link";
import Translate from "@docusaurus/Translate";
import {
useActivePlugin,
useDocVersionSuggestions,
} from '@docusaurus/plugin-content-docs/client';
import {ThemeClassNames} from '@docusaurus/theme-common';
} from "@docusaurus/plugin-content-docs/client";
// eslint-disable-next-line import/no-extraneous-dependencies
import { ThemeClassNames } from "@docusaurus/theme-common";
import {
useDocsPreferredVersion,
useDocsVersion,
} from '@docusaurus/theme-common/internal';
function UnreleasedVersionLabel({siteTitle, versionMetadata}) {
} from "@docusaurus/theme-common/internal";
function UnreleasedVersionLabel({ siteTitle, versionMetadata }) {
return (
<Translate
id="theme.docs.versions.unreleasedVersionLabel"
@@ -23,14 +22,15 @@ function UnreleasedVersionLabel({siteTitle, versionMetadata}) {
values={{
siteTitle,
versionLabel: <b>{versionMetadata.label}</b>,
}}>
}}
>
{
'This is unreleased documentation for {siteTitle}\'s {versionLabel} version.'
"This is unreleased documentation for {siteTitle} {versionLabel} version."
}
</Translate>
);
}
function UnmaintainedVersionLabel({siteTitle, versionMetadata}) {
function UnmaintainedVersionLabel({ siteTitle, versionMetadata }) {
return (
<Translate
id="theme.docs.versions.unmaintainedVersionLabel"
@@ -38,9 +38,10 @@ function UnmaintainedVersionLabel({siteTitle, versionMetadata}) {
values={{
siteTitle,
versionLabel: <b>{versionMetadata.label}</b>,
}}>
}}
>
{
'This is documentation for {siteTitle} {versionLabel}, which is no longer actively maintained.'
"This is outdated documentation for {siteTitle}, which is no longer actively maintained."
}
</Translate>
);
@@ -51,10 +52,11 @@ const BannerLabelComponents = {
};
function BannerLabel(props) {
const BannerLabelComponent =
// eslint-disable-next-line react/destructuring-assignment
BannerLabelComponents[props.versionMetadata.banner];
return <BannerLabelComponent {...props} />;
}
function LatestVersionSuggestionLabel({versionLabel, to, onClick}) {
function LatestVersionSuggestionLabel({ versionLabel, to, onClick }) {
return (
<Translate
id="theme.docs.versions.latestVersionSuggestionLabel"
@@ -66,28 +68,28 @@ function LatestVersionSuggestionLabel({versionLabel, to, onClick}) {
<Link to={to} onClick={onClick}>
<Translate
id="theme.docs.versions.latestVersionLinkLabel"
description="The label used for the latest version suggestion link label">
this version
description="The label used for the latest version suggestion link label"
>
latest version
</Translate>
</Link>
</b>
),
}}>
{
'For the current stable version, see {latestVersionLink} ({versionLabel}).'
}
}}
>
{"For up-to-date documentation, see the {latestVersionLink}."}
</Translate>
);
}
function DocVersionBannerEnabled({className, versionMetadata}) {
function DocVersionBannerEnabled({ className, versionMetadata }) {
const {
siteConfig: {title: siteTitle},
siteConfig: { title: siteTitle },
} = useDocusaurusContext();
const {pluginId} = useActivePlugin({failfast: true});
const { pluginId } = useActivePlugin({ failfast: true });
const getVersionMainDoc = (version) =>
version.docs.find((doc) => doc.id === version.mainDocId);
const {savePreferredVersionName} = useDocsPreferredVersion(pluginId);
const {latestDocSuggestion, latestVersionSuggestion} =
const { savePreferredVersionName } = useDocsPreferredVersion(pluginId);
const { latestDocSuggestion, latestVersionSuggestion } =
useDocVersionSuggestions(pluginId);
// Try to link to same doc in latest version (not always possible), falling
// back to main doc of latest version
@@ -98,9 +100,10 @@ function DocVersionBannerEnabled({className, versionMetadata}) {
className={clsx(
className,
ThemeClassNames.docs.docVersionBanner,
'alert alert--warning margin-bottom--md',
"alert alert--warning margin-bottom--md"
)}
role="alert">
role="alert"
>
<div>
<BannerLabel siteTitle={siteTitle} versionMetadata={versionMetadata} />
</div>
@@ -114,71 +117,7 @@ function DocVersionBannerEnabled({className, versionMetadata}) {
</div>
);
}
function LatestDocVersionBanner({className, versionMetadata}) {
const {
siteConfig: {title: siteTitle},
} = useDocusaurusContext();
const {pluginId} = useActivePlugin({failfast: true});
const getVersionMainDoc = (version) =>
version.docs.find((doc) => doc.id === version.mainDocId);
const {savePreferredVersionName} = useDocsPreferredVersion(pluginId);
const {latestDocSuggestion, latestVersionSuggestion} =
useDocVersionSuggestions(pluginId);
// Try to link to same doc in latest version (not always possible), falling
// back to main doc of latest version
const latestVersionSuggestedDoc =
latestDocSuggestion ?? getVersionMainDoc(latestVersionSuggestion);
const canaryPath = `/docs/0.2.x/${latestVersionSuggestedDoc.path.slice("/docs/".length)}`;
return (
<div
className={clsx(
className,
ThemeClassNames.docs.docVersionBanner,
'alert alert--info margin-bottom--md',
)}
role="alert">
<div>
<Translate
id="theme.docs.versions.unmaintainedVersionLabel"
description="The label used to encourage the user to view the experimental 0.2.x version"
values={{
siteTitle,
versionLabel: <b>{versionMetadata.label}</b>,
}}>
{
'This is a stable version of documentation for {siteTitle}\'s version {versionLabel}.'
}
</Translate>
</div>
<div className="margin-top--md">
<Translate
id="theme.docs.versions.latestVersionSuggestionLabel"
description="The label used to tell the user to check the experimental version"
values={{
versionLabel: <b>{versionMetadata.label}</b>,
latestVersionLink: (
<b>
<Link to={canaryPath} onClick={() => savePreferredVersionName("0.2.x")}>
<Translate
id="theme.docs.versions.latestVersionLinkLabel"
description="The label used for the latest version suggestion link label">
this experimental version
</Translate>
</Link>
</b>
),
}}>
{
'You can also check out {latestVersionLink} for an updated experience.'
}
</Translate>
</div>
</div>
);
}
export default function DocVersionBanner({className}) {
export default function DocVersionBanner({ className }) {
const versionMetadata = useDocsVersion();
if (versionMetadata.banner) {
return (
@@ -187,15 +126,6 @@ export default function DocVersionBanner({className}) {
versionMetadata={versionMetadata}
/>
);
} else if (versionMetadata.isLast) {
// Uncomment when we are ready to direct people to new build
// return (
// <LatestDocVersionBanner
// className={className}
// versionMetadata={versionMetadata}
// />
// );
return null;
}
return null;
}

View File

@@ -98,7 +98,7 @@ function SvgThumbsDown() {
const FEEDBACK_COOKIE_PREFIX = "feedbackSent";
/** @type {Database["public"]["Enums"]["project_type"]} */
const LANGCHAIN_PROJECT_NAME = "langchain_py_docs";
const LANGCHAIN_PROJECT_NAME = "langsmith_docs";
/**
* @returns {Promise<string>}
@@ -112,7 +112,6 @@ export default function Feedback() {
const { setCookie, checkCookie } = useCookie();
const [feedbackSent, setFeedbackSent] = useState(false);
const { siteConfig } = useDocusaurusContext();
const [pathname, setPathname] = useState("");
/** @param {"good" | "bad"} feedback */
const handleFeedback = async (feedback) => {
@@ -168,7 +167,6 @@ export default function Feedback() {
// (cookies exp in 24hrs)
const cookieName = `${FEEDBACK_COOKIE_PREFIX}_${window.location.pathname}`;
setFeedbackSent(checkCookie(cookieName));
setPathname(window.location.pathname);
}
}, []);
@@ -180,18 +178,24 @@ export default function Feedback() {
paddingBottom: "10px",
paddingLeft: "22px",
paddingRight: "22px",
border: "1px solid gray",
border: "1px solid var(--joy-palette-background-level2)",
borderRadius: "6px",
gap: "10px",
cursor: "pointer",
fontSize: "16px",
fontWeight: "600",
},
onMouseEnter: (e) => (e.currentTarget.style.backgroundColor = "#f0f0f0"),
onMouseEnter: (e) =>
(e.currentTarget.style.backgroundColor =
"var(--joy-palette-background-level1)"),
onMouseLeave: (e) =>
(e.currentTarget.style.backgroundColor = "transparent"),
onMouseDown: (e) => (e.currentTarget.style.backgroundColor = "#d0d0d0"),
onMouseUp: (e) => (e.currentTarget.style.backgroundColor = "#f0f0f0"),
onMouseDown: (e) =>
(e.currentTarget.style.backgroundColor =
"var(--joy-palette-background-level2)"),
onMouseUp: (e) =>
(e.currentTarget.style.backgroundColor =
"var(--joy-palette-background-level1)"),
};
const newGithubIssueURL = pathname
@@ -246,14 +250,6 @@ export default function Feedback() {
</div>
</>
)}
<br />
<h4>
You can leave detailed feedback{" "}
<a target="_blank" href={newGithubIssueURL}>
on GitHub
</a>
.
</h4>
</div>
);
}

18
docs/src/theme/Root.js Normal file
View File

@@ -0,0 +1,18 @@
import React from "react";
import { CssVarsProvider, getInitColorSchemeScript } from "@mui/joy/styles";
import CssBaseline from "@mui/joy/CssBaseline";
export default function Root({ children }) {
return (
<>
{getInitColorSchemeScript()}
<CssBaseline />
<CssVarsProvider
defaultMode="system"
modeStorageKey="langsmith-docs-joy-mode"
>
{children}
</CssVarsProvider>
</>
);
}

View File

@@ -0,0 +1,6 @@
import React from "react";
import SearchBar from "@theme-original/SearchBar";
export default function SearchBarWrapper(props) {
return <SearchBar {...props} />;
}

File diff suppressed because it is too large Load Diff