Compare commits


6 Commits

| Author | SHA1 | Message | Date |
| :----- | :--- | :------ | :--- |
| Bagatur | 2c498f0f03 | fix | 2024-03-14 15:39:04 -07:00 |
| Bagatur | dc90181286 | fix | 2024-03-14 15:20:42 -07:00 |
| Bagatur | 54a4e2a3d3 | fix | 2024-03-14 15:07:31 -07:00 |
| Bagatur | 06eb3e4ad8 | add | 2024-03-14 14:48:23 -07:00 |
| Bagatur | ba8011f29b | dropdown | 2024-03-14 14:43:21 -07:00 |
| Bagatur | 8ca2032021 | wip: versioned docs | 2024-03-14 14:41:47 -07:00 |
1334 changed files with 294687 additions and 0 deletions

View File

@@ -82,6 +82,8 @@ const config = {
       ({
         docs: {
           sidebarPath: require.resolve("./sidebars.js"),
+          lastVersion: "current",
+          versions: {current: {label: "0.2.x", path: "0.2.x"}},
           remarkPlugins: [
             [require("@docusaurus/remark-plugin-npm2yarn"), { sync: true }],
           ],
@@ -217,6 +219,12 @@ const config = {
           },
         ]
       },
+      {
+        type: 'docsVersionDropdown',
+        position: 'left',
+        dropdownItemsAfter: [{to: '/versions', label: 'All versions'}],
+        dropdownActiveClassDisabled: true,
+      },
       {
         type: "dropdown",
         label: "🦜️🔗",
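
The two hunks above are what enable Docusaurus docs versioning for this site: the working copy of the docs is served as the "0.2.x" version, and a version dropdown is added to the navbar. Below is a consolidated sketch of just those pieces of `docusaurus.config.js` (everything else is omitted; additional versions would typically be snapshotted later with Docusaurus's `docs:version` command):

```javascript
// Sketch only: the versioning-related fragments from the diff above, pulled together.
// All other config options are omitted.
const config = {
  presets: [
    [
      "classic",
      ({
        docs: {
          sidebarPath: require.resolve("./sidebars.js"),
          // Serve the working copy of the docs as the default ("current") version...
          lastVersion: "current",
          // ...and label it 0.2.x, served under the /0.2.x/ path prefix.
          versions: { current: { label: "0.2.x", path: "0.2.x" } },
        },
      }),
    ],
  ],
  themeConfig: {
    navbar: {
      items: [
        // Version switcher in the navbar, with a trailing link to an "All versions" page.
        {
          type: "docsVersionDropdown",
          position: "left",
          dropdownItemsAfter: [{ to: "/versions", label: "All versions" }],
          dropdownActiveClassDisabled: true,
        },
      ],
    },
  },
};

module.exports = config;
```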

(Several dozen image files — binary assets, plus a few whose text diffs were suppressed for being too long — were added in this compare; the diff viewer shows only placeholders with image dimensions and file sizes, omitted here for brevity.)

View File

@@ -0,0 +1,5 @@
window.dataLayer = window.dataLayer || [];
function gtag(){dataLayer.push(arguments);}
gtag('js', new Date());
gtag('config', 'G-9B66JQQH2F');
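
The added file is the standard Google Analytics (gtag.js) bootstrap for measurement ID `G-9B66JQQH2F`; it only initializes the data layer and assumes the gtag library itself is loaded separately. As an illustration (not part of this diff, and the file path below is an assumption), one way to wire both into a Docusaurus site is the top-level `scripts` option:

```javascript
// Hypothetical wiring in docusaurus.config.js — not shown in this compare.
const config = {
  scripts: [
    // Load Google's tag library for the measurement ID used in the snippet above.
    { src: "https://www.googletagmanager.com/gtag/js?id=G-9B66JQQH2F", async: true },
    // Then load the bootstrap file added in this hunk (its repo path is assumed).
    "/js/google_analytics.js",
  ],
};

module.exports = config;
```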

(Two further image entries follow whose diffs were suppressed because one or more lines are too long; each is listed at 486 KiB.)

View File

@@ -0,0 +1,7 @@
.yarn/
node_modules/
.docusaurus
.cache-loader
docs/api

View File

@@ -0,0 +1,60 @@
[comment: For a reference example, see "docs/integrations/arxiv.md".]::
[comment: Use this template to create a new .md file in "docs/integrations/"]::
# Title_REPLACE_ME
[comment: Only one Title/H1 is allowed!]::
>
[comment: Description: After reading this description, a reader should be able to decide whether this integration is worth trying OR]::
[comment: whether to move on to the next integration doc.]::
[comment: The description should include a link to the source for further reading.]::
## Installation and Setup
[comment: Installation and Setup: all additional package installation and setup steps required (API tokens, etc.)]::
```bash
pip install package_name_REPLACE_ME
```
[comment: OR this text:]::
There isn't any special setup for it.
[comment: The following H2/## sections are named after the integration modules, like "LLM", "Text Embedding Models", etc.]::
[comment: see "Modules" on the "index.html" page]::
[comment: Each H2 section should include a link to an example (or examples) and Python code importing the integration class]::
[comment: Below are several example sections. Remove any sections you don't need and add any that are missing.]::
## LLM
See a [usage example](/docs/integrations/llms/INCLUDE_REAL_NAME).
```python
from langchain_community.llms import integration_class_REPLACE_ME
```
## Text Embedding Models
See a [usage example](/docs/integrations/text_embedding/INCLUDE_REAL_NAME).
```python
from langchain_community.embeddings import integration_class_REPLACE_ME
```
## Chat models
See a [usage example](/docs/integrations/chat/INCLUDE_REAL_NAME).
```python
from langchain_community.chat_models import integration_class_REPLACE_ME
```
## Document Loader
See a [usage example](/docs/integrations/document_loaders/INCLUDE_REAL_NAME).
```python
from langchain_community.document_loaders import integration_class_REPLACE_ME
```

View File

@@ -0,0 +1,556 @@
# Dependents
Dependents stats for `langchain-ai/langchain`
[![](https://img.shields.io/static/v1?label=Used%20by&message=41717&color=informational&logo=slickpic)](https://github.com/langchain-ai/langchain/network/dependents)
[![](https://img.shields.io/static/v1?label=Used%20by%20(public)&message=538&color=informational&logo=slickpic)](https://github.com/langchain-ai/langchain/network/dependents)
[![](https://img.shields.io/static/v1?label=Used%20by%20(private)&message=41179&color=informational&logo=slickpic)](https://github.com/langchain-ai/langchain/network/dependents)
[update: `2023-12-08`; only dependent repositories with Stars > 100]
| Repository | Stars |
| :-------- | -----: |
|[AntonOsika/gpt-engineer](https://github.com/AntonOsika/gpt-engineer) | 46514 |
|[imartinez/privateGPT](https://github.com/imartinez/privateGPT) | 44439 |
|[LAION-AI/Open-Assistant](https://github.com/LAION-AI/Open-Assistant) | 35906 |
|[hpcaitech/ColossalAI](https://github.com/hpcaitech/ColossalAI) | 35528 |
|[moymix/TaskMatrix](https://github.com/moymix/TaskMatrix) | 34342 |
|[geekan/MetaGPT](https://github.com/geekan/MetaGPT) | 31126 |
|[streamlit/streamlit](https://github.com/streamlit/streamlit) | 28911 |
|[reworkd/AgentGPT](https://github.com/reworkd/AgentGPT) | 27833 |
|[StanGirard/quivr](https://github.com/StanGirard/quivr) | 26032 |
|[OpenBB-finance/OpenBBTerminal](https://github.com/OpenBB-finance/OpenBBTerminal) | 24946 |
|[run-llama/llama_index](https://github.com/run-llama/llama_index) | 24859 |
|[jmorganca/ollama](https://github.com/jmorganca/ollama) | 20849 |
|[openai/chatgpt-retrieval-plugin](https://github.com/openai/chatgpt-retrieval-plugin) | 20249 |
|[chatchat-space/Langchain-Chatchat](https://github.com/chatchat-space/Langchain-Chatchat) | 19305 |
|[mindsdb/mindsdb](https://github.com/mindsdb/mindsdb) | 19172 |
|[PromtEngineer/localGPT](https://github.com/PromtEngineer/localGPT) | 17528 |
|[cube-js/cube](https://github.com/cube-js/cube) | 16575 |
|[mlflow/mlflow](https://github.com/mlflow/mlflow) | 16000 |
|[mudler/LocalAI](https://github.com/mudler/LocalAI) | 14067 |
|[logspace-ai/langflow](https://github.com/logspace-ai/langflow) | 13679 |
|[GaiZhenbiao/ChuanhuChatGPT](https://github.com/GaiZhenbiao/ChuanhuChatGPT) | 13648 |
|[arc53/DocsGPT](https://github.com/arc53/DocsGPT) | 13423 |
|[openai/evals](https://github.com/openai/evals) | 12649 |
|[airbytehq/airbyte](https://github.com/airbytehq/airbyte) | 12460 |
|[langgenius/dify](https://github.com/langgenius/dify) | 11859 |
|[databrickslabs/dolly](https://github.com/databrickslabs/dolly) | 10672 |
|[AIGC-Audio/AudioGPT](https://github.com/AIGC-Audio/AudioGPT) | 9437 |
|[langchain-ai/langchainjs](https://github.com/langchain-ai/langchainjs) | 9227 |
|[gventuri/pandas-ai](https://github.com/gventuri/pandas-ai) | 9203 |
|[aws/amazon-sagemaker-examples](https://github.com/aws/amazon-sagemaker-examples) | 9079 |
|[h2oai/h2ogpt](https://github.com/h2oai/h2ogpt) | 8945 |
|[PipedreamHQ/pipedream](https://github.com/PipedreamHQ/pipedream) | 7550 |
|[bentoml/OpenLLM](https://github.com/bentoml/OpenLLM) | 6957 |
|[THUDM/ChatGLM3](https://github.com/THUDM/ChatGLM3) | 6801 |
|[microsoft/promptflow](https://github.com/microsoft/promptflow) | 6776 |
|[cpacker/MemGPT](https://github.com/cpacker/MemGPT) | 6642 |
|[joshpxyne/gpt-migrate](https://github.com/joshpxyne/gpt-migrate) | 6482 |
|[zauberzeug/nicegui](https://github.com/zauberzeug/nicegui) | 6037 |
|[embedchain/embedchain](https://github.com/embedchain/embedchain) | 6023 |
|[mage-ai/mage-ai](https://github.com/mage-ai/mage-ai) | 6019 |
|[assafelovic/gpt-researcher](https://github.com/assafelovic/gpt-researcher) | 5936 |
|[sweepai/sweep](https://github.com/sweepai/sweep) | 5855 |
|[wenda-LLM/wenda](https://github.com/wenda-LLM/wenda) | 5766 |
|[zilliztech/GPTCache](https://github.com/zilliztech/GPTCache) | 5710 |
|[pdm-project/pdm](https://github.com/pdm-project/pdm) | 5665 |
|[GreyDGL/PentestGPT](https://github.com/GreyDGL/PentestGPT) | 5568 |
|[gkamradt/langchain-tutorials](https://github.com/gkamradt/langchain-tutorials) | 5507 |
|[Shaunwei/RealChar](https://github.com/Shaunwei/RealChar) | 5501 |
|[facebookresearch/llama-recipes](https://github.com/facebookresearch/llama-recipes) | 5477 |
|[serge-chat/serge](https://github.com/serge-chat/serge) | 5221 |
|[run-llama/rags](https://github.com/run-llama/rags) | 4916 |
|[openchatai/OpenChat](https://github.com/openchatai/OpenChat) | 4870 |
|[danswer-ai/danswer](https://github.com/danswer-ai/danswer) | 4774 |
|[langchain-ai/opengpts](https://github.com/langchain-ai/opengpts) | 4709 |
|[postgresml/postgresml](https://github.com/postgresml/postgresml) | 4639 |
|[MineDojo/Voyager](https://github.com/MineDojo/Voyager) | 4582 |
|[intel-analytics/BigDL](https://github.com/intel-analytics/BigDL) | 4581 |
|[yihong0618/xiaogpt](https://github.com/yihong0618/xiaogpt) | 4359 |
|[RayVentura/ShortGPT](https://github.com/RayVentura/ShortGPT) | 4357 |
|[Azure-Samples/azure-search-openai-demo](https://github.com/Azure-Samples/azure-search-openai-demo) | 4317 |
|[madawei2699/myGPTReader](https://github.com/madawei2699/myGPTReader) | 4289 |
|[apache/nifi](https://github.com/apache/nifi) | 4098 |
|[langchain-ai/chat-langchain](https://github.com/langchain-ai/chat-langchain) | 4091 |
|[aiwaves-cn/agents](https://github.com/aiwaves-cn/agents) | 4073 |
|[krishnaik06/The-Grand-Complete-Data-Science-Materials](https://github.com/krishnaik06/The-Grand-Complete-Data-Science-Materials) | 4065 |
|[khoj-ai/khoj](https://github.com/khoj-ai/khoj) | 4016 |
|[Azure/azure-sdk-for-python](https://github.com/Azure/azure-sdk-for-python) | 3941 |
|[PrefectHQ/marvin](https://github.com/PrefectHQ/marvin) | 3915 |
|[OpenBMB/ToolBench](https://github.com/OpenBMB/ToolBench) | 3799 |
|[marqo-ai/marqo](https://github.com/marqo-ai/marqo) | 3771 |
|[kyegomez/tree-of-thoughts](https://github.com/kyegomez/tree-of-thoughts) | 3688 |
|[Unstructured-IO/unstructured](https://github.com/Unstructured-IO/unstructured) | 3543 |
|[llm-workflow-engine/llm-workflow-engine](https://github.com/llm-workflow-engine/llm-workflow-engine) | 3515 |
|[shroominic/codeinterpreter-api](https://github.com/shroominic/codeinterpreter-api) | 3425 |
|[openchatai/OpenCopilot](https://github.com/openchatai/OpenCopilot) | 3418 |
|[josStorer/RWKV-Runner](https://github.com/josStorer/RWKV-Runner) | 3297 |
|[whitead/paper-qa](https://github.com/whitead/paper-qa) | 3280 |
|[homanp/superagent](https://github.com/homanp/superagent) | 3258 |
|[ParisNeo/lollms-webui](https://github.com/ParisNeo/lollms-webui) | 3199 |
|[OpenBMB/AgentVerse](https://github.com/OpenBMB/AgentVerse) | 3099 |
|[project-baize/baize-chatbot](https://github.com/project-baize/baize-chatbot) | 3090 |
|[OpenGVLab/InternGPT](https://github.com/OpenGVLab/InternGPT) | 2989 |
|[xlang-ai/OpenAgents](https://github.com/xlang-ai/OpenAgents) | 2825 |
|[dataelement/bisheng](https://github.com/dataelement/bisheng) | 2797 |
|[Mintplex-Labs/anything-llm](https://github.com/Mintplex-Labs/anything-llm) | 2784 |
|[OpenBMB/BMTools](https://github.com/OpenBMB/BMTools) | 2734 |
|[run-llama/llama-hub](https://github.com/run-llama/llama-hub) | 2721 |
|[SamurAIGPT/EmbedAI](https://github.com/SamurAIGPT/EmbedAI) | 2647 |
|[NVIDIA/NeMo-Guardrails](https://github.com/NVIDIA/NeMo-Guardrails) | 2637 |
|[X-D-Lab/LangChain-ChatGLM-Webui](https://github.com/X-D-Lab/LangChain-ChatGLM-Webui) | 2532 |
|[GerevAI/gerev](https://github.com/GerevAI/gerev) | 2517 |
|[keephq/keep](https://github.com/keephq/keep) | 2448 |
|[yanqiangmiffy/Chinese-LangChain](https://github.com/yanqiangmiffy/Chinese-LangChain) | 2397 |
|[OpenGVLab/Ask-Anything](https://github.com/OpenGVLab/Ask-Anything) | 2324 |
|[IntelligenzaArtificiale/Free-Auto-GPT](https://github.com/IntelligenzaArtificiale/Free-Auto-GPT) | 2241 |
|[YiVal/YiVal](https://github.com/YiVal/YiVal) | 2232 |
|[jupyterlab/jupyter-ai](https://github.com/jupyterlab/jupyter-ai) | 2189 |
|[Farama-Foundation/PettingZoo](https://github.com/Farama-Foundation/PettingZoo) | 2136 |
|[microsoft/TaskWeaver](https://github.com/microsoft/TaskWeaver) | 2126 |
|[hwchase17/notion-qa](https://github.com/hwchase17/notion-qa) | 2083 |
|[FlagOpen/FlagEmbedding](https://github.com/FlagOpen/FlagEmbedding) | 2053 |
|[paulpierre/RasaGPT](https://github.com/paulpierre/RasaGPT) | 1999 |
|[hegelai/prompttools](https://github.com/hegelai/prompttools) | 1984 |
|[mckinsey/vizro](https://github.com/mckinsey/vizro) | 1951 |
|[vocodedev/vocode-python](https://github.com/vocodedev/vocode-python) | 1868 |
|[dot-agent/openAMS](https://github.com/dot-agent/openAMS) | 1796 |
|[explodinggradients/ragas](https://github.com/explodinggradients/ragas) | 1766 |
|[AI-Citizen/SolidGPT](https://github.com/AI-Citizen/SolidGPT) | 1761 |
|[Kav-K/GPTDiscord](https://github.com/Kav-K/GPTDiscord) | 1696 |
|[run-llama/sec-insights](https://github.com/run-llama/sec-insights) | 1654 |
|[avinashkranjan/Amazing-Python-Scripts](https://github.com/avinashkranjan/Amazing-Python-Scripts) | 1635 |
|[microsoft/WhatTheHack](https://github.com/microsoft/WhatTheHack) | 1629 |
|[noahshinn/reflexion](https://github.com/noahshinn/reflexion) | 1625 |
|[psychic-api/psychic](https://github.com/psychic-api/psychic) | 1618 |
|[Forethought-Technologies/AutoChain](https://github.com/Forethought-Technologies/AutoChain) | 1611 |
|[pinterest/querybook](https://github.com/pinterest/querybook) | 1586 |
|[refuel-ai/autolabel](https://github.com/refuel-ai/autolabel) | 1553 |
|[jina-ai/langchain-serve](https://github.com/jina-ai/langchain-serve) | 1537 |
|[jina-ai/dev-gpt](https://github.com/jina-ai/dev-gpt) | 1522 |
|[agiresearch/OpenAGI](https://github.com/agiresearch/OpenAGI) | 1493 |
|[ttengwang/Caption-Anything](https://github.com/ttengwang/Caption-Anything) | 1484 |
|[greshake/llm-security](https://github.com/greshake/llm-security) | 1483 |
|[promptfoo/promptfoo](https://github.com/promptfoo/promptfoo) | 1480 |
|[milvus-io/bootcamp](https://github.com/milvus-io/bootcamp) | 1477 |
|[richardyc/Chrome-GPT](https://github.com/richardyc/Chrome-GPT) | 1475 |
|[melih-unsal/DemoGPT](https://github.com/melih-unsal/DemoGPT) | 1428 |
|[YORG-AI/Open-Assistant](https://github.com/YORG-AI/Open-Assistant) | 1419 |
|[101dotxyz/GPTeam](https://github.com/101dotxyz/GPTeam) | 1416 |
|[jina-ai/thinkgpt](https://github.com/jina-ai/thinkgpt) | 1408 |
|[mmz-001/knowledge_gpt](https://github.com/mmz-001/knowledge_gpt) | 1398 |
|[intel/intel-extension-for-transformers](https://github.com/intel/intel-extension-for-transformers) | 1387 |
|[Azure/azureml-examples](https://github.com/Azure/azureml-examples) | 1385 |
|[lunasec-io/lunasec](https://github.com/lunasec-io/lunasec) | 1367 |
|[eyurtsev/kor](https://github.com/eyurtsev/kor) | 1355 |
|[xusenlinzy/api-for-open-llm](https://github.com/xusenlinzy/api-for-open-llm) | 1325 |
|[griptape-ai/griptape](https://github.com/griptape-ai/griptape) | 1323 |
|[SuperDuperDB/superduperdb](https://github.com/SuperDuperDB/superduperdb) | 1290 |
|[cofactoryai/textbase](https://github.com/cofactoryai/textbase) | 1284 |
|[psychic-api/rag-stack](https://github.com/psychic-api/rag-stack) | 1260 |
|[filip-michalsky/SalesGPT](https://github.com/filip-michalsky/SalesGPT) | 1250 |
|[nod-ai/SHARK](https://github.com/nod-ai/SHARK) | 1237 |
|[pluralsh/plural](https://github.com/pluralsh/plural) | 1234 |
|[cheshire-cat-ai/core](https://github.com/cheshire-cat-ai/core) | 1194 |
|[LC1332/Chat-Haruhi-Suzumiya](https://github.com/LC1332/Chat-Haruhi-Suzumiya) | 1184 |
|[poe-platform/server-bot-quick-start](https://github.com/poe-platform/server-bot-quick-start) | 1182 |
|[microsoft/X-Decoder](https://github.com/microsoft/X-Decoder) | 1180 |
|[juncongmoo/chatllama](https://github.com/juncongmoo/chatllama) | 1171 |
|[visual-openllm/visual-openllm](https://github.com/visual-openllm/visual-openllm) | 1156 |
|[alejandro-ao/ask-multiple-pdfs](https://github.com/alejandro-ao/ask-multiple-pdfs) | 1153 |
|[ThousandBirdsInc/chidori](https://github.com/ThousandBirdsInc/chidori) | 1152 |
|[irgolic/AutoPR](https://github.com/irgolic/AutoPR) | 1137 |
|[SamurAIGPT/Camel-AutoGPT](https://github.com/SamurAIGPT/Camel-AutoGPT) | 1083 |
|[ray-project/llm-applications](https://github.com/ray-project/llm-applications) | 1080 |
|[run-llama/llama-lab](https://github.com/run-llama/llama-lab) | 1072 |
|[jiran214/GPT-vup](https://github.com/jiran214/GPT-vup) | 1041 |
|[MetaGLM/FinGLM](https://github.com/MetaGLM/FinGLM) | 1035 |
|[peterw/Chat-with-Github-Repo](https://github.com/peterw/Chat-with-Github-Repo) | 1020 |
|[Anil-matcha/ChatPDF](https://github.com/Anil-matcha/ChatPDF) | 991 |
|[langchain-ai/langserve](https://github.com/langchain-ai/langserve) | 983 |
|[THUDM/AgentTuning](https://github.com/THUDM/AgentTuning) | 976 |
|[rlancemartin/auto-evaluator](https://github.com/rlancemartin/auto-evaluator) | 975 |
|[codeacme17/examor](https://github.com/codeacme17/examor) | 964 |
|[all-in-aigc/gpts-works](https://github.com/all-in-aigc/gpts-works) | 946 |
|[Ikaros-521/AI-Vtuber](https://github.com/Ikaros-521/AI-Vtuber) | 946 |
|[microsoft/Llama-2-Onnx](https://github.com/microsoft/Llama-2-Onnx) | 898 |
|[cirediatpl/FigmaChain](https://github.com/cirediatpl/FigmaChain) | 895 |
|[ricklamers/shell-ai](https://github.com/ricklamers/shell-ai) | 893 |
|[modelscope/modelscope-agent](https://github.com/modelscope/modelscope-agent) | 893 |
|[seanpixel/Teenage-AGI](https://github.com/seanpixel/Teenage-AGI) | 886 |
|[ajndkr/lanarky](https://github.com/ajndkr/lanarky) | 880 |
|[kennethleungty/Llama-2-Open-Source-LLM-CPU-Inference](https://github.com/kennethleungty/Llama-2-Open-Source-LLM-CPU-Inference) | 872 |
|[corca-ai/EVAL](https://github.com/corca-ai/EVAL) | 846 |
|[hwchase17/chat-your-data](https://github.com/hwchase17/chat-your-data) | 841 |
|[kreneskyp/ix](https://github.com/kreneskyp/ix) | 821 |
|[Link-AGI/AutoAgents](https://github.com/Link-AGI/AutoAgents) | 820 |
|[truera/trulens](https://github.com/truera/trulens) | 794 |
|[Dataherald/dataherald](https://github.com/Dataherald/dataherald) | 788 |
|[sunlabuiuc/PyHealth](https://github.com/sunlabuiuc/PyHealth) | 783 |
|[jondurbin/airoboros](https://github.com/jondurbin/airoboros) | 783 |
|[pyspark-ai/pyspark-ai](https://github.com/pyspark-ai/pyspark-ai) | 782 |
|[confident-ai/deepeval](https://github.com/confident-ai/deepeval) | 780 |
|[billxbf/ReWOO](https://github.com/billxbf/ReWOO) | 777 |
|[langchain-ai/streamlit-agent](https://github.com/langchain-ai/streamlit-agent) | 776 |
|[akshata29/entaoai](https://github.com/akshata29/entaoai) | 771 |
|[LambdaLabsML/examples](https://github.com/LambdaLabsML/examples) | 770 |
|[getmetal/motorhead](https://github.com/getmetal/motorhead) | 768 |
|[Dicklesworthstone/swiss_army_llama](https://github.com/Dicklesworthstone/swiss_army_llama) | 757 |
|[ruoccofabrizio/azure-open-ai-embeddings-qna](https://github.com/ruoccofabrizio/azure-open-ai-embeddings-qna) | 757 |
|[msoedov/langcorn](https://github.com/msoedov/langcorn) | 754 |
|[e-johnstonn/BriefGPT](https://github.com/e-johnstonn/BriefGPT) | 753 |
|[microsoft/sample-app-aoai-chatGPT](https://github.com/microsoft/sample-app-aoai-chatGPT) | 749 |
|[explosion/spacy-llm](https://github.com/explosion/spacy-llm) | 731 |
|[MiuLab/Taiwan-LLM](https://github.com/MiuLab/Taiwan-LLM) | 716 |
|[whyiyhw/chatgpt-wechat](https://github.com/whyiyhw/chatgpt-wechat) | 702 |
|[Azure-Samples/openai](https://github.com/Azure-Samples/openai) | 692 |
|[iusztinpaul/hands-on-llms](https://github.com/iusztinpaul/hands-on-llms) | 687 |
|[safevideo/autollm](https://github.com/safevideo/autollm) | 682 |
|[OpenGenerativeAI/GenossGPT](https://github.com/OpenGenerativeAI/GenossGPT) | 669 |
|[NoDataFound/hackGPT](https://github.com/NoDataFound/hackGPT) | 663 |
|[AILab-CVC/GPT4Tools](https://github.com/AILab-CVC/GPT4Tools) | 662 |
|[langchain-ai/auto-evaluator](https://github.com/langchain-ai/auto-evaluator) | 657 |
|[yvann-ba/Robby-chatbot](https://github.com/yvann-ba/Robby-chatbot) | 639 |
|[alexanderatallah/window.ai](https://github.com/alexanderatallah/window.ai) | 635 |
|[amosjyng/langchain-visualizer](https://github.com/amosjyng/langchain-visualizer) | 630 |
|[microsoft/PodcastCopilot](https://github.com/microsoft/PodcastCopilot) | 621 |
|[aws-samples/aws-genai-llm-chatbot](https://github.com/aws-samples/aws-genai-llm-chatbot) | 616 |
|[NeumTry/NeumAI](https://github.com/NeumTry/NeumAI) | 605 |
|[namuan/dr-doc-search](https://github.com/namuan/dr-doc-search) | 599 |
|[plastic-labs/tutor-gpt](https://github.com/plastic-labs/tutor-gpt) | 595 |
|[marimo-team/marimo](https://github.com/marimo-team/marimo) | 591 |
|[yakami129/VirtualWife](https://github.com/yakami129/VirtualWife) | 586 |
|[xuwenhao/geektime-ai-course](https://github.com/xuwenhao/geektime-ai-course) | 584 |
|[jonra1993/fastapi-alembic-sqlmodel-async](https://github.com/jonra1993/fastapi-alembic-sqlmodel-async) | 573 |
|[dgarnitz/vectorflow](https://github.com/dgarnitz/vectorflow) | 568 |
|[yeagerai/yeagerai-agent](https://github.com/yeagerai/yeagerai-agent) | 564 |
|[daveebbelaar/langchain-experiments](https://github.com/daveebbelaar/langchain-experiments) | 563 |
|[traceloop/openllmetry](https://github.com/traceloop/openllmetry) | 559 |
|[Agenta-AI/agenta](https://github.com/Agenta-AI/agenta) | 546 |
|[michaelthwan/searchGPT](https://github.com/michaelthwan/searchGPT) | 545 |
|[jina-ai/agentchain](https://github.com/jina-ai/agentchain) | 544 |
|[mckaywrigley/repo-chat](https://github.com/mckaywrigley/repo-chat) | 533 |
|[marella/chatdocs](https://github.com/marella/chatdocs) | 532 |
|[opentensor/bittensor](https://github.com/opentensor/bittensor) | 532 |
|[DjangoPeng/openai-quickstart](https://github.com/DjangoPeng/openai-quickstart) | 527 |
|[freddyaboulton/gradio-tools](https://github.com/freddyaboulton/gradio-tools) | 517 |
|[sidhq/Multi-GPT](https://github.com/sidhq/Multi-GPT) | 515 |
|[alejandro-ao/langchain-ask-pdf](https://github.com/alejandro-ao/langchain-ask-pdf) | 514 |
|[sajjadium/ctf-archives](https://github.com/sajjadium/ctf-archives) | 507 |
|[continuum-llms/chatgpt-memory](https://github.com/continuum-llms/chatgpt-memory) | 502 |
|[llmOS/opencopilot](https://github.com/llmOS/opencopilot) | 495 |
|[steamship-core/steamship-langchain](https://github.com/steamship-core/steamship-langchain) | 494 |
|[mpaepper/content-chatbot](https://github.com/mpaepper/content-chatbot) | 493 |
|[langchain-ai/langchain-aiplugin](https://github.com/langchain-ai/langchain-aiplugin) | 492 |
|[logan-markewich/llama_index_starter_pack](https://github.com/logan-markewich/llama_index_starter_pack) | 483 |
|[datawhalechina/llm-universe](https://github.com/datawhalechina/llm-universe) | 475 |
|[leondz/garak](https://github.com/leondz/garak) | 464 |
|[RedisVentures/ArXivChatGuru](https://github.com/RedisVentures/ArXivChatGuru) | 461 |
|[Anil-matcha/Chatbase](https://github.com/Anil-matcha/Chatbase) | 455 |
|[Aiyu-awa/luna-ai](https://github.com/Aiyu-awa/luna-ai) | 450 |
|[DataDog/dd-trace-py](https://github.com/DataDog/dd-trace-py) | 450 |
|[Azure-Samples/miyagi](https://github.com/Azure-Samples/miyagi) | 449 |
|[poe-platform/poe-protocol](https://github.com/poe-platform/poe-protocol) | 447 |
|[onlyphantom/llm-python](https://github.com/onlyphantom/llm-python) | 446 |
|[junruxiong/IncarnaMind](https://github.com/junruxiong/IncarnaMind) | 441 |
|[CarperAI/OpenELM](https://github.com/CarperAI/OpenELM) | 441 |
|[daodao97/chatdoc](https://github.com/daodao97/chatdoc) | 437 |
|[showlab/VLog](https://github.com/showlab/VLog) | 436 |
|[wandb/weave](https://github.com/wandb/weave) | 420 |
|[QwenLM/Qwen-Agent](https://github.com/QwenLM/Qwen-Agent) | 419 |
|[huchenxucs/ChatDB](https://github.com/huchenxucs/ChatDB) | 416 |
|[jerlendds/osintbuddy](https://github.com/jerlendds/osintbuddy) | 411 |
|[monarch-initiative/ontogpt](https://github.com/monarch-initiative/ontogpt) | 408 |
|[mallorbc/Finetune_LLMs](https://github.com/mallorbc/Finetune_LLMs) | 406 |
|[JayZeeDesign/researcher-gpt](https://github.com/JayZeeDesign/researcher-gpt) | 405 |
|[rsaryev/talk-codebase](https://github.com/rsaryev/talk-codebase) | 401 |
|[langchain-ai/langsmith-cookbook](https://github.com/langchain-ai/langsmith-cookbook) | 398 |
|[mtenenholtz/chat-twitter](https://github.com/mtenenholtz/chat-twitter) | 398 |
|[morpheuslord/GPT_Vuln-analyzer](https://github.com/morpheuslord/GPT_Vuln-analyzer) | 391 |
|[MagnivOrg/prompt-layer-library](https://github.com/MagnivOrg/prompt-layer-library) | 387 |
|[JohnSnowLabs/langtest](https://github.com/JohnSnowLabs/langtest) | 384 |
|[mrwadams/attackgen](https://github.com/mrwadams/attackgen) | 381 |
|[codefuse-ai/Test-Agent](https://github.com/codefuse-ai/Test-Agent) | 380 |
|[personoids/personoids-lite](https://github.com/personoids/personoids-lite) | 379 |
|[mosaicml/examples](https://github.com/mosaicml/examples) | 378 |
|[steamship-packages/langchain-production-starter](https://github.com/steamship-packages/langchain-production-starter) | 370 |
|[FlagAI-Open/Aquila2](https://github.com/FlagAI-Open/Aquila2) | 365 |
|[Mintplex-Labs/vector-admin](https://github.com/Mintplex-Labs/vector-admin) | 365 |
|[NimbleBoxAI/ChainFury](https://github.com/NimbleBoxAI/ChainFury) | 357 |
|[BlackHC/llm-strategy](https://github.com/BlackHC/llm-strategy) | 354 |
|[lilacai/lilac](https://github.com/lilacai/lilac) | 352 |
|[preset-io/promptimize](https://github.com/preset-io/promptimize) | 351 |
|[yuanjie-ai/ChatLLM](https://github.com/yuanjie-ai/ChatLLM) | 347 |
|[andylokandy/gpt-4-search](https://github.com/andylokandy/gpt-4-search) | 346 |
|[zhoudaquan/ChatAnything](https://github.com/zhoudaquan/ChatAnything) | 343 |
|[rgomezcasas/dotfiles](https://github.com/rgomezcasas/dotfiles) | 343 |
|[tigerlab-ai/tiger](https://github.com/tigerlab-ai/tiger) | 342 |
|[HumanSignal/label-studio-ml-backend](https://github.com/HumanSignal/label-studio-ml-backend) | 334 |
|[nasa-petal/bidara](https://github.com/nasa-petal/bidara) | 334 |
|[momegas/megabots](https://github.com/momegas/megabots) | 334 |
|[Cheems-Seminar/grounded-segment-any-parts](https://github.com/Cheems-Seminar/grounded-segment-any-parts) | 330 |
|[CambioML/pykoi](https://github.com/CambioML/pykoi) | 326 |
|[Nuggt-dev/Nuggt](https://github.com/Nuggt-dev/Nuggt) | 326 |
|[wandb/edu](https://github.com/wandb/edu) | 326 |
|[Haste171/langchain-chatbot](https://github.com/Haste171/langchain-chatbot) | 324 |
|[sugarforever/LangChain-Tutorials](https://github.com/sugarforever/LangChain-Tutorials) | 322 |
|[liangwq/Chatglm_lora_multi-gpu](https://github.com/liangwq/Chatglm_lora_multi-gpu) | 321 |
|[ur-whitelab/chemcrow-public](https://github.com/ur-whitelab/chemcrow-public) | 320 |
|[itamargol/openai](https://github.com/itamargol/openai) | 318 |
|[gia-guar/JARVIS-ChatGPT](https://github.com/gia-guar/JARVIS-ChatGPT) | 304 |
|[SpecterOps/Nemesis](https://github.com/SpecterOps/Nemesis) | 302 |
|[facebookresearch/personal-timeline](https://github.com/facebookresearch/personal-timeline) | 302 |
|[hnawaz007/pythondataanalysis](https://github.com/hnawaz007/pythondataanalysis) | 301 |
|[Chainlit/cookbook](https://github.com/Chainlit/cookbook) | 300 |
|[airobotlab/KoChatGPT](https://github.com/airobotlab/KoChatGPT) | 300 |
|[GPT-Fathom/GPT-Fathom](https://github.com/GPT-Fathom/GPT-Fathom) | 299 |
|[kaarthik108/snowChat](https://github.com/kaarthik108/snowChat) | 299 |
|[kyegomez/swarms](https://github.com/kyegomez/swarms) | 296 |
|[LangStream/langstream](https://github.com/LangStream/langstream) | 295 |
|[genia-dev/GeniA](https://github.com/genia-dev/GeniA) | 294 |
|[shamspias/customizable-gpt-chatbot](https://github.com/shamspias/customizable-gpt-chatbot) | 291 |
|[TsinghuaDatabaseGroup/DB-GPT](https://github.com/TsinghuaDatabaseGroup/DB-GPT) | 290 |
|[conceptofmind/toolformer](https://github.com/conceptofmind/toolformer) | 283 |
|[sullivan-sean/chat-langchainjs](https://github.com/sullivan-sean/chat-langchainjs) | 283 |
|[AutoPackAI/beebot](https://github.com/AutoPackAI/beebot) | 282 |
|[pablomarin/GPT-Azure-Search-Engine](https://github.com/pablomarin/GPT-Azure-Search-Engine) | 282 |
|[gkamradt/LLMTest_NeedleInAHaystack](https://github.com/gkamradt/LLMTest_NeedleInAHaystack) | 280 |
|[gustavz/DataChad](https://github.com/gustavz/DataChad) | 280 |
|[Safiullah-Rahu/CSV-AI](https://github.com/Safiullah-Rahu/CSV-AI) | 278 |
|[hwchase17/chroma-langchain](https://github.com/hwchase17/chroma-langchain) | 275 |
|[AkshitIreddy/Interactive-LLM-Powered-NPCs](https://github.com/AkshitIreddy/Interactive-LLM-Powered-NPCs) | 268 |
|[ennucore/clippinator](https://github.com/ennucore/clippinator) | 267 |
|[artitw/text2text](https://github.com/artitw/text2text) | 264 |
|[anarchy-ai/LLM-VM](https://github.com/anarchy-ai/LLM-VM) | 263 |
|[wpydcr/LLM-Kit](https://github.com/wpydcr/LLM-Kit) | 262 |
|[streamlit/llm-examples](https://github.com/streamlit/llm-examples) | 262 |
|[paolorechia/learn-langchain](https://github.com/paolorechia/learn-langchain) | 262 |
|[yym68686/ChatGPT-Telegram-Bot](https://github.com/yym68686/ChatGPT-Telegram-Bot) | 261 |
|[PradipNichite/Youtube-Tutorials](https://github.com/PradipNichite/Youtube-Tutorials) | 259 |
|[radi-cho/datasetGPT](https://github.com/radi-cho/datasetGPT) | 259 |
|[ur-whitelab/exmol](https://github.com/ur-whitelab/exmol) | 259 |
|[ml6team/fondant](https://github.com/ml6team/fondant) | 254 |
|[bborn/howdoi.ai](https://github.com/bborn/howdoi.ai) | 254 |
|[rahulnyk/knowledge_graph](https://github.com/rahulnyk/knowledge_graph) | 253 |
|[recalign/RecAlign](https://github.com/recalign/RecAlign) | 248 |
|[hwchase17/langchain-streamlit-template](https://github.com/hwchase17/langchain-streamlit-template) | 248 |
|[fetchai/uAgents](https://github.com/fetchai/uAgents) | 247 |
|[arthur-ai/bench](https://github.com/arthur-ai/bench) | 247 |
|[miaoshouai/miaoshouai-assistant](https://github.com/miaoshouai/miaoshouai-assistant) | 246 |
|[RoboCoachTechnologies/GPT-Synthesizer](https://github.com/RoboCoachTechnologies/GPT-Synthesizer) | 244 |
|[langchain-ai/web-explorer](https://github.com/langchain-ai/web-explorer) | 242 |
|[kaleido-lab/dolphin](https://github.com/kaleido-lab/dolphin) | 242 |
|[PJLab-ADG/DriveLikeAHuman](https://github.com/PJLab-ADG/DriveLikeAHuman) | 241 |
|[stepanogil/autonomous-hr-chatbot](https://github.com/stepanogil/autonomous-hr-chatbot) | 238 |
|[WongSaang/chatgpt-ui-server](https://github.com/WongSaang/chatgpt-ui-server) | 236 |
|[nexus-stc/stc](https://github.com/nexus-stc/stc) | 235 |
|[yeagerai/genworlds](https://github.com/yeagerai/genworlds) | 235 |
|[Gentopia-AI/Gentopia](https://github.com/Gentopia-AI/Gentopia) | 235 |
|[alphasecio/langchain-examples](https://github.com/alphasecio/langchain-examples) | 235 |
|[grumpyp/aixplora](https://github.com/grumpyp/aixplora) | 232 |
|[shaman-ai/agent-actors](https://github.com/shaman-ai/agent-actors) | 232 |
|[darrenburns/elia](https://github.com/darrenburns/elia) | 231 |
|[orgexyz/BlockAGI](https://github.com/orgexyz/BlockAGI) | 231 |
|[handrew/browserpilot](https://github.com/handrew/browserpilot) | 226 |
|[su77ungr/CASALIOY](https://github.com/su77ungr/CASALIOY) | 225 |
|[nicknochnack/LangchainDocuments](https://github.com/nicknochnack/LangchainDocuments) | 225 |
|[dbpunk-labs/octogen](https://github.com/dbpunk-labs/octogen) | 224 |
|[langchain-ai/weblangchain](https://github.com/langchain-ai/weblangchain) | 222 |
|[CL-lau/SQL-GPT](https://github.com/CL-lau/SQL-GPT) | 222 |
|[alvarosevilla95/autolang](https://github.com/alvarosevilla95/autolang) | 221 |
|[showlab/UniVTG](https://github.com/showlab/UniVTG) | 220 |
|[edreisMD/plugnplai](https://github.com/edreisMD/plugnplai) | 219 |
|[hardbyte/qabot](https://github.com/hardbyte/qabot) | 216 |
|[microsoft/azure-openai-in-a-day-workshop](https://github.com/microsoft/azure-openai-in-a-day-workshop) | 215 |
|[Azure-Samples/chat-with-your-data-solution-accelerator](https://github.com/Azure-Samples/chat-with-your-data-solution-accelerator) | 214 |
|[amadad/agentcy](https://github.com/amadad/agentcy) | 213 |
|[snexus/llm-search](https://github.com/snexus/llm-search) | 212 |
|[afaqueumer/DocQA](https://github.com/afaqueumer/DocQA) | 206 |
|[plchld/InsightFlow](https://github.com/plchld/InsightFlow) | 205 |
|[yasyf/compress-gpt](https://github.com/yasyf/compress-gpt) | 205 |
|[benthecoder/ClassGPT](https://github.com/benthecoder/ClassGPT) | 205 |
|[voxel51/voxelgpt](https://github.com/voxel51/voxelgpt) | 204 |
|[jbrukh/gpt-jargon](https://github.com/jbrukh/gpt-jargon) | 204 |
|[emarco177/ice_breaker](https://github.com/emarco177/ice_breaker) | 204 |
|[tencentmusic/supersonic](https://github.com/tencentmusic/supersonic) | 202 |
|[Azure-Samples/azure-search-power-skills](https://github.com/Azure-Samples/azure-search-power-skills) | 202 |
|[blob42/Instrukt](https://github.com/blob42/Instrukt) | 201 |
|[langchain-ai/langsmith-sdk](https://github.com/langchain-ai/langsmith-sdk) | 200 |
|[SamPink/dev-gpt](https://github.com/SamPink/dev-gpt) | 200 |
|[ju-bezdek/langchain-decorators](https://github.com/ju-bezdek/langchain-decorators) | 198 |
|[KMnO4-zx/huanhuan-chat](https://github.com/KMnO4-zx/huanhuan-chat) | 196 |
|[Azure-Samples/jp-azureopenai-samples](https://github.com/Azure-Samples/jp-azureopenai-samples) | 192 |
|[hongbo-miao/hongbomiao.com](https://github.com/hongbo-miao/hongbomiao.com) | 190 |
|[CakeCrusher/openplugin](https://github.com/CakeCrusher/openplugin) | 190 |
|[PaddlePaddle/ERNIE-Bot-SDK](https://github.com/PaddlePaddle/ERNIE-Bot-SDK) | 189 |
|[retr0reg/Ret2GPT](https://github.com/retr0reg/Ret2GPT) | 189 |
|[AmineDiro/cria](https://github.com/AmineDiro/cria) | 187 |
|[lancedb/vectordb-recipes](https://github.com/lancedb/vectordb-recipes) | 186 |
|[vaibkumr/prompt-optimizer](https://github.com/vaibkumr/prompt-optimizer) | 185 |
|[aws-ia/ecs-blueprints](https://github.com/aws-ia/ecs-blueprints) | 184 |
|[ethanyanjiali/minChatGPT](https://github.com/ethanyanjiali/minChatGPT) | 183 |
|[MuhammadMoinFaisal/LargeLanguageModelsProjects](https://github.com/MuhammadMoinFaisal/LargeLanguageModelsProjects) | 182 |
|[shauryr/S2QA](https://github.com/shauryr/S2QA) | 181 |
|[summarizepaper/summarizepaper](https://github.com/summarizepaper/summarizepaper) | 180 |
|[NomaDamas/RAGchain](https://github.com/NomaDamas/RAGchain) | 179 |
|[pnkvalavala/repochat](https://github.com/pnkvalavala/repochat) | 179 |
|[ibiscp/LLM-IMDB](https://github.com/ibiscp/LLM-IMDB) | 177 |
|[fengyuli-dev/multimedia-gpt](https://github.com/fengyuli-dev/multimedia-gpt) | 177 |
|[langchain-ai/text-split-explorer](https://github.com/langchain-ai/text-split-explorer) | 175 |
|[iMagist486/ElasticSearch-Langchain-Chatglm2](https://github.com/iMagist486/ElasticSearch-Langchain-Chatglm2) | 175 |
|[limaoyi1/Auto-PPT](https://github.com/limaoyi1/Auto-PPT) | 175 |
|[Open-Swarm-Net/GPT-Swarm](https://github.com/Open-Swarm-Net/GPT-Swarm) | 175 |
|[morpheuslord/HackBot](https://github.com/morpheuslord/HackBot) | 174 |
|[v7labs/benchllm](https://github.com/v7labs/benchllm) | 174 |
|[Coding-Crashkurse/Langchain-Full-Course](https://github.com/Coding-Crashkurse/Langchain-Full-Course) | 174 |
|[dongyh20/Octopus](https://github.com/dongyh20/Octopus) | 173 |
|[kimtth/azure-openai-llm-vector-langchain](https://github.com/kimtth/azure-openai-llm-vector-langchain) | 173 |
|[mayooear/private-chatbot-mpt30b-langchain](https://github.com/mayooear/private-chatbot-mpt30b-langchain) | 173 |
|[zilliztech/akcio](https://github.com/zilliztech/akcio) | 172 |
|[jmpaz/promptlib](https://github.com/jmpaz/promptlib) | 172 |
|[ccurme/yolopandas](https://github.com/ccurme/yolopandas) | 172 |
|[joaomdmoura/CrewAI](https://github.com/joaomdmoura/CrewAI) | 170 |
|[katanaml/llm-mistral-invoice-cpu](https://github.com/katanaml/llm-mistral-invoice-cpu) | 170 |
|[chakkaradeep/pyCodeAGI](https://github.com/chakkaradeep/pyCodeAGI) | 170 |
|[mudler/LocalAGI](https://github.com/mudler/LocalAGI) | 167 |
|[dssjon/biblos](https://github.com/dssjon/biblos) | 165 |
|[kjappelbaum/gptchem](https://github.com/kjappelbaum/gptchem) | 165 |
|[xxw1995/chatglm3-finetune](https://github.com/xxw1995/chatglm3-finetune) | 164 |
|[ArjanCodes/examples](https://github.com/ArjanCodes/examples) | 163 |
|[AIAnytime/Llama2-Medical-Chatbot](https://github.com/AIAnytime/Llama2-Medical-Chatbot) | 163 |
|[RCGAI/SimplyRetrieve](https://github.com/RCGAI/SimplyRetrieve) | 162 |
|[langchain-ai/langchain-teacher](https://github.com/langchain-ai/langchain-teacher) | 162 |
|[menloparklab/falcon-langchain](https://github.com/menloparklab/falcon-langchain) | 162 |
|[flurb18/AgentOoba](https://github.com/flurb18/AgentOoba) | 162 |
|[homanp/vercel-langchain](https://github.com/homanp/vercel-langchain) | 161 |
|[jiran214/langup-ai](https://github.com/jiran214/langup-ai) | 160 |
|[JorisdeJong123/7-Days-of-LangChain](https://github.com/JorisdeJong123/7-Days-of-LangChain) | 160 |
|[GoogleCloudPlatform/data-analytics-golden-demo](https://github.com/GoogleCloudPlatform/data-analytics-golden-demo) | 159 |
|[positive666/Prompt-Can-Anything](https://github.com/positive666/Prompt-Can-Anything) | 159 |
|[luisroque/large_laguage_models](https://github.com/luisroque/large_laguage_models) | 159 |
|[mlops-for-all/mlops-for-all.github.io](https://github.com/mlops-for-all/mlops-for-all.github.io) | 158 |
|[wandb/wandbot](https://github.com/wandb/wandbot) | 158 |
|[elastic/elasticsearch-labs](https://github.com/elastic/elasticsearch-labs) | 157 |
|[shroominic/funcchain](https://github.com/shroominic/funcchain) | 157 |
|[deeppavlov/dream](https://github.com/deeppavlov/dream) | 156 |
|[mluogh/eastworld](https://github.com/mluogh/eastworld) | 154 |
|[georgesung/llm_qlora](https://github.com/georgesung/llm_qlora) | 154 |
|[RUC-GSAI/YuLan-Rec](https://github.com/RUC-GSAI/YuLan-Rec) | 153 |
|[KylinC/ChatFinance](https://github.com/KylinC/ChatFinance) | 152 |
|[Dicklesworthstone/llama2_aided_tesseract](https://github.com/Dicklesworthstone/llama2_aided_tesseract) | 152 |
|[c0sogi/LLMChat](https://github.com/c0sogi/LLMChat) | 152 |
|[eunomia-bpf/GPTtrace](https://github.com/eunomia-bpf/GPTtrace) | 152 |
|[ErikBjare/gptme](https://github.com/ErikBjare/gptme) | 152 |
|[Klingefjord/chatgpt-telegram](https://github.com/Klingefjord/chatgpt-telegram) | 152 |
|[RoboCoachTechnologies/ROScribe](https://github.com/RoboCoachTechnologies/ROScribe) | 151 |
|[Aggregate-Intellect/sherpa](https://github.com/Aggregate-Intellect/sherpa) | 151 |
|[3Alan/DocsMind](https://github.com/3Alan/DocsMind) | 151 |
|[tangqiaoyu/ToolAlpaca](https://github.com/tangqiaoyu/ToolAlpaca) | 150 |
|[kulltc/chatgpt-sql](https://github.com/kulltc/chatgpt-sql) | 150 |
|[mallahyari/drqa](https://github.com/mallahyari/drqa) | 150 |
|[MedalCollector/Orator](https://github.com/MedalCollector/Orator) | 149 |
|[Teahouse-Studios/akari-bot](https://github.com/Teahouse-Studios/akari-bot) | 149 |
|[realminchoi/babyagi-ui](https://github.com/realminchoi/babyagi-ui) | 148 |
|[ssheng/BentoChain](https://github.com/ssheng/BentoChain) | 148 |
|[lmstudio-ai/examples](https://github.com/lmstudio-ai/examples) | 147 |
|[solana-labs/chatgpt-plugin](https://github.com/solana-labs/chatgpt-plugin) | 147 |
|[aurelio-labs/arxiv-bot](https://github.com/aurelio-labs/arxiv-bot) | 147 |
|[Jaseci-Labs/jaseci](https://github.com/Jaseci-Labs/jaseci) | 146 |
|[menloparklab/langchain-cohere-qdrant-doc-retrieval](https://github.com/menloparklab/langchain-cohere-qdrant-doc-retrieval) | 146 |
|[trancethehuman/entities-extraction-web-scraper](https://github.com/trancethehuman/entities-extraction-web-scraper) | 144 |
|[peterw/StoryStorm](https://github.com/peterw/StoryStorm) | 144 |
|[grumpyp/chroma-langchain-tutorial](https://github.com/grumpyp/chroma-langchain-tutorial) | 144 |
|[gh18l/CrawlGPT](https://github.com/gh18l/CrawlGPT) | 142 |
|[langchain-ai/langchain-aws-template](https://github.com/langchain-ai/langchain-aws-template) | 142 |
|[yasyf/summ](https://github.com/yasyf/summ) | 141 |
|[petehunt/langchain-github-bot](https://github.com/petehunt/langchain-github-bot) | 141 |
|[hirokidaichi/wanna](https://github.com/hirokidaichi/wanna) | 140 |
|[jina-ai/fastapi-serve](https://github.com/jina-ai/fastapi-serve) | 139 |
|[zenml-io/zenml-projects](https://github.com/zenml-io/zenml-projects) | 139 |
|[jlonge4/local_llama](https://github.com/jlonge4/local_llama) | 139 |
|[smyja/blackmaria](https://github.com/smyja/blackmaria) | 138 |
|[ChuloAI/BrainChulo](https://github.com/ChuloAI/BrainChulo) | 137 |
|[log1stics/voice-generator-webui](https://github.com/log1stics/voice-generator-webui) | 137 |
|[davila7/file-gpt](https://github.com/davila7/file-gpt) | 137 |
|[dcaribou/transfermarkt-datasets](https://github.com/dcaribou/transfermarkt-datasets) | 136 |
|[ciare-robotics/world-creator](https://github.com/ciare-robotics/world-creator) | 135 |
|[Undertone0809/promptulate](https://github.com/Undertone0809/promptulate) | 134 |
|[fixie-ai/fixie-examples](https://github.com/fixie-ai/fixie-examples) | 134 |
|[run-llama/ai-engineer-workshop](https://github.com/run-llama/ai-engineer-workshop) | 133 |
|[definitive-io/code-indexer-loop](https://github.com/definitive-io/code-indexer-loop) | 131 |
|[mortium91/langchain-assistant](https://github.com/mortium91/langchain-assistant) | 131 |
|[baidubce/bce-qianfan-sdk](https://github.com/baidubce/bce-qianfan-sdk) | 130 |
|[Ngonie-x/langchain_csv](https://github.com/Ngonie-x/langchain_csv) | 130 |
|[IvanIsCoding/ResuLLMe](https://github.com/IvanIsCoding/ResuLLMe) | 130 |
|[AnchoringAI/anchoring-ai](https://github.com/AnchoringAI/anchoring-ai) | 129 |
|[Azure/business-process-automation](https://github.com/Azure/business-process-automation) | 128 |
|[athina-ai/athina-sdk](https://github.com/athina-ai/athina-sdk) | 126 |
|[thunlp/ChatEval](https://github.com/thunlp/ChatEval) | 126 |
|[prof-frink-lab/slangchain](https://github.com/prof-frink-lab/slangchain) | 126 |
|[vietanhdev/pautobot](https://github.com/vietanhdev/pautobot) | 125 |
|[awslabs/generative-ai-cdk-constructs](https://github.com/awslabs/generative-ai-cdk-constructs) | 124 |
|[sdaaron/QueryGPT](https://github.com/sdaaron/QueryGPT) | 124 |
|[rabbitmetrics/langchain-13-min](https://github.com/rabbitmetrics/langchain-13-min) | 124 |
|[AutoLLM/AutoAgents](https://github.com/AutoLLM/AutoAgents) | 122 |
|[nicknochnack/Nopenai](https://github.com/nicknochnack/Nopenai) | 122 |
|[wombyz/HormoziGPT](https://github.com/wombyz/HormoziGPT) | 122 |
|[dotvignesh/PDFChat](https://github.com/dotvignesh/PDFChat) | 122 |
|[topoteretes/PromethAI-Backend](https://github.com/topoteretes/PromethAI-Backend) | 121 |
|[nftblackmagic/flask-langchain](https://github.com/nftblackmagic/flask-langchain) | 121 |
|[vishwasg217/finsight](https://github.com/vishwasg217/finsight) | 120 |
|[snap-stanford/MLAgentBench](https://github.com/snap-stanford/MLAgentBench) | 120 |
|[Azure/app-service-linux-docs](https://github.com/Azure/app-service-linux-docs) | 120 |
|[nyanp/chat2plot](https://github.com/nyanp/chat2plot) | 120 |
|[ant4g0nist/polar](https://github.com/ant4g0nist/polar) | 119 |
|[aws-samples/cdk-eks-blueprints-patterns](https://github.com/aws-samples/cdk-eks-blueprints-patterns) | 119 |
|[aws-samples/amazon-kendra-langchain-extensions](https://github.com/aws-samples/amazon-kendra-langchain-extensions) | 119 |
|[Xueheng-Li/SynologyChatbotGPT](https://github.com/Xueheng-Li/SynologyChatbotGPT) | 119 |
|[CodeAlchemyAI/ViLT-GPT](https://github.com/CodeAlchemyAI/ViLT-GPT) | 117 |
|[Lin-jun-xiang/docGPT-langchain](https://github.com/Lin-jun-xiang/docGPT-langchain) | 117 |
|[ademakdogan/ChatSQL](https://github.com/ademakdogan/ChatSQL) | 116 |
|[aniketmaurya/llm-inference](https://github.com/aniketmaurya/llm-inference) | 115 |
|[xuwenhao/mactalk-ai-course](https://github.com/xuwenhao/mactalk-ai-course) | 115 |
|[cmooredev/RepoReader](https://github.com/cmooredev/RepoReader) | 115 |
|[abi/autocommit](https://github.com/abi/autocommit) | 115 |
|[MIDORIBIN/langchain-gpt4free](https://github.com/MIDORIBIN/langchain-gpt4free) | 114 |
|[finaldie/auto-news](https://github.com/finaldie/auto-news) | 114 |
|[Anil-matcha/Youtube-to-chatbot](https://github.com/Anil-matcha/Youtube-to-chatbot) | 114 |
|[avrabyt/MemoryBot](https://github.com/avrabyt/MemoryBot) | 114 |
|[Capsize-Games/airunner](https://github.com/Capsize-Games/airunner) | 113 |
|[atisharma/llama_farm](https://github.com/atisharma/llama_farm) | 113 |
|[mbchang/data-driven-characters](https://github.com/mbchang/data-driven-characters) | 112 |
|[fiddler-labs/fiddler-auditor](https://github.com/fiddler-labs/fiddler-auditor) | 112 |
|[dirkjbreeuwer/gpt-automated-web-scraper](https://github.com/dirkjbreeuwer/gpt-automated-web-scraper) | 111 |
|[Appointat/Chat-with-Document-s-using-ChatGPT-API-and-Text-Embedding](https://github.com/Appointat/Chat-with-Document-s-using-ChatGPT-API-and-Text-Embedding) | 111 |
|[hwchase17/langchain-gradio-template](https://github.com/hwchase17/langchain-gradio-template) | 111 |
|[artas728/spelltest](https://github.com/artas728/spelltest) | 110 |
|[NVIDIA/GenerativeAIExamples](https://github.com/NVIDIA/GenerativeAIExamples) | 109 |
|[Azure/aistudio-copilot-sample](https://github.com/Azure/aistudio-copilot-sample) | 108 |
|[codefuse-ai/codefuse-chatbot](https://github.com/codefuse-ai/codefuse-chatbot) | 108 |
|[apirrone/Memento](https://github.com/apirrone/Memento) | 108 |
|[e-johnstonn/GPT-Doc-Summarizer](https://github.com/e-johnstonn/GPT-Doc-Summarizer) | 108 |
|[salesforce/BOLAA](https://github.com/salesforce/BOLAA) | 107 |
|[Erol444/gpt4-openai-api](https://github.com/Erol444/gpt4-openai-api) | 106 |
|[linjungz/chat-with-your-doc](https://github.com/linjungz/chat-with-your-doc) | 106 |
|[crosleythomas/MirrorGPT](https://github.com/crosleythomas/MirrorGPT) | 106 |
|[panaverse/learn-generative-ai](https://github.com/panaverse/learn-generative-ai) | 105 |
|[Azure/azure-sdk-tools](https://github.com/Azure/azure-sdk-tools) | 105 |
|[malywut/gpt_examples](https://github.com/malywut/gpt_examples) | 105 |
|[ritun16/chain-of-verification](https://github.com/ritun16/chain-of-verification) | 104 |
|[langchain-ai/langchain-benchmarks](https://github.com/langchain-ai/langchain-benchmarks) | 104 |
|[lightninglabs/LangChainBitcoin](https://github.com/lightninglabs/LangChainBitcoin) | 104 |
|[flepied/second-brain-agent](https://github.com/flepied/second-brain-agent) | 103 |
|[llmapp/openai.mini](https://github.com/llmapp/openai.mini) | 102 |
|[gimlet-ai/tddGPT](https://github.com/gimlet-ai/tddGPT) | 102 |
|[jlonge4/gpt_chatwithPDF](https://github.com/jlonge4/gpt_chatwithPDF) | 102 |
|[agentification/RAFA_code](https://github.com/agentification/RAFA_code) | 101 |
|[pacman100/DHS-LLM-Workshop](https://github.com/pacman100/DHS-LLM-Workshop) | 101 |
|[aws-samples/private-llm-qa-bot](https://github.com/aws-samples/private-llm-qa-bot) | 101 |
_Generated by [github-dependents-info](https://github.com/nvuillam/github-dependents-info)_
`github-dependents-info --repo "langchain-ai/langchain" --markdownfile dependents.md --minstars 100 --sort stars`

View File

@@ -0,0 +1,50 @@
# Tutorials
## Books and Handbooks
- [Generative AI with LangChain](https://www.amazon.com/Generative-AI-LangChain-language-ChatGPT/dp/1835083463/ref=sr_1_1?crid=1GMOMH0G7GLR&keywords=generative+ai+with+langchain&qid=1703247181&sprefix=%2Caps%2C298&sr=8-1) by [Ben Auffarth](https://www.amazon.com/stores/Ben-Auffarth/author/B08JQKSZ7D?ref=ap_rdr&store_ref=ap_rdr&isDramIntegrated=true&shoppingPortalEnabled=true), ©️ 2023 Packt Publishing
- [LangChain AI Handbook](https://www.pinecone.io/learn/langchain/) by **James Briggs** and **Francisco Ingham**
- [LangChain Cheatsheet](https://pub.towardsai.net/langchain-cheatsheet-all-secrets-on-a-single-page-8be26b721cde) by **Ivan Reznikov**
## Tutorials
### [by Greg Kamradt](https://www.youtube.com/playlist?list=PLqZXAkvF1bPNQER9mLmDbntNfSpzdDIU5)
### [by Sam Witteveen](https://www.youtube.com/playlist?list=PL8motc6AQftk1Bs42EW45kwYbyJ4jOdiZ)
### [by James Briggs](https://www.youtube.com/playlist?list=PLIUOU7oqGTLieV9uTIFMm6_4PXg-hlN6F)
### [by Prompt Engineering](https://www.youtube.com/playlist?list=PLVEEucA9MYhOu89CX8H3MBZqayTbcCTMr)
### [by Mayo Oshin](https://www.youtube.com/@chatwithdata/search?query=langchain)
### [by 1 little Coder](https://www.youtube.com/playlist?list=PLpdmBGJ6ELUK-v0MK-t4wZmVEbxM5xk6L)
## Courses
### Featured courses on DeepLearning.AI
- [LangChain for LLM Application Development](https://learn.deeplearning.ai/langchain)
- [LangChain Chat with Your Data](https://learn.deeplearning.ai/langchain-chat-with-your-data)
- [Functions, Tools and Agents with LangChain](https://learn.deeplearning.ai/functions-tools-agents-langchain)
- [Build LLM Apps with LangChain.js](https://learn.deeplearning.ai/courses/build-llm-apps-with-langchain-js)
### Online courses
- [Udemy](https://www.udemy.com/courses/search/?q=langchain)
- [Pluralsight](https://www.pluralsight.com/search?q=langchain)
- [Coursera](https://www.coursera.org/search?query=langchain)
- [Maven](https://maven.com/courses?query=langchain)
- [Udacity](https://www.udacity.com/catalog/all/any-price/any-school/any-skill/any-difficulty/any-duration/any-type/relevance/page-1?searchValue=langchain)
- [LinkedIn Learning](https://www.linkedin.com/search/results/learning/?keywords=langchain)
- [edX](https://www.edx.org/search?q=langchain)
## Short Tutorials
- [by Nicholas Renotte](https://youtu.be/MlK6SIjcjE8)
- [by Patrick Loeber](https://youtu.be/LbT1yp6quS8)
- [by Rabbitmetrics](https://youtu.be/aywZrzNaKjs)
- [by Ivan Reznikov](https://medium.com/@ivanreznikov/langchain-101-course-updated-668f7b41d6cb)
## [Documentation: Use cases](/docs/use_cases)
---------------------

View File

@@ -0,0 +1,137 @@
# YouTube videos
⛓ icon marks a new addition [last update 2023-09-21]
### [Official LangChain YouTube channel](https://www.youtube.com/@LangChain)
### Introduction to LangChain with Harrison Chase, creator of LangChain
- [Building the Future with LLMs, `LangChain`, & `Pinecone`](https://youtu.be/nMniwlGyX-c) by [Pinecone](https://www.youtube.com/@pinecone-io)
- [LangChain and Weaviate with Harrison Chase and Bob van Luijt - Weaviate Podcast #36](https://youtu.be/lhby7Ql7hbk) by [Weaviate • Vector Database](https://www.youtube.com/@Weaviate)
- [LangChain Demo + Q&A with Harrison Chase](https://youtu.be/zaYTXQFR0_s?t=788) by [Full Stack Deep Learning](https://www.youtube.com/@FullStackDeepLearning)
- [LangChain Agents: Build Personal Assistants For Your Data (Q&A with Harrison Chase and Mayo Oshin)](https://youtu.be/gVkF8cwfBLI) by [Chat with data](https://www.youtube.com/@chatwithdata)
## Videos (sorted by views)
- [Using `ChatGPT` with YOUR OWN Data. This is magical. (LangChain OpenAI API)](https://youtu.be/9AXP7tCI9PI) by [TechLead](https://www.youtube.com/@TechLead)
- [First look - `ChatGPT` + `WolframAlpha` (`GPT-3.5` and Wolfram|Alpha via LangChain by James Weaver)](https://youtu.be/wYGbY811oMo) by [Dr Alan D. Thompson](https://www.youtube.com/@DrAlanDThompson)
- [LangChain explained - The hottest new Python framework](https://youtu.be/RoR4XJw8wIc) by [AssemblyAI](https://www.youtube.com/@AssemblyAI)
- [Chatbot with INFINITE MEMORY using `OpenAI` & `Pinecone` - `GPT-3`, `Embeddings`, `ADA`, `Vector DB`, `Semantic`](https://youtu.be/2xNzB7xq8nk) by [David Shapiro ~ AI](https://www.youtube.com/@DavidShapiroAutomator)
- [LangChain for LLMs is... basically just an Ansible playbook](https://youtu.be/X51N9C-OhlE) by [David Shapiro ~ AI](https://www.youtube.com/@DavidShapiroAutomator)
- [Build your own LLM Apps with LangChain & `GPT-Index`](https://youtu.be/-75p09zFUJY) by [1littlecoder](https://www.youtube.com/@1littlecoder)
- [`BabyAGI` - New System of Autonomous AI Agents with LangChain](https://youtu.be/lg3kJvf1kXo) by [1littlecoder](https://www.youtube.com/@1littlecoder)
- [Run `BabyAGI` with Langchain Agents (with Python Code)](https://youtu.be/WosPGHPObx8) by [1littlecoder](https://www.youtube.com/@1littlecoder)
- [How to Use Langchain With `Zapier` | Write and Send Email with GPT-3 | OpenAI API Tutorial](https://youtu.be/p9v2-xEa9A0) by [StarMorph AI](https://www.youtube.com/@starmorph)
- [Use Your Locally Stored Files To Get Response From GPT - `OpenAI` | Langchain | Python](https://youtu.be/NC1Ni9KS-rk) by [Shweta Lodha](https://www.youtube.com/@shweta-lodha)
- [`Langchain JS` | How to Use GPT-3, GPT-4 to Reference your own Data | `OpenAI Embeddings` Intro](https://youtu.be/veV2I-NEjaM) by [StarMorph AI](https://www.youtube.com/@starmorph)
- [The easiest way to work with large language models | Learn LangChain in 10min](https://youtu.be/kmbS6FDQh7c) by [Sophia Yang](https://www.youtube.com/@SophiaYangDS)
- [4 Autonomous AI Agents: “Westworld” simulation `BabyAGI`, `AutoGPT`, `Camel`, `LangChain`](https://youtu.be/yWbnH6inT_U) by [Sophia Yang](https://www.youtube.com/@SophiaYangDS)
- [AI CAN SEARCH THE INTERNET? Langchain Agents + OpenAI ChatGPT](https://youtu.be/J-GL0htqda8) by [tylerwhatsgood](https://www.youtube.com/@tylerwhatsgood)
- [Query Your Data with GPT-4 | Embeddings, Vector Databases | Langchain JS Knowledgebase](https://youtu.be/jRnUPUTkZmU) by [StarMorph AI](https://www.youtube.com/@starmorph)
- [`Weaviate` + LangChain for LLM apps presented by Erika Cardenas](https://youtu.be/7AGj4Td5Lgw) by [`Weaviate` • Vector Database](https://www.youtube.com/@Weaviate)
- [Langchain Overview - How to Use Langchain & `ChatGPT`](https://youtu.be/oYVYIq0lOtI) by [Python In Office](https://www.youtube.com/@pythoninoffice6568)
- [LangChain Tutorials](https://www.youtube.com/watch?v=FuqdVNB_8c0&list=PL9V0lbeJ69brU-ojMpU1Y7Ic58Tap0Cw6) by [Edrick](https://www.youtube.com/@edrickdch):
- [LangChain, Chroma DB, OpenAI Beginner Guide | ChatGPT with your PDF](https://youtu.be/FuqdVNB_8c0)
- [LangChain 101: The Complete Beginner's Guide](https://youtu.be/P3MAbZ2eMUI)
- [Custom langchain Agent & Tools with memory. Turn any `Python function` into langchain tool with Gpt 3](https://youtu.be/NIG8lXk0ULg) by [echohive](https://www.youtube.com/@echohive)
- [Building AI LLM Apps with LangChain (and more?) - LIVE STREAM](https://www.youtube.com/live/M-2Cj_2fzWI?feature=share) by [Nicholas Renotte](https://www.youtube.com/@NicholasRenotte)
- [`ChatGPT` with any `YouTube` video using langchain and `chromadb`](https://youtu.be/TQZfB2bzVwU) by [echohive](https://www.youtube.com/@echohive)
- [How to Talk to a `PDF` using LangChain and `ChatGPT`](https://youtu.be/v2i1YDtrIwk) by [Automata Learning Lab](https://www.youtube.com/@automatalearninglab)
- [Langchain Document Loaders Part 1: Unstructured Files](https://youtu.be/O5C0wfsen98) by [Merk](https://www.youtube.com/@merksworld)
- [LangChain - Prompt Templates (what all the best prompt engineers use)](https://youtu.be/1aRu8b0XNOQ) by [Nick Daigler](https://www.youtube.com/@nick_daigs)
- [LangChain. Crear aplicaciones Python impulsadas por GPT](https://youtu.be/DkW_rDndts8) by [Jesús Conde](https://www.youtube.com/@0utKast)
- [Easiest Way to Use GPT In Your Products | LangChain Basics Tutorial](https://youtu.be/fLy0VenZyGc) by [Rachel Woods](https://www.youtube.com/@therachelwoods)
- [`BabyAGI` + `GPT-4` Langchain Agent with Internet Access](https://youtu.be/wx1z_hs5P6E) by [tylerwhatsgood](https://www.youtube.com/@tylerwhatsgood)
- [Learning LLM Agents. How does it actually work? LangChain, AutoGPT & OpenAI](https://youtu.be/mb_YAABSplk) by [Arnoldas Kemeklis](https://www.youtube.com/@processusAI)
- [Get Started with LangChain in `Node.js`](https://youtu.be/Wxx1KUWJFv4) by [Developers Digest](https://www.youtube.com/@DevelopersDigest)
- [LangChain + `OpenAI` tutorial: Building a Q&A system w/ own text data](https://youtu.be/DYOU_Z0hAwo) by [Samuel Chan](https://www.youtube.com/@SamuelChan)
- [Langchain + `Zapier` Agent](https://youtu.be/yribLAb-pxA) by [Merk](https://www.youtube.com/@merksworld)
- [Connecting the Internet with `ChatGPT` (LLMs) using Langchain And Answers Your Questions](https://youtu.be/9Y0TBC63yZg) by [Kamalraj M M](https://www.youtube.com/@insightbuilder)
- [Build More Powerful LLM Applications for Business with LangChain (Beginners Guide)](https://youtu.be/sp3-WLKEcBg) by [No Code Blackbox](https://www.youtube.com/@nocodeblackbox)
- [LangFlow LLM Agent Demo for 🦜🔗LangChain](https://youtu.be/zJxDHaWt-6o) by [Cobus Greyling](https://www.youtube.com/@CobusGreylingZA)
- [Chatbot Factory: Streamline Python Chatbot Creation with LLMs and Langchain](https://youtu.be/eYer3uzrcuM) by [Finxter](https://www.youtube.com/@CobusGreylingZA)
- [LangChain Tutorial - ChatGPT mit eigenen Daten](https://youtu.be/0XDLyY90E2c) by [Coding Crashkurse](https://www.youtube.com/@codingcrashkurse6429)
- [Chat with a `CSV` | LangChain Agents Tutorial (Beginners)](https://youtu.be/tjeti5vXWOU) by [GoDataProf](https://www.youtube.com/@godataprof)
- [Introdução ao Langchain - #Cortes - Live DataHackers](https://youtu.be/fw8y5VRei5Y) by [Prof. João Gabriel Lima](https://www.youtube.com/@profjoaogabriellima)
- [LangChain: Level up `ChatGPT` !? | LangChain Tutorial Part 1](https://youtu.be/vxUGx8aZpDE) by [Code Affinity](https://www.youtube.com/@codeaffinitydev)
- [KI schreibt krasses Youtube Skript 😲😳 | LangChain Tutorial Deutsch](https://youtu.be/QpTiXyK1jus) by [SimpleKI](https://www.youtube.com/@simpleki)
- [Chat with Audio: Langchain, `Chroma DB`, OpenAI, and `Assembly AI`](https://youtu.be/Kjy7cx1r75g) by [AI Anytime](https://www.youtube.com/@AIAnytime)
- [QA over documents with Auto vector index selection with Langchain router chains](https://youtu.be/9G05qybShv8) by [echohive](https://www.youtube.com/@echohive)
- [Build your own custom LLM application with `Bubble.io` & Langchain (No Code & Beginner friendly)](https://youtu.be/O7NhQGu1m6c) by [No Code Blackbox](https://www.youtube.com/@nocodeblackbox)
- [Simple App to Question Your Docs: Leveraging `Streamlit`, `Hugging Face Spaces`, LangChain, and `Claude`!](https://youtu.be/X4YbNECRr7o) by [Chris Alexiuk](https://www.youtube.com/@chrisalexiuk)
- [LANGCHAIN AI- `ConstitutionalChainAI` + Databutton AI ASSISTANT Web App](https://youtu.be/5zIU6_rdJCU) by [Avra](https://www.youtube.com/@Avra_b)
- [LANGCHAIN AI AUTONOMOUS AGENT WEB APP - 👶 `BABY AGI` 🤖 with EMAIL AUTOMATION using `DATABUTTON`](https://youtu.be/cvAwOGfeHgw) by [Avra](https://www.youtube.com/@Avra_b)
- [The Future of Data Analysis: Using A.I. Models in Data Analysis (LangChain)](https://youtu.be/v_LIcVyg5dk) by [Absent Data](https://www.youtube.com/@absentdata)
- [Memory in LangChain | Deep dive (python)](https://youtu.be/70lqvTFh_Yg) by [Eden Marco](https://www.youtube.com/@EdenMarco)
- [9 LangChain UseCases | Beginner's Guide | 2023](https://youtu.be/zS8_qosHNMw) by [Data Science Basics](https://www.youtube.com/@datasciencebasics)
- [Use Large Language Models in Jupyter Notebook | LangChain | Agents & Indexes](https://youtu.be/JSe11L1a_QQ) by [Abhinaw Tiwari](https://www.youtube.com/@AbhinawTiwariAT)
- [How to Talk to Your Langchain Agent | `11 Labs` + `Whisper`](https://youtu.be/N4k459Zw2PU) by [VRSEN](https://www.youtube.com/@vrsen)
- [LangChain Deep Dive: 5 FUN AI App Ideas To Build Quickly and Easily](https://youtu.be/mPYEPzLkeks) by [James NoCode](https://www.youtube.com/@jamesnocode)
- [LangChain 101: Models](https://youtu.be/T6c_XsyaNSQ) by [Mckay Wrigley](https://www.youtube.com/@realmckaywrigley)
- [LangChain with JavaScript Tutorial #1 | Setup & Using LLMs](https://youtu.be/W3AoeMrg27o) by [Leon van Zyl](https://www.youtube.com/@leonvanzyl)
- [LangChain Overview & Tutorial for Beginners: Build Powerful AI Apps Quickly & Easily (ZERO CODE)](https://youtu.be/iI84yym473Q) by [James NoCode](https://www.youtube.com/@jamesnocode)
- [LangChain In Action: Real-World Use Case With Step-by-Step Tutorial](https://youtu.be/UO699Szp82M) by [Rabbitmetrics](https://www.youtube.com/@rabbitmetrics)
- [Summarizing and Querying Multiple Papers with LangChain](https://youtu.be/p_MQRWH5Y6k) by [Automata Learning Lab](https://www.youtube.com/@automatalearninglab)
- [Using Langchain (and `Replit`) through `Tana`, ask `Google`/`Wikipedia`/`Wolfram Alpha` to fill out a table](https://youtu.be/Webau9lEzoI) by [Stian Håklev](https://www.youtube.com/@StianHaklev)
- [Langchain PDF App (GUI) | Create a ChatGPT For Your `PDF` in Python](https://youtu.be/wUAUdEw5oxM) by [Alejandro AO - Software & Ai](https://www.youtube.com/@alejandro_ao)
- [Auto-GPT with LangChain 🔥 | Create Your Own Personal AI Assistant](https://youtu.be/imDfPmMKEjM) by [Data Science Basics](https://www.youtube.com/@datasciencebasics)
- [Create Your OWN Slack AI Assistant with Python & LangChain](https://youtu.be/3jFXRNn2Bu8) by [Dave Ebbelaar](https://www.youtube.com/@daveebbelaar)
- [How to Create LOCAL Chatbots with GPT4All and LangChain [Full Guide]](https://youtu.be/4p1Fojur8Zw) by [Liam Ottley](https://www.youtube.com/@LiamOttley)
- [Build a `Multilingual PDF` Search App with LangChain, `Cohere` and `Bubble`](https://youtu.be/hOrtuumOrv8) by [Menlo Park Lab](https://www.youtube.com/@menloparklab)
- [Building a LangChain Agent (code-free!) Using `Bubble` and `Flowise`](https://youtu.be/jDJIIVWTZDE) by [Menlo Park Lab](https://www.youtube.com/@menloparklab)
- [Build a LangChain-based Semantic PDF Search App with No-Code Tools Bubble and Flowise](https://youtu.be/s33v5cIeqA4) by [Menlo Park Lab](https://www.youtube.com/@menloparklab)
- [LangChain Memory Tutorial | Building a ChatGPT Clone in Python](https://youtu.be/Cwq91cj2Pnc) by [Alejandro AO - Software & Ai](https://www.youtube.com/@alejandro_ao)
- [ChatGPT For Your DATA | Chat with Multiple Documents Using LangChain](https://youtu.be/TeDgIDqQmzs) by [Data Science Basics](https://www.youtube.com/@datasciencebasics)
- [`Llama Index`: Chat with Documentation using URL Loader](https://youtu.be/XJRoDEctAwA) by [Merk](https://www.youtube.com/@merksworld)
- [Using OpenAI, LangChain, and `Gradio` to Build Custom GenAI Applications](https://youtu.be/1MsmqMg3yUc) by [David Hundley](https://www.youtube.com/@dkhundley)
- [Build AI chatbot with custom knowledge base using OpenAI API and GPT Index](https://youtu.be/vDZAZuaXf48) by [Irina Nik](https://www.youtube.com/@irina_nik)
- [Build Your Own Auto-GPT Apps with LangChain (Python Tutorial)](https://youtu.be/NYSWn1ipbgg) by [Dave Ebbelaar](https://www.youtube.com/@daveebbelaar)
- [Chat with Multiple `PDFs` | LangChain App Tutorial in Python (Free LLMs and Embeddings)](https://youtu.be/dXxQ0LR-3Hg) by [Alejandro AO - Software & Ai](https://www.youtube.com/@alejandro_ao)
- [Chat with a `CSV` | `LangChain Agents` Tutorial (Beginners)](https://youtu.be/tjeti5vXWOU) by [Alejandro AO - Software & Ai](https://www.youtube.com/@alejandro_ao)
- [Create Your Own ChatGPT with `PDF` Data in 5 Minutes (LangChain Tutorial)](https://youtu.be/au2WVVGUvc8) by [Liam Ottley](https://www.youtube.com/@LiamOttley)
- [Build a Custom Chatbot with OpenAI: `GPT-Index` & LangChain | Step-by-Step Tutorial](https://youtu.be/FIDv6nc4CgU) by [Fabrikod](https://www.youtube.com/@fabrikod)
- [`Flowise` is an open-source no-code UI visual tool to build 🦜🔗LangChain applications](https://youtu.be/CovAPtQPU0k) by [Cobus Greyling](https://www.youtube.com/@CobusGreylingZA)
- [LangChain & GPT 4 For Data Analysis: The `Pandas` Dataframe Agent](https://youtu.be/rFQ5Kmkd4jc) by [Rabbitmetrics](https://www.youtube.com/@rabbitmetrics)
- [`GirlfriendGPT` - AI girlfriend with LangChain](https://youtu.be/LiN3D1QZGQw) by [Toolfinder AI](https://www.youtube.com/@toolfinderai)
- [How to build with Langchain 10x easier | ⛓️ LangFlow & `Flowise`](https://youtu.be/Ya1oGL7ZTvU) by [AI Jason](https://www.youtube.com/@AIJasonZ)
- [Getting Started With LangChain In 20 Minutes- Build Celebrity Search Application](https://youtu.be/_FpT1cwcSLg) by [Krish Naik](https://www.youtube.com/@krishnaik06)
- ⛓ [Vector Embeddings Tutorial Code Your Own AI Assistant with `GPT-4 API` + LangChain + NLP](https://youtu.be/yfHHvmaMkcA?si=5uJhxoh2tvdnOXok) by [FreeCodeCamp.org](https://www.youtube.com/@freecodecamp)
- ⛓ [Fully LOCAL `Llama 2` Q&A with LangChain](https://youtu.be/wgYctKFnQ74?si=UX1F3W-B3MqF4-K-) by [1littlecoder](https://www.youtube.com/@1littlecoder)
- ⛓ [Fully LOCAL `Llama 2` Langchain on CPU](https://youtu.be/yhECvKMu8kM?si=IvjxwlA1c09VwHZ4) by [1littlecoder](https://www.youtube.com/@1littlecoder)
- ⛓ [Build LangChain Audio Apps with Python in 5 Minutes](https://youtu.be/7w7ysaDz2W4?si=BvdMiyHhormr2-vr) by [AssemblyAI](https://www.youtube.com/@AssemblyAI)
- ⛓ [`Voiceflow` & `Flowise`: Want to Beat Competition? New Tutorial with Real AI Chatbot](https://youtu.be/EZKkmeFwag0?si=-4dETYDHEstiK_bb) by [AI SIMP](https://www.youtube.com/@aisimp)
- ⛓ [THIS Is How You Build Production-Ready AI Apps (`LangSmith` Tutorial)](https://youtu.be/tFXm5ijih98?si=lfiqpyaivxHFyI94) by [Dave Ebbelaar](https://www.youtube.com/@daveebbelaar)
- ⛓ [Build POWERFUL LLM Bots EASILY with Your Own Data - `Embedchain` - Langchain 2.0? (Tutorial)](https://youtu.be/jE24Y_GasE8?si=0yEDZt3BK5Q-LIuF) by [WorldofAI](https://www.youtube.com/@intheworldofai)
- ⛓ [`Code Llama` powered Gradio App for Coding: Runs on CPU](https://youtu.be/AJOhV6Ryy5o?si=ouuQT6IghYlc1NEJ) by [AI Anytime](https://www.youtube.com/@AIAnytime)
- ⛓ [LangChain Complete Course in One Video | Develop LangChain (AI) Based Solutions for Your Business](https://youtu.be/j9mQd-MyIg8?si=_wlNT3nP2LpDKztZ) by [UBprogrammer](https://www.youtube.com/@UBprogrammer)
- ⛓ [How to Run `LLaMA` Locally on CPU or GPU | Python & Langchain & CTransformers Guide](https://youtu.be/SvjWDX2NqiM?si=DxFml8XeGhiLTzLV) by [Code With Prince](https://www.youtube.com/@CodeWithPrince)
- ⛓ [PyData Heidelberg #11 - TimeSeries Forecasting & LLM Langchain](https://www.youtube.com/live/Glbwb5Hxu18?si=PIEY8Raq_C9PCHuW) by [PyData](https://www.youtube.com/@PyDataTV)
- ⛓ [Prompt Engineering in Web Development | Using LangChain and Templates with OpenAI](https://youtu.be/pK6WzlTOlYw?si=fkcDQsBG2h-DM8uQ) by [Akamai Developer](https://www.youtube.com/@AkamaiDeveloper)
- ⛓ [Retrieval-Augmented Generation (RAG) using LangChain and `Pinecone` - The RAG Special Episode](https://youtu.be/J_tCD_J6w3s?si=60Mnr5VD9UED9bGG) by [Generative AI and Data Science On AWS](https://www.youtube.com/@GenerativeAIDataScienceOnAWS)
- ⛓ [`LLAMA2 70b-chat` Multiple Documents Chatbot with Langchain & Streamlit |All OPEN SOURCE|Replicate API](https://youtu.be/vhghB81vViM?si=dszzJnArMeac7lyc) by [DataInsightEdge](https://www.youtube.com/@DataInsightEdge01)
- ⛓ [Chatting with 44K Fashion Products: LangChain Opportunities and Pitfalls](https://youtu.be/Zudgske0F_s?si=8HSshHoEhh0PemJA) by [Rabbitmetrics](https://www.youtube.com/@rabbitmetrics)
- ⛓ [Structured Data Extraction from `ChatGPT` with LangChain](https://youtu.be/q1lYg8JISpQ?si=0HctzOHYZvq62sve) by [MG](https://www.youtube.com/@MG_cafe)
- ⛓ [Chat with Multiple PDFs using `Llama 2`, `Pinecone` and LangChain (Free LLMs and Embeddings)](https://youtu.be/TcJ_tVSGS4g?si=FZYnMDJyoFfL3Z2i) by [Muhammad Moin](https://www.youtube.com/@muhammadmoinfaisal)
- ⛓ [Integrate Audio into `LangChain.js` apps in 5 Minutes](https://youtu.be/hNpUSaYZIzs?si=Gb9h7W9A8lzfvFKi) by [AssemblyAI](https://www.youtube.com/@AssemblyAI)
- ⛓ [`ChatGPT` for your data with Local LLM](https://youtu.be/bWrjpwhHEMU?si=uM6ZZ18z9og4M90u) by [Jacob Jedryszek](https://www.youtube.com/@jj09)
- ⛓ [Training `Chatgpt` with your personal data using langchain step by step in detail](https://youtu.be/j3xOMde2v9Y?si=179HsiMU-hEPuSs4) by [NextGen Machines](https://www.youtube.com/@MayankGupta-kb5yc)
- ⛓ [Use ANY language in `LangSmith` with REST](https://youtu.be/7BL0GEdMmgY?si=iXfOEdBLqXF6hqRM) by [Nerding I/O](https://www.youtube.com/@nerding_io)
- ⛓ [How to Leverage the Full Potential of LLMs for Your Business with Langchain - Leon Ruddat](https://youtu.be/vZmoEa7oWMg?si=ZhMmydq7RtkZd56Q) by [PyData](https://www.youtube.com/@PyDataTV)
- ⛓ [`ChatCSV` App: Chat with CSV files using LangChain and `Llama 2`](https://youtu.be/PvsMg6jFs8E?si=Qzg5u5gijxj933Ya) by [Muhammad Moin](https://www.youtube.com/@muhammadmoinfaisal)
- ⛓ [Build Chat PDF app in Python with LangChain, OpenAI, Streamlit | Full project | Learn Coding](https://www.youtube.com/watch?v=WYzFzZg4YZI) by [Jutsupoint](https://www.youtube.com/@JutsuPoint)
- ⛓ [Build Eminem Bot App with LangChain, Streamlit, OpenAI | Full Python Project | Tutorial | AI ChatBot](https://www.youtube.com/watch?v=a2shHB4MRZ4) by [Jutsupoint](https://www.youtube.com/@JutsuPoint)
### [Prompt Engineering and LangChain](https://www.youtube.com/watch?v=muXbPpG_ys4&list=PLEJK-H61Xlwzm5FYLDdKt_6yibO33zoMW) by [Venelin Valkov](https://www.youtube.com/@venelin_valkov)
- [Getting Started with LangChain: Load Custom Data, Run OpenAI Models, Embeddings and `ChatGPT`](https://www.youtube.com/watch?v=muXbPpG_ys4)
- [Loaders, Indexes & Vectorstores in LangChain: Question Answering on `PDF` files with `ChatGPT`](https://www.youtube.com/watch?v=FQnvfR8Dmr0)
- [LangChain Models: `ChatGPT`, `Flan Alpaca`, `OpenAI Embeddings`, Prompt Templates & Streaming](https://www.youtube.com/watch?v=zy6LiK5F5-s)
- [LangChain Chains: Use `ChatGPT` to Build Conversational Agents, Summaries and Q&A on Text With LLMs](https://www.youtube.com/watch?v=h1tJZQPcimM)
- [Analyze Custom CSV Data with `GPT-4` using Langchain](https://www.youtube.com/watch?v=Ew3sGdX8at4)
- [Build ChatGPT Chatbots with LangChain Memory: Understanding and Implementing Memory in Conversations](https://youtu.be/CyuUlf54wTs)
---------------------
⛓ icon marks a new addition [last update 2024-02-04]

View File

@@ -0,0 +1,27 @@
# langchain-core
## 0.1.7 (Jan 5, 2024)
#### Deleted
No deletions.
#### Deprecated
- `BaseChatModel` methods `__call__`, `call_as_llm`, `predict`, `predict_messages`. Will be removed in 0.2.0. Use `BaseChatModel.invoke` instead.
- `BaseChatModel` methods `apredict`, `apredict_messages`. Will be removed in 0.2.0. Use `BaseChatModel.ainvoke` instead.
- `BaseLLM` methods `__call__`, `predict`, `predict_messages`. Will be removed in 0.2.0. Use `BaseLLM.invoke` instead.
- `BaseLLM` methods `apredict`, `apredict_messages`. Will be removed in 0.2.0. Use `BaseLLM.ainvoke` instead.
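For example (assuming a chat model integration such as `langchain_openai` is installed), the migration is typically a one-line change:
```python
from langchain_openai import ChatOpenAI  # any BaseChatModel implementation works

chat = ChatOpenAI()

# Deprecated, removed in 0.2.0:
# text = chat.predict("Tell me a parrot joke")

# Preferred:
message = chat.invoke("Tell me a parrot joke")  # returns an AIMessage
print(message.content)
```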
#### Fixed
- Restrict recursive URL scraping: [#15559](https://github.com/langchain-ai/langchain/pull/15559)
#### Added
No additions.
#### Beta
- Marked `langchain_core.load.load` and `langchain_core.load.loads` as beta.
- Marked `langchain_core.beta.runnables.context.ContextGet` and `langchain_core.beta.runnables.context.ContextSet` as beta.

View File

@@ -0,0 +1,36 @@
# langchain
## 0.1.0 (Jan 5, 2024)
#### Deleted
No deletions.
#### Deprecated
Deprecated classes and methods will be removed in 0.2.0
| Deprecated | Alternative | Reason |
|---------------------------------|-----------------------------------|------------------------------------------------|
| ChatVectorDBChain | ConversationalRetrievalChain | More general to all retrievers |
| create_ernie_fn_chain | create_ernie_fn_runnable | Use LCEL under the hood |
| created_structured_output_chain | create_structured_output_runnable | Use LCEL under the hood |
| NatBotChain | | Not used |
| create_openai_fn_chain | create_openai_fn_runnable | Use LCEL under the hood |
| create_structured_output_chain | create_structured_output_runnable | Use LCEL under the hood |
| load_query_constructor_chain | load_query_constructor_runnable | Use LCEL under the hood |
| VectorDBQA | RetrievalQA | More general to all retrievers |
| SequentialChain                 | LCEL                              | Obviated by LCEL                                |
| SimpleSequentialChain | LCEL | Obviated by LCEL |
| TransformChain | LCEL/RunnableLambda | Obviated by LCEL |
| create_tagging_chain | create_structured_output_runnable | Use LCEL under the hood |
| ChatAgent | create_react_agent | Use LCEL builder over a class |
| ConversationalAgent | create_react_agent | Use LCEL builder over a class |
| ConversationalChatAgent | create_json_chat_agent | Use LCEL builder over a class |
| initialize_agent | Individual create agent methods | Individual create agent methods are more clear |
| ZeroShotAgent | create_react_agent | Use LCEL builder over a class |
| OpenAIFunctionsAgent | create_openai_functions_agent | Use LCEL builder over a class |
| OpenAIMultiFunctionsAgent | create_openai_tools_agent | Use LCEL builder over a class |
| SelfAskWithSearchAgent | create_self_ask_with_search | Use LCEL builder over a class |
| StructuredChatAgent | create_structured_chat_agent | Use LCEL builder over a class |
| XMLAgent | create_xml_agent | Use LCEL builder over a class |
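As a concrete illustration of the LCEL migrations above, a pipeline previously written with `SimpleSequentialChain` or `TransformChain` can usually be expressed directly as a runnable composition. The sketch below assumes the `langchain_openai` integration is installed; any chat model works:
```python
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI

# prompt -> model -> parser, composed with LCEL instead of a Chain subclass
prompt = ChatPromptTemplate.from_template("Write a one-line slogan for {product}.")
chain = prompt | ChatOpenAI() | StrOutputParser()

print(chain.invoke({"product": "a parrot-themed chat app"}))
```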

View File

@@ -0,0 +1,250 @@
---
sidebar_position: 1
---
# Contribute Code
To contribute to this project, please follow the ["fork and pull request"](https://docs.github.com/en/get-started/quickstart/contributing-to-projects) workflow.
Please do not try to push directly to this repo unless you are a maintainer.
Please follow the checked-in pull request template when opening pull requests. Note related issues and tag relevant
maintainers.
Pull requests cannot land without passing the formatting, linting, and testing checks first. See [Testing](#testing) and
[Formatting and Linting](#formatting-and-linting) for how to run these checks locally.
It's essential that we maintain great documentation and testing. If you:
- Fix a bug
  - Add a relevant unit or integration test when possible. These live in `tests/unit_tests` and `tests/integration_tests`.
- Make an improvement
  - Update any affected example notebooks and documentation. These live in `docs`.
  - Update unit and integration tests when relevant.
- Add a feature
  - Add a demo notebook in `docs/docs/`.
  - Add unit and integration tests.
We are a small, progress-oriented team. If there's something you'd like to add or change, opening a pull request is the
best way to get our attention.
## 🚀 Quick Start
This quick start guide explains how to run the repository locally.
For a [development container](https://containers.dev/), see the [.devcontainer folder](https://github.com/langchain-ai/langchain/tree/master/.devcontainer).
### Dependency Management: Poetry and other env/dependency managers
This project utilizes [Poetry](https://python-poetry.org/) v1.7.1+ as a dependency manager.
❗Note: *Before installing Poetry*, if you use `Conda`, create and activate a new Conda env (e.g. `conda create -n langchain python=3.9`)
Install Poetry: **[documentation on how to install it](https://python-poetry.org/docs/#installation)**.
❗Note: If you use `Conda` or `Pyenv` as your environment/package manager, after installing Poetry,
tell Poetry to use the virtualenv python environment (`poetry config virtualenvs.prefer-active-python true`)
### Different packages
This repository contains multiple packages:
- `langchain-core`: Base interfaces for key abstractions as well as logic for combining them in chains (LangChain Expression Language).
- `langchain-community`: Third-party integrations of various components.
- `langchain`: Chains, agents, and retrieval logic that makes up the cognitive architecture of your applications.
- `langchain-experimental`: Components and chains that are experimental, either in the sense that the techniques are novel and still being tested, or they require giving the LLM more access than would be possible in most production systems.
- Partner integrations: Partner packages in `libs/partners` that are independently version controlled.
Each of these has its own development environment. Docs are run from the top-level makefile, but development
is split across separate test & release flows.
For this quickstart, start with langchain-community:
```bash
cd libs/community
```
### Local Development Dependencies
Install langchain-community development requirements (for running langchain, running examples, linting, formatting, tests, and coverage):
```bash
poetry install --with lint,typing,test,test_integration
```
Then verify dependency installation:
```bash
make test
```
If during installation you receive a `WheelFileValidationError` for `debugpy`, please make sure you are running
Poetry v1.6.1+. This bug was present in older versions of Poetry (e.g. 1.4.1) and has been resolved in newer releases.
If you are still seeing this bug on v1.6.1+, you may also try disabling "modern installation"
(`poetry config installer.modern-installation false`) and re-installing requirements.
See [this `debugpy` issue](https://github.com/microsoft/debugpy/issues/1246) for more details.
### Testing
_In `langchain`, `langchain-community`, and `langchain-experimental`, some test dependencies are optional; see section about optional dependencies_.
Unit tests cover modular logic that does not require calls to outside APIs.
If you add new logic, please add a unit test.
To run unit tests:
```bash
make test
```
To run unit tests in Docker:
```bash
make docker_tests
```
There are also [integration tests and code-coverage](./testing) available.
### Only develop langchain_core or langchain_experimental
If you are only developing `langchain_core` or `langchain_experimental`, you can simply install the dependencies for the respective projects and run tests:
```bash
cd libs/core
poetry install --with test
make test
```
Or:
```bash
cd libs/experimental
poetry install --with test
make test
```
### Formatting and Linting
Run these locally before submitting a PR; the CI system will also run them.
#### Code Formatting
Formatting for this project is done via [ruff](https://docs.astral.sh/ruff/rules/).
To run formatting for docs, cookbook and templates:
```bash
make format
```
To run formatting for a library, run the same command from the relevant library directory:
```bash
cd libs/{LIBRARY}
make format
```
Additionally, you can run the formatter only on the files that have been modified in your current branch as compared to the master branch using the format_diff command:
```bash
make format_diff
```
This is especially useful when you have made changes to a subset of the project and want to ensure your changes are properly formatted without affecting the rest of the codebase.
#### Linting
Linting for this project is done via a combination of [ruff](https://docs.astral.sh/ruff/rules/) and [mypy](http://mypy-lang.org/).
To run linting for docs, cookbook and templates:
```bash
make lint
```
To run linting for a library, run the same command from the relevant library directory:
```bash
cd libs/{LIBRARY}
make lint
```
In addition, you can run the linter only on the files that have been modified in your current branch as compared to the master branch using the lint_diff command:
```bash
make lint_diff
```
This can be very helpful when you've made changes to only certain parts of the project and want to ensure your changes meet the linting standards without having to check the entire codebase.
We recognize linting can be annoying - if you do not want to do it, please contact a project maintainer, and they can help you with it. We do not want this to be a blocker for good code getting contributed.
#### Spellcheck
Spellchecking for this project is done via [codespell](https://github.com/codespell-project/codespell).
Note that `codespell` looks for common typos, so it can produce false positives (flagging correctly spelled but rarely used words) and false negatives (missing genuinely misspelled words).
To check spelling for this project:
```bash
make spell_check
```
To fix spelling in place:
```bash
make spell_fix
```
If codespell is incorrectly flagging a word, you can skip spellcheck for that word by adding it to the codespell config in the `pyproject.toml` file.
```toml
[tool.codespell]
...
# Add here:
ignore-words-list = 'momento,collison,ned,foor,reworkd,parth,whats,aapply,mysogyny,unsecure'
```
## Working with Optional Dependencies
`langchain`, `langchain-community`, and `langchain-experimental` rely on optional dependencies to keep these packages lightweight.
`langchain-core` and partner packages **do not use** optional dependencies in this way.
You only need to add a new dependency if a **unit test** relies on the package.
If your package is only required for **integration tests**, then you can skip these
steps and leave all pyproject.toml and poetry.lock files alone.
If you're adding a new dependency to Langchain, assume that it will be an optional dependency, and
that most users won't have it installed.
Users who do not have the dependency installed should be able to **import** your code without
any side effects (no warnings, no errors, no exceptions).
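A common way to satisfy this (sketched below with hypothetical names) is to defer importing the optional package until the integration is actually constructed, so importing the module itself has no side effects:
```python
from typing import Any


class ParrotLinkClientWrapper:
    """Hypothetical integration that wraps an optional third-party SDK."""

    def __init__(self, **kwargs: Any) -> None:
        try:
            import parrot_link_sdk  # hypothetical optional dependency
        except ImportError as e:
            raise ImportError(
                "Could not import parrot_link_sdk. "
                "Please install it with `pip install parrot-link-sdk`."
            ) from e
        self._client = parrot_link_sdk.Client(**kwargs)
```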
To introduce the dependency to the pyproject.toml file correctly, please do the following:
1. Add the dependency to the main group as an optional dependency
```bash
poetry add --optional [package_name]
```
2. Open pyproject.toml and add the dependency to the `extended_testing` extra
3. Relock the poetry file to update the extra.
```bash
poetry lock --no-update
```
4. Add a unit test that at the very least attempts to import the new code. Ideally, the unit
test makes use of lightweight fixtures to test the logic of the code.
5. Please use the `@pytest.mark.requires(package_name)` decorator for any tests that require the dependency.
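For example, a minimal unit test guarded by the decorator might look like this (module and package names are illustrative):
```python
import pytest


@pytest.mark.requires("parrot_link_sdk")  # skipped unless the optional package is installed
def test_parrot_link_import() -> None:
    from langchain_community.chat_models import parrot_link  # noqa: F401
```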
## Adding a Jupyter Notebook
If you are adding a Jupyter Notebook example, you'll want to install the optional `dev` dependencies.
To install dev dependencies:
```bash
poetry install --with dev
```
Launch a notebook:
```bash
poetry run jupyter notebook
```
When you run `poetry install`, the `langchain` package is installed as editable in the virtualenv, so your new logic can be imported into the notebook.

View File

@@ -0,0 +1,174 @@
---
sidebar_position: 3
---
# Contribute Documentation
LangChain documentation consists of two components:
1. Main Documentation: Hosted at [python.langchain.com](https://python.langchain.com/),
this comprehensive resource serves as the primary user-facing documentation.
It covers a wide array of topics, including tutorials, use cases, integrations,
and more, offering extensive guidance on building with LangChain.
The content for this documentation lives in the `/docs` directory of the monorepo.
2. In-code Documentation: This is documentation of the codebase itself, which is also
used to generate the externally facing [API Reference](https://api.python.langchain.com/en/latest/langchain_api_reference.html).
The content for the API reference is autogenerated by scanning the docstrings in the codebase. For this reason we ask that
developers document their code well.
The main documentation is built using [Quarto](https://quarto.org) and [Docusaurus 2](https://docusaurus.io/).
The `API Reference` is largely autogenerated by [sphinx](https://www.sphinx-doc.org/en/master/)
from the code and is hosted by [Read the Docs](https://readthedocs.org/).
We appreciate all contributions to the documentation, whether it be fixing a typo,
adding a new tutorial or example and whether it be in the main documentation or the API Reference.
Similar to linting, we recognize documentation can be annoying. If you do not want
to do it, please contact a project maintainer, and they can help you with it. We do not want this to be a blocker for good code getting contributed.
## 📜 Main Documentation
The content for the main documentation is located in the `/docs` directory of the monorepo.
The documentation is written using a combination of ipython notebooks (`.ipynb` files)
and markdown (`.mdx` files). The notebooks are converted to markdown
using [Quarto](https://quarto.org) and then built using [Docusaurus 2](https://docusaurus.io/).
Feel free to make contributions to the main documentation! 🥰
After modifying the documentation:
1. Run the linting and formatting commands (see below) to ensure that the documentation is well-formatted and free of errors.
2. Optionally build the documentation locally to verify that the changes look good.
3. Make a pull request with the changes.
4. You can preview and verify that the changes are what you wanted by clicking the `View deployment` or `Visit Preview` buttons on the pull request `Conversation` page. This will take you to a preview of the documentation changes.
## ⚒️ Linting and Building Documentation Locally
After writing up the documentation, you may want to lint and build the documentation
locally to ensure that it looks good and is free of errors.
If you're unable to build it locally that's okay as well, as you will be able to
see a preview of the documentation on the pull request page.
### Install dependencies
- [Quarto](https://quarto.org) - package that converts Jupyter notebooks (`.ipynb` files) into mdx files for serving in Docusaurus. [Download link](https://quarto.org/docs/download/).
From the **monorepo root**, run the following command to install the dependencies:
```bash
poetry install --with lint,docs --no-root
```
### Building
The code that builds the documentation is located in the `/docs` directory of the monorepo.
In the following commands, the prefix `api_` indicates that those are operations for the API Reference.
Before building the documentation, it is always a good idea to clean the build directory:
```bash
make docs_clean
make api_docs_clean
```
Next, you can build the documentation as outlined below:
```bash
make docs_build
make api_docs_build
```
Finally, run the link checker to ensure all links are valid:
```bash
make docs_linkcheck
make api_docs_linkcheck
```
### Linting and Formatting
The Main Documentation is linted from the **monorepo root**. To lint the main documentation, run the following from there:
```bash
make lint
```
If you have formatting-related errors, you can fix them automatically with:
```bash
make format
```
## ⌨️ In-code Documentation
The in-code documentation is largely autogenerated by [sphinx](https://www.sphinx-doc.org/en/master/) from the code and is hosted by [Read the Docs](https://readthedocs.org/).
For the API reference to be useful, the codebase must be well-documented. This means that all functions, classes, and methods should have a docstring that explains what they do, what the arguments are, and what the return value is. This is a good practice in general, but it is especially important for LangChain because the API reference is the primary resource for developers to understand how to use the codebase.
We generally follow the [Google Python Style Guide](https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings) for docstrings.
Here is an example of a well-documented function:
```python
def my_function(arg1: int, arg2: str) -> float:
"""This is a short description of the function. (It should be a single sentence.)
This is a longer description of the function. It should explain what
the function does, what the arguments are, and what the return value is.
It should wrap at 88 characters.
Examples:
This is a section for examples of how to use the function.
.. code-block:: python
my_function(1, "hello")
Args:
arg1: This is a description of arg1. We do not need to specify the type since
it is already specified in the function signature.
arg2: This is a description of arg2.
Returns:
This is a description of the return value.
"""
return 3.14
```
### Linting and Formatting
The in-code documentation is linted from the directories belonging to the packages
being documented.
For example, if you're working on the `langchain-community` package, you would change
the working directory to `libs/community`:
```bash
cd [root]/libs/community
```
Set up a virtual environment for the package if you haven't done so already.
Install the dependencies for the package.
```bash
poetry install --with lint
```
Then you can run the following commands to lint and format the in-code documentation:
```bash
make format
make lint
```
## Verify Documentation Changes
After pushing documentation changes to the repository, you can preview and verify that the changes are
what you wanted by clicking the `View deployment` or `Visit Preview` buttons on the pull request `Conversation` page.
This will take you to a preview of the documentation changes.
This preview is created by [Vercel](https://vercel.com/docs/getting-started-with-vercel).

View File

@@ -0,0 +1,26 @@
---
sidebar_position: 6
sidebar_label: FAQ
---
# Frequently Asked Questions
## Pull Requests (PRs)
### How do I allow maintainers to edit my PR?
When you submit a pull request, there may be additional changes
necessary before merging it. Oftentimes, it is more efficient for the
maintainers to make these changes themselves before merging, rather than asking you
to do so in code review.
By default, most pull requests will have a
`✅ Maintainers are allowed to edit this pull request.`
badge in the right-hand sidebar.
If you do not see this badge, you may have this setting off for the fork you are
pull-requesting from. See [this Github docs page](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/allowing-changes-to-a-pull-request-branch-created-from-a-fork)
for more information.
Notably, Github doesn't allow this setting to be enabled for forks in **organizations** ([issue](https://github.com/orgs/community/discussions/5634)).
If you are working in an organization, we recommend submitting your PR from a personal
fork in order to enable this setting.

View File

@@ -0,0 +1,54 @@
---
sidebar_position: 0
---
# Welcome Contributors
Hi there! Thank you for even being interested in contributing to LangChain.
As an open-source project in a rapidly developing field, we are extremely open to contributions, whether they involve new features, improved infrastructure, better documentation, or bug fixes.
## 🗺️ Guidelines
### 👩‍💻 Ways to contribute
There are many ways to contribute to LangChain. Here are some common ways people contribute:
- [**Documentation**](./documentation.mdx): Help improve our docs, including this one!
- [**Code**](./code.mdx): Help us write code, fix bugs, or improve our infrastructure.
- [**Integrations**](integrations.mdx): Help us integrate with your favorite vendors and tools.
- [**Discussions**](https://github.com/langchain-ai/langchain/discussions): Help answer usage questions and discuss issues with users.
### 🚩 GitHub Issues
Our [issues](https://github.com/langchain-ai/langchain/issues) page is kept up to date with bugs, improvements, and feature requests.
There is a taxonomy of labels to help with sorting and discovery of issues of interest. Please use these to help organize issues.
If you start working on an issue, please assign it to yourself.
If you are adding an issue, please try to keep it focused on a single, modular bug/improvement/feature.
If two issues are related, or blocking, please link them rather than combining them.
We will try to keep these issues as up-to-date as possible, though
with the rapid rate of development in this field some may get out of date.
If you notice this happening, please let us know.
### 💭 GitHub Discussions
We have a [discussions](https://github.com/langchain-ai/langchain/discussions) page where users can ask usage questions, discuss design decisions, and propose new features.
If you are able to help answer questions, please do so! This will allow the maintainers to spend more time focused on development and bug fixing.
### 🙋 Getting Help
Our goal is to have the simplest developer setup possible. Should you experience any difficulty getting setup, please
contact a maintainer! Not only do we want to help get you unblocked, but we also want to make sure that the process is
smooth for future contributors.
In a similar vein, we do enforce certain linting, formatting, and documentation standards in the codebase.
If you are finding these difficult (or even just annoying) to work with, feel free to contact a maintainer for help -
we do not want these to get in the way of getting good code into the codebase.
# 🌟 Recognition
If your contribution has made its way into a release, we will want to give you credit on Twitter (only if you want though)!
If you have a Twitter account you would like us to mention, please let us know in the PR or through another means.

View File

@@ -0,0 +1,145 @@
---
sidebar_position: 5
---
# Contribute Integrations
To begin, make sure you have all the dependencies outlined in the guide on [Contributing Code](./code).
There are a few different places you can contribute integrations for LangChain:
- **Community**: For lighter-weight integrations that are primarily maintained by LangChain and the Open Source Community.
- **Partner Packages**: For independent packages that are co-maintained by LangChain and a partner.
For the most part, new integrations should be added to the Community package. Partner packages require more maintenance as separate packages, so please confirm with the LangChain team before creating a new partner package.
In the following sections, we'll walk through how to contribute to each of these packages from a fake company, `Parrot Link AI`.
## Community Package
The `langchain-community` package is in `libs/community` and contains most integrations.
It is installed by users with `pip install langchain-community`, and exported members can be imported with code like
```python
from langchain_community.chat_models import ChatParrotLink
from langchain_community.llms import ParrotLinkLLM
from langchain_community.vectorstores import ParrotLinkVectorStore
```
The community package relies on manually-installed dependent packages, so you will see errors if you try to import a package that is not installed. In our fake example, if you tried to import `ParrotLinkLLM` without installing `parrot-link-sdk`, you will see an `ImportError` telling you to install it when trying to use it.
Let's say we wanted to implement a chat model for Parrot Link AI. We would create a new file in `libs/community/langchain_community/chat_models/parrot_link.py` with the following code:
```python
from langchain_core.language_models.chat_models import BaseChatModel
class ChatParrotLink(BaseChatModel):
"""ChatParrotLink chat model.
Example:
.. code-block:: python
from langchain_parrot_link import ChatParrotLink
model = ChatParrotLink()
"""
...
```
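Filling in that stub, a `BaseChatModel` subclass must at minimum implement `_generate` and `_llm_type`. The following is a hedged sketch that simply echoes the last message instead of calling a real SDK:
```python
from typing import Any, List, Optional

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.chat_models import BaseChatModel
from langchain_core.messages import AIMessage, BaseMessage
from langchain_core.outputs import ChatGeneration, ChatResult


class ChatParrotLink(BaseChatModel):
    """Minimal sketch of a chat model that echoes the last message."""

    @property
    def _llm_type(self) -> str:
        return "chat-parrot-link"

    def _generate(
        self,
        messages: List[BaseMessage],
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> ChatResult:
        # A real implementation would call the parrot-link-sdk client here.
        reply = AIMessage(content=str(messages[-1].content))
        return ChatResult(generations=[ChatGeneration(message=reply)])
```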
And we would write tests in:
- Unit tests: `libs/community/tests/unit_tests/chat_models/test_parrot_link.py`
- Integration tests: `libs/community/tests/integration_tests/chat_models/test_parrot_link.py`
And add documentation to:
- `docs/docs/integrations/chat/parrot_link.ipynb`
## Partner Packages
Partner packages are in `libs/partners/*` and are installed by users with `pip install langchain-{partner}`, and exported members can be imported with code like
```python
from langchain_{partner} import X
```
### Set up a new package
To set up a new partner package, use the latest version of the LangChain CLI. You can install or update it with:
```bash
pip install -U langchain-cli
```
Let's say you want to create a new partner package working for a company called Parrot Link AI.
Then, run the following command to create a new partner package:
```bash
cd libs/partners
langchain-cli integration new
> Name: parrot-link
> Name of integration in PascalCase [ParrotLink]: ParrotLink
```
This will create a new package in `libs/partners/parrot-link` with the following structure:
```
libs/partners/parrot-link/
langchain_parrot_link/ # folder containing your package
...
tests/
...
docs/ # bootstrapped docs notebooks, must be moved to /docs in monorepo root
...
scripts/ # scripts for CI
...
LICENSE
README.md # fill out with information about your package
Makefile # default commands for CI
pyproject.toml # package metadata, mostly managed by Poetry
poetry.lock # package lockfile, managed by Poetry
.gitignore
```
### Implement your package
First, add any dependencies your package needs, such as your company's SDK:
```bash
poetry add parrot-link-sdk
```
If you need separate dependencies for type checking, you can add them to the `typing` group with:
```bash
poetry add --group typing types-parrot-link-sdk
```
Then, implement your package in `libs/partners/parrot-link/langchain_parrot_link`.
By default, this will include stubs for a Chat Model, an LLM, and/or a Vector Store. You should delete any of the files you won't use and remove them from `__init__.py`.
### Write Unit and Integration Tests
Some basic tests are generated in the `tests/` directory. You should add more tests to cover your package's functionality.
For information on running and implementing tests, see the [Testing guide](./testing).
### Write documentation
Documentation is generated from Jupyter notebooks in the `docs/` directory. You should move the generated notebooks to the relevant `docs/docs/integrations` directory in the monorepo root.
### Additional steps
Contributor steps:
- [ ] Add secret names to manual integrations workflow in `.github/workflows/_integration_test.yml`
- [ ] Add secrets to release workflow (for pre-release testing) in `.github/workflows/_release.yml`
Maintainer steps (Contributors should **not** do these):
- [ ] set up pypi and test pypi projects
- [ ] add credential secrets to Github Actions
- [ ] add package to conda-forge

View File

@@ -0,0 +1,54 @@
---
sidebar_position: 0.5
---
# Repository Structure
If you plan on contributing to LangChain code or documentation, it can be useful
to understand the high level structure of the repository.
LangChain is organized as a [monorepo](https://en.wikipedia.org/wiki/Monorepo) that contains multiple packages.
Here's the structure visualized as a tree:
```text
.
├── cookbook # Tutorials and examples
├── docs # Contains content for the documentation here: https://python.langchain.com/
├── libs
│ ├── langchain # Main package
│ │ ├── tests/unit_tests # Unit tests (present in each package not shown for brevity)
│ │ ├── tests/integration_tests # Integration tests (present in each package not shown for brevity)
│ ├── langchain-community # Third-party integrations
│ ├── langchain-core # Base interfaces for key abstractions
│ ├── langchain-experimental # Experimental components and chains
│ ├── partners
│ │ ├── langchain-partner-1
│ │ ├── langchain-partner-2
│ │ ├── ...
├── templates # A collection of easily deployable reference architectures for a wide variety of tasks.
```
The root directory also contains the following files:
* `pyproject.toml`: Dependencies for building and linting the docs and cookbook.
* `Makefile`: Shortcuts for building and linting the docs and cookbook.
There are other files in the root directory level, but their presence should be self-explanatory. Feel free to browse around!
## Documentation
The `/docs` directory contains the content for the documentation that is shown
at https://python.langchain.com/ and the associated API Reference https://api.python.langchain.com/en/latest/langchain_api_reference.html.
See the [documentation](./documentation) guidelines to learn how to contribute to the documentation.
## Code
The `/libs` directory contains the code for the LangChain packages.
To learn more about how to contribute code see the following guidelines:
- [Code](./code.mdx): learn how to develop in the LangChain codebase.
- [Integrations](./integrations.mdx): learn how to contribute third-party integrations to langchain-community or start a new partner package.
- [Testing](./testing.mdx): learn how to write tests for the packages.

View File

@@ -0,0 +1,147 @@
---
sidebar_position: 2
---
# Testing
All of our packages have unit tests and integration tests, and we favor unit tests over integration tests.
Unit tests run on every pull request, so they should be fast and reliable.
Integration tests run once a day, and they require more setup, so they should be reserved for confirming interface points with external services.
## Unit Tests
Unit tests cover modular logic that does not require calls to outside APIs.
If you add new logic, please add a unit test.
To install dependencies for unit tests:
```bash
poetry install --with test
```
To run unit tests:
```bash
make test
```
To run unit tests in Docker:
```bash
make docker_tests
```
To run a specific test:
```bash
TEST_FILE=tests/unit_tests/test_imports.py make test
```
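A unit test is an ordinary `pytest` function; a minimal, illustrative example:
```python
# tests/unit_tests/test_example.py (illustrative path)
from langchain_core.documents import Document


def test_document_metadata_roundtrip() -> None:
    """Unit tests exercise local logic only; no network calls."""
    doc = Document(page_content="hello", metadata={"source": "unit-test"})
    assert doc.page_content == "hello"
    assert doc.metadata["source"] == "unit-test"
```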
## Integration Tests
Integration tests cover logic that requires making calls to outside APIs (often integration with other services).
If you add support for a new external API, please add a new integration test.
**Warning:** Almost no tests should be integration tests.
Tests that require making network connections make it difficult for other
developers to test the code.
Instead, favor relying on the `responses` library and/or `mock.patch` to mock
requests using small fixtures.
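For instance, an outbound HTTP call can be patched out with `mock.patch`; the helper function below is hypothetical:
```python
from unittest import mock

import requests


def fetch_status(url: str) -> int:
    """Hypothetical helper under test."""
    return requests.get(url, timeout=10).status_code


def test_fetch_status_without_network() -> None:
    fake_response = mock.Mock(status_code=200)
    with mock.patch("requests.get", return_value=fake_response) as mocked_get:
        assert fetch_status("https://example.com") == 200
        mocked_get.assert_called_once()
```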
To install dependencies for integration tests:
```bash
poetry install --with test,test_integration
```
To run integration tests:
```bash
make integration_tests
```
### Prepare
The integration tests use several search engines and databases. The tests
aim to verify the correct behavior of the engines and databases according to
their specifications and requirements.
To run some integration tests, such as tests located in
`tests/integration_tests/vectorstores/`, you will need to install the following
software:
- Docker
- Python 3.8.1 or later
Any new dependencies should be added by running:
```bash
# add package and install it after adding:
poetry add tiktoken@latest --group "test_integration" && poetry install --with test_integration
```
Before running any tests, you should start a specific Docker container that has all the
necessary dependencies installed. For instance, we use the `elasticsearch.yml` container
for `test_elasticsearch.py`:
```bash
cd tests/integration_tests/vectorstores/docker-compose
docker-compose -f elasticsearch.yml up
```
For environments that require more involved preparation, look for `*.sh` scripts. For instance,
`opensearch.sh` builds the required Docker image and then launches OpenSearch.
### Prepare environment variables for local testing:
- copy `tests/integration_tests/.env.example` to `tests/integration_tests/.env`
- set variables in the `tests/integration_tests/.env` file, e.g. `OPENAI_API_KEY`
Additionally, it's important to note that some integration tests may require certain
environment variables to be set, such as `OPENAI_API_KEY`. Be sure to set any required
environment variables before running the tests to ensure they run correctly.
### Recording HTTP interactions with pytest-vcr
Some of the integration tests in this repository involve making HTTP requests to
external services. To prevent these requests from being made every time the tests are
run, we use pytest-vcr to record and replay HTTP interactions.
When running tests in a CI/CD pipeline, you may not want to modify the existing
cassettes. You can use the `--vcr-record=none` command-line option to disable recording
new cassettes. Here's an example:
```bash
pytest --log-cli-level=10 tests/integration_tests/vectorstores/test_pinecone.py --vcr-record=none
pytest tests/integration_tests/vectorstores/test_elasticsearch.py --vcr-record=none
```
### Run some tests with coverage:
```bash
pytest tests/integration_tests/vectorstores/test_elasticsearch.py --cov=langchain --cov-report=html
start "" htmlcov/index.html || open htmlcov/index.html
```
## Coverage
Code coverage (i.e. the amount of code that is covered by unit tests) helps identify areas of the code that are potentially more or less brittle.
Coverage requires the dependencies for integration tests:
```bash
poetry install --with test_integration
```
To get a report of current coverage, run the following:
```bash
make coverage
```

Some files were not shown because too many files have changed in this diff Show More