notebook fmt (#12498)

Bagatur
2023-10-29 15:50:09 -07:00
committed by GitHub
parent 56cc5b847c
commit 2424fff3f1
342 changed files with 8261 additions and 6796 deletions


@@ -33,7 +33,7 @@
"from langchain.vectorstores import Pinecone\n",
"from langchain.embeddings import OpenAIEmbeddings\n",
"\n",
"pinecone.init(api_key=\"...\",environment=\"...\")"
"pinecone.init(api_key=\"...\", environment=\"...\")"
]
},
{
@@ -53,7 +53,7 @@
" \"doc7\": \"Climate change: The science and models.\",\n",
" \"doc8\": \"Global warming: A subset of climate change.\",\n",
" \"doc9\": \"How climate change affects daily weather.\",\n",
" \"doc10\": \"The history of climate change activism.\"\n",
" \"doc10\": \"The history of climate change activism.\",\n",
"}"
]
},
@@ -64,7 +64,9 @@
"metadata": {},
"outputs": [],
"source": [
"vectorstore = Pinecone.from_texts(list(all_documents.values()), OpenAIEmbeddings(), index_name='rag-fusion')"
"vectorstore = Pinecone.from_texts(\n",
" list(all_documents.values()), OpenAIEmbeddings(), index_name=\"rag-fusion\"\n",
")"
]
},
{
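For orientation (not part of the diff): the reformatted cell above indexes the toy documents in Pinecone. A minimal sketch of how that vectorstore is typically consumed downstream in this template, assuming the "rag-fusion" index and the OpenAI/Pinecone credentials are already set up:

# Sketch: expose the Pinecone index as a LangChain retriever.
retriever = vectorstore.as_retriever()
# Similarity search over the embedded toy documents.
docs = retriever.get_relevant_documents("impact of climate change")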
@@ -98,7 +100,7 @@
"source": [
"from langchain import hub\n",
"\n",
"prompt = hub.pull('langchain-ai/rag-fusion-query-generation')"
"prompt = hub.pull(\"langchain-ai/rag-fusion-query-generation\")"
]
},
{
@@ -122,7 +124,9 @@
"metadata": {},
"outputs": [],
"source": [
"generate_queries = prompt | ChatOpenAI(temperature=0) | StrOutputParser() | (lambda x: x.split(\"\\n\"))"
"generate_queries = (\n",
" prompt | ChatOpenAI(temperature=0) | StrOutputParser() | (lambda x: x.split(\"\\n\"))\n",
")"
]
},
{
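The chain reformatted above pipes the hub prompt through ChatOpenAI and StrOutputParser, then splits the completion into one query per line. A hedged usage sketch, assuming (as in the published rag-fusion template) that the pulled prompt takes an "original_query" input variable:

# Sketch: invoke the LCEL chain; the trailing lambda turns the completion
# into a Python list of queries.
queries = generate_queries.invoke({"original_query": "impact of climate change"})
# queries is a list of reworded search queries, one per generated line.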
@@ -171,6 +175,8 @@
"outputs": [],
"source": [
"from langchain.load import dumps, loads\n",
"\n",
"\n",
"def reciprocal_rank_fusion(results: list[list], k=60):\n",
" fused_scores = {}\n",
" for docs in results:\n",
@@ -181,9 +187,12 @@
" fused_scores[doc_str] = 0\n",
" previous_score = fused_scores[doc_str]\n",
" fused_scores[doc_str] += 1 / (rank + k)\n",
" \n",
" reranked_results = [(loads(doc), score) for doc, score in sorted(fused_scores.items(), key=lambda x: x[1], reverse=True)]\n",
" return reranked_results "
"\n",
" reranked_results = [\n",
" (loads(doc), score)\n",
" for doc, score in sorted(fused_scores.items(), key=lambda x: x[1], reverse=True)\n",
" ]\n",
" return reranked_results"
]
},
{
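Since the final hunk reflows the reciprocal_rank_fusion helper, here is a self-contained sketch of the same scoring idea on plain document ids (no langchain dumps/loads serialization), handy for sanity-checking the 1 / (rank + k) formula; the rrf name and toy data are illustrative only:

# Illustrative reciprocal rank fusion over toy ranked lists of doc ids.
def rrf(rankings: list[list[str]], k: int = 60) -> list[tuple[str, float]]:
    scores: dict[str, float] = {}
    for ranking in rankings:
        for rank, doc in enumerate(ranking):
            # Each list contributes 1 / (rank + k); earlier ranks score higher.
            scores[doc] = scores.get(doc, 0.0) + 1 / (rank + k)
    return sorted(scores.items(), key=lambda x: x[1], reverse=True)

print(rrf([["doc2", "doc7", "doc9"], ["doc7", "doc2", "doc10"]]))
# doc2 and doc7 tie at 1/60 + 1/61; doc9 and doc10 each score 1/62.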