add oai v1 cookbook (#12961)

Bagatur
2023-11-06 14:28:32 -08:00
committed by GitHub
parent 4f7dff9d66
commit 388f248391
6 changed files with 215 additions and 16 deletions

View File

@@ -84,9 +84,7 @@
 "model_id = \"gpt2\"\n",
 "tokenizer = AutoTokenizer.from_pretrained(model_id)\n",
 "model = AutoModelForCausalLM.from_pretrained(model_id)\n",
-"pipe = pipeline(\n",
-"    \"text-generation\", model=model, tokenizer=tokenizer, max_new_tokens=10\n",
-")\n",
+"pipe = pipeline(\"text-generation\", model=model, tokenizer=tokenizer, max_new_tokens=10)\n",
 "hf = HuggingFacePipeline(pipeline=pipe)"
 ]
 },
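For reference, a minimal standalone sketch of what this notebook cell does after the reformat: load GPT-2 with transformers, build a text-generation pipeline, and wrap it in LangChain's HuggingFacePipeline. The import paths are assumptions based on the 2023-era langchain used here; the final print call is an illustrative usage line not present in the notebook.

# Sketch of the cell above; assumes `transformers` and `langchain` are installed.
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
from langchain.llms import HuggingFacePipeline  # assumed import path for this langchain version

model_id = "gpt2"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

# max_new_tokens caps generation at 10 new tokens
pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, max_new_tokens=10)
hf = HuggingFacePipeline(pipeline=pipe)

print(hf("Once upon a time"))  # illustrative call; returns the generated continuation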

View File

@@ -39,8 +39,8 @@
 "from langchain.embeddings import OpenCLIPEmbeddings\n",
 "\n",
 "# Images\n",
-"img_path_dog='/Users/rlm/Desktop/Papers/LLaVA/dog.jpeg'\n",
-"img_path_house='/Users/rlm/Desktop/Papers/LLaVA/house.jpeg'\n",
+"img_path_dog = \"/Users/rlm/Desktop/Papers/LLaVA/dog.jpeg\"\n",
+"img_path_house = \"/Users/rlm/Desktop/Papers/LLaVA/house.jpeg\"\n",
 "\n",
 "# Load images and convert to numpy arrays\n",
 "image_np_dog = np.array(_PILImage.open(img_path_dog).convert(\"RGB\"))\n",
@@ -106,18 +106,26 @@
 "text_feat_house_np = np.array(text_feat_house[0])\n",
 "\n",
 "# Compute similarity\n",
-"similarities = np.array([\n",
-"    [text_feat_dog_np @ img_feat_dog_np.T][0][0], \n",
-"    [text_feat_dog_np @ img_feat_house_np.T][0][0],\n",
-"    [text_feat_house_np @ img_feat_dog_np.T][0][0], \n",
-"    [text_feat_house_np @ img_feat_house_np.T][0][0]\n",
-"]).reshape(2, 2)\n",
+"similarities = np.array(\n",
+"    [\n",
+"        [text_feat_dog_np @ img_feat_dog_np.T][0][0],\n",
+"        [text_feat_dog_np @ img_feat_house_np.T][0][0],\n",
+"        [text_feat_house_np @ img_feat_dog_np.T][0][0],\n",
+"        [text_feat_house_np @ img_feat_house_np.T][0][0],\n",
+"    ]\n",
+").reshape(2, 2)\n",
 "\n",
 "# Ensure similarities is of shape (2, 2)\n",
 "print(similarities.shape) # Expected: (2, 2)\n",
 "\n",
 "# Plot heatmap\n",
-"sns.heatmap(similarities, annot=True, cmap='viridis', xticklabels=['dog image', 'house image'], yticklabels=['dog text', 'house text'])\n"
+"sns.heatmap(\n",
+"    similarities,\n",
+"    annot=True,\n",
+"    cmap=\"viridis\",\n",
+"    xticklabels=[\"dog image\", \"house image\"],\n",
+"    yticklabels=[\"dog text\", \"house text\"],\n",
+")"
 ]
 }
],
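The two hunks above only reformat an OpenCLIP similarity demo. As a rough standalone sketch of what the cell computes, assuming OpenCLIPEmbeddings exposes embed_image for file paths and embed_documents for text, and that its vectors are normalized so dot products behave like cosine similarity; the captions are illustrative and the file paths are the notebook's placeholders:

# Embed two images and two captions, then plot a 2x2 text-vs-image similarity matrix.
import numpy as np
import seaborn as sns
from langchain.embeddings import OpenCLIPEmbeddings

clip_embd = OpenCLIPEmbeddings()

# Placeholder paths copied from the notebook; point them at any two local images.
img_feats = np.array(
    clip_embd.embed_image(
        [
            "/Users/rlm/Desktop/Papers/LLaVA/dog.jpeg",
            "/Users/rlm/Desktop/Papers/LLaVA/house.jpeg",
        ]
    )
)
text_feats = np.array(clip_embd.embed_documents(["a photo of a dog", "a photo of a house"]))

# Rows are text prompts, columns are images; the diagonal should dominate.
similarities = text_feats @ img_feats.T
print(similarities.shape)  # Expected: (2, 2)

sns.heatmap(
    similarities,
    annot=True,
    cmap="viridis",
    xticklabels=["dog image", "house image"],
    yticklabels=["dog text", "house text"],
)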

View File

@@ -59,8 +59,8 @@
 "import os\n",
 "import getpass\n",
 "\n",
-"os.environ['QIANFAN_AK'] = getpass.getpass(\"Your Qianfan AK:\")\n",
-"os.environ['QIANFAN_SK'] = getpass.getpass(\"Your Qianfan SK:\")"
+"os.environ[\"QIANFAN_AK\"] = getpass.getpass(\"Your Qianfan AK:\")\n",
+"os.environ[\"QIANFAN_SK\"] = getpass.getpass(\"Your Qianfan SK:\")"
 ]
 },
 {
@@ -85,6 +85,7 @@
 "docs = text_splitter.split_documents(documents)\n",
 "\n",
 "from langchain.embeddings import QianfanEmbeddingsEndpoint\n",
+"\n",
 "embeddings = QianfanEmbeddingsEndpoint()"
 ]
 },
@@ -104,8 +105,12 @@
 "source": [
 "# Create a bes instance and index docs.\n",
 "from langchain.vectorstores import BESVectorStore\n",
+"\n",
 "bes = BESVectorStore.from_documents(\n",
-"    documents=docs, embedding=embeddings, bes_url=\"your bes cluster url\", index_name=\"your vector index\"\n",
+"    documents=docs,\n",
+"    embedding=embeddings,\n",
+"    bes_url=\"your bes cluster url\",\n",
+"    index_name=\"your vector index\",\n",
 ")\n",
 "bes.client.indices.refresh(index=\"your vector index\")"
 ]
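Taken together, the three hunks above cover credentials, embeddings, and indexing for Baidu Qianfan plus a Baidu ElasticSearch (BES) vector store. A rough end-to-end sketch follows; the loader/splitter step is an assumption (it falls outside the hunks shown), and the bes_url/index_name values are kept as the notebook's placeholders.

# Sketch of the reformatted cells: Qianfan credentials, embeddings, and a BES vector store.
import os
import getpass

os.environ["QIANFAN_AK"] = getpass.getpass("Your Qianfan AK:")
os.environ["QIANFAN_SK"] = getpass.getpass("Your Qianfan SK:")

from langchain.document_loaders import TextLoader
from langchain.text_splitter import CharacterTextSplitter
from langchain.embeddings import QianfanEmbeddingsEndpoint
from langchain.vectorstores import BESVectorStore

# Assumed loading/splitting step; the diff only shows split_documents being called.
documents = TextLoader("state_of_the_union.txt").load()
docs = CharacterTextSplitter(chunk_size=500, chunk_overlap=0).split_documents(documents)

embeddings = QianfanEmbeddingsEndpoint()
bes = BESVectorStore.from_documents(
    documents=docs,
    embedding=embeddings,
    bes_url="your bes cluster url",
    index_name="your vector index",
)
bes.client.indices.refresh(index="your vector index")  # make the new docs searchable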

View File

@@ -108,7 +108,10 @@
 "\n",
 "dingo_client = DingoDB(user=\"\", password=\"\", host=[\"127.0.0.1:13000\"])\n",
 "# First, check if our index already exists. If it doesn't, we create it\n",
-"if index_name not in dingo_client.get_index() and index_name.upper() not in dingo_client.get_index():\n",
+"if (\n",
+"    index_name not in dingo_client.get_index()\n",
+"    and index_name.upper() not in dingo_client.get_index()\n",
+"):\n",
 "    # we create a new index, modify to your own\n",
 "    dingo_client.create_index(\n",
 "        index_name=index_name, dimension=1536, metric_type=\"cosine\", auto_id=False\n",