diff --git a/docs/docs/integrations/llms/oci_generative_ai.ipynb b/docs/docs/integrations/llms/oci_generative_ai.ipynb index 200f1038f3e..40332f13e1c 100644 --- a/docs/docs/integrations/llms/oci_generative_ai.ipynb +++ b/docs/docs/integrations/llms/oci_generative_ai.ipynb @@ -98,6 +98,9 @@ " model_id=\"MY_MODEL\",\n", " service_endpoint=\"https://inference.generativeai.us-chicago-1.oci.oraclecloud.com\",\n", " compartment_id=\"MY_OCID\",\n", + " auth_type=\"SECURITY_TOKEN\",\n", + " auth_profile=\"MY_PROFILE\", # replace with your profile name\n", + " model_kwargs={\"temperature\": 0.7, \"top_p\": 0.75, \"max_tokens\": 200},\n", ")\n", "\n", "prompt = PromptTemplate(input_variables=[\"query\"], template=\"{query}\")\n", @@ -158,13 +161,6 @@ "print(chain.invoke(\"when was oracle founded?\"))\n", "print(chain.invoke(\"where is oracle headquartered?\"))" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/docs/docs/integrations/platforms/index.mdx b/docs/docs/integrations/platforms/index.mdx index ee66df3324a..9416e13e522 100644 --- a/docs/docs/integrations/platforms/index.mdx +++ b/docs/docs/integrations/platforms/index.mdx @@ -25,3 +25,4 @@ LangChain integrates with many providers - [AWS](/docs/integrations/platforms/aws) - [Hugging Face](/docs/integrations/platforms/huggingface) - [Microsoft](/docs/integrations/platforms/microsoft) +- [Oracle Cloud Infrastructure (OCI)](/docs/integrations/platforms/oci) diff --git a/docs/docs/integrations/platforms/oci.mdx b/docs/docs/integrations/platforms/oci.mdx new file mode 100644 index 00000000000..e0b3570028f --- /dev/null +++ b/docs/docs/integrations/platforms/oci.mdx @@ -0,0 +1,58 @@ +# Oracle Cloud Infrastructure (OCI) + +The `LangChain` integrations related to [Oracle Cloud Infrastructure](https://www.oracle.com/artificial-intelligence/). 
+
+## LLMs
+
+### OCI Generative AI
+> Oracle Cloud Infrastructure (OCI) [Generative AI](https://docs.oracle.com/en-us/iaas/Content/generative-ai/home.htm) is a fully managed service that provides a set of state-of-the-art,
+> customizable large language models (LLMs) that cover a wide range of use cases, and which are available through a single API.
+> Using the OCI Generative AI service you can access ready-to-use pretrained models, or create and host your own fine-tuned
+> custom models based on your own data on dedicated AI clusters.
+
+To use, you should have the latest `oci` python SDK installed.
+
+```bash
+pip install -U oci
+```
+
+See [usage examples](/docs/integrations/llms/oci_generative_ai).
+
+```python
+from langchain_community.llms import OCIGenAI
+```
+
+### OCI Data Science Model Deployment Endpoint
+
+> [OCI Data Science](https://docs.oracle.com/en-us/iaas/data-science/using/home.htm) is a
+> fully managed and serverless platform for data science teams. Using the OCI Data Science
+> platform you can build, train, and manage machine learning models, and then deploy them
+> as an OCI Model Deployment Endpoint using the
+> [OCI Data Science Model Deployment Service](https://docs.oracle.com/en-us/iaas/data-science/using/model-dep-about.htm).
+
+If you deployed an LLM with the VLLM or TGI framework, you can use the
+`OCIModelDeploymentVLLM` or `OCIModelDeploymentTGI` classes to interact with it.
+
+To use, you should have the latest `oracle-ads` python SDK installed.
+
+```bash
+pip install -U oracle-ads
+```
+
+See [usage examples](/docs/integrations/llms/oci_model_deployment_endpoint).
+
+```python
+from langchain_community.llms import OCIModelDeploymentVLLM
+
+from langchain_community.llms import OCIModelDeploymentTGI
+```
+
+## Text Embedding Models
+
+### OCI Generative AI
+
+See [usage examples](/docs/integrations/text_embedding/oci_generative_ai).
+
+```python
+from langchain_community.embeddings import OCIGenAIEmbeddings
+```
\ No newline at end of file
diff --git a/docs/docs/integrations/text_embedding/oci_generative_ai.ipynb b/docs/docs/integrations/text_embedding/oci_generative_ai.ipynb
new file mode 100755
index 00000000000..ae935e4d354
--- /dev/null
+++ b/docs/docs/integrations/text_embedding/oci_generative_ai.ipynb
@@ -0,0 +1,140 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Oracle Cloud Infrastructure Generative AI"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Oracle Cloud Infrastructure (OCI) Generative AI is a fully managed service that provides a set of state-of-the-art, customizable large language models (LLMs) that cover a wide range of use cases, and which are available through a single API.\n",
+    "Using the OCI Generative AI service you can access ready-to-use pretrained models, or create and host your own fine-tuned custom models based on your own data on dedicated AI clusters. Detailed documentation of the service and API is available __[here](https://docs.oracle.com/en-us/iaas/Content/generative-ai/home.htm)__ and __[here](https://docs.oracle.com/en-us/iaas/api/#/en/generative-ai/20231130/)__.\n",
+    "\n",
+    "This notebook explains how to use OCI's Generative AI models with LangChain."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Prerequisite\n",
+    "We will need to install the oci sdk"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "!pip install -U oci"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### OCI Generative AI API endpoint \n",
+    "https://inference.generativeai.us-chicago-1.oci.oraclecloud.com"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Authentication\n",
+    "The authentication methods supported for this LangChain integration are:\n",
+    "\n",
+    "1. 
API Key\n",
+    "2. Session token\n",
+    "3. Instance principal\n",
+    "4. Resource principal \n",
+    "\n",
+    "These follow the standard SDK authentication methods detailed __[here](https://docs.oracle.com/en-us/iaas/Content/API/Concepts/sdk_authentication_methods.htm)__.\n",
+    " "
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Usage"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from langchain_community.embeddings import OCIGenAIEmbeddings\n",
+    "\n",
+    "# use default authN method API-key\n",
+    "embeddings = OCIGenAIEmbeddings(\n",
+    "    model_id=\"MY_EMBEDDING_MODEL\",\n",
+    "    service_endpoint=\"https://inference.generativeai.us-chicago-1.oci.oraclecloud.com\",\n",
+    "    compartment_id=\"MY_OCID\",\n",
+    ")\n",
+    "\n",
+    "\n",
+    "query = \"This is a query in English.\"\n",
+    "response = embeddings.embed_query(query)\n",
+    "print(response)\n",
+    "\n",
+    "documents = [\"This is a sample document\", \"and here is another one\"]\n",
+    "response = embeddings.embed_documents(documents)\n",
+    "print(response)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Use Session Token to authN\n",
+    "embeddings = OCIGenAIEmbeddings(\n",
+    "    model_id=\"MY_EMBEDDING_MODEL\",\n",
+    "    service_endpoint=\"https://inference.generativeai.us-chicago-1.oci.oraclecloud.com\",\n",
+    "    compartment_id=\"MY_OCID\",\n",
+    "    auth_type=\"SECURITY_TOKEN\",\n",
+    "    auth_profile=\"MY_PROFILE\", # replace with your profile name\n",
+    ")\n",
+    "\n",
+    "\n",
+    "query = \"This is a sample query\"\n",
+    "response = embeddings.embed_query(query)\n",
+    "print(response)\n",
+    "\n",
+    "documents = [\"This is a sample document\", \"and here is another one\"]\n",
+    "response = embeddings.embed_documents(documents)\n",
+    "print(response)"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "oci_langchain",
+   "language": "python",
+   "name": 
"python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.9" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +}