Mirror of https://github.com/hwchase17/langchain.git (synced 2025-09-01 19:12:42 +00:00)
community: add OCI Endpoint (#14250)
- **Description:** [OCI Data Science](https://docs.oracle.com/en-us/iaas/data-science/using/home.htm) is a fully managed, serverless platform for data science teams to build, train, and manage machine learning models on Oracle Cloud Infrastructure. This PR adds an integration for using LangChain with an LLM hosted on an [OCI Data Science Model Deployment](https://docs.oracle.com/en-us/iaas/data-science/using/model-dep-about.htm). For authentication, [oracle-ads](https://accelerated-data-science.readthedocs.io/en/latest/user_guide/cli/authentication.html) is used to automatically load the credentials needed to invoke the endpoint.
- **Issue:** None
- **Dependencies:** `oracle-ads`
- **Tag maintainer:** @baskaryan
- **Twitter handle:** None

Co-authored-by: Erick Friis <erick@langchain.dev>
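As a quick illustration of the new integration (this snippet is not part of the diff): a minimal usage sketch, assuming a model has already been deployed to OCI Data Science and that oracle-ads can load credentials; the auth mode and endpoint URL below are placeholders.

```python
# Minimal usage sketch (illustration only). Assumes an existing OCI Data Science
# Model Deployment and oracle-ads credentials; the auth mode and endpoint URL
# are placeholders, not values from this PR.
import ads

from langchain_community.llms import OCIModelDeploymentTGI

# Let oracle-ads load credentials, e.g. resource principal when running inside OCI.
ads.set_auth("resource_principal")

llm = OCIModelDeploymentTGI(
    endpoint="https://modeldeployment.<region>.oci.customer-oci.com/<MD_OCID>/predict",
)
print(llm.invoke("Tell me a fact about Oracle Cloud Infrastructure."))
```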
@@ -49,6 +49,8 @@ EXPECT_ALL = [
     "Modal",
     "MosaicML",
     "Nebula",
+    "OCIModelDeploymentTGI",
+    "OCIModelDeploymentVLLM",
     "NIBittensorLLM",
     "NLPCloud",
     "Ollama",
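The hunk above registers the two new class names in `EXPECT_ALL`, the list that `langchain_community`'s import test compares against the package's public exports. As a hedged sketch only (the exact assertion in the repo may differ), such a check typically looks like this:

```python
from langchain_community import llms

# Names this PR expects to be publicly exported (subset shown for illustration).
EXPECT_ALL = [
    "OCIModelDeploymentTGI",
    "OCIModelDeploymentVLLM",
]

def test_all_imports() -> None:
    # Every expected name should appear in the package's public export list.
    assert set(EXPECT_ALL).issubset(set(llms.__all__))
```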
@@ -0,0 +1,46 @@
+"""Test OCI Data Science Model Deployment Endpoint."""
+import pytest
+import responses
+from pytest_mock import MockerFixture
+
+from langchain_community.llms import OCIModelDeploymentTGI, OCIModelDeploymentVLLM
+
+
+@pytest.mark.requires("ads")
+@responses.activate
+def test_call_vllm(mocker: MockerFixture) -> None:
+    """Test valid call to oci model deployment endpoint."""
+    endpoint = "https://MD_OCID/predict"
+    responses.add(
+        responses.POST,
+        endpoint,
+        json={
+            "choices": [{"index": 0, "text": "This is a completion."}],
+        },
+        status=200,
+    )
+    mocker.patch("ads.common.auth.default_signer", return_value=dict(signer=None))
+
+    llm = OCIModelDeploymentVLLM(endpoint=endpoint, model="my_model")
+    output = llm.invoke("This is a prompt.")
+    assert isinstance(output, str)
+
+
+@pytest.mark.requires("ads")
+@responses.activate
+def test_call_tgi(mocker: MockerFixture) -> None:
+    """Test valid call to oci model deployment endpoint."""
+    endpoint = "https://MD_OCID/predict"
+    responses.add(
+        responses.POST,
+        endpoint,
+        json={
+            "generated_text": "This is a completion.",
+        },
+        status=200,
+    )
+    mocker.patch("ads.common.auth.default_signer", return_value=dict(signer=None))
+
+    llm = OCIModelDeploymentTGI(endpoint=endpoint)
+    output = llm.invoke("This is a prompt.")
+    assert isinstance(output, str)
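The two tests above mock different response shapes because the two deployment types return completions differently: the vLLM-style endpoint responds with an OpenAI-like `choices` list, while the TGI endpoint responds with a `generated_text` field. The snippet below is an explanatory illustration of extracting the completion text from each mocked payload, not the library's actual parsing code:

```python
from typing import Any, Dict

def completion_from_vllm(body: Dict[str, Any]) -> str:
    # OpenAI-style payload mocked in test_call_vllm: the text is in choices[0]["text"].
    return body["choices"][0]["text"]

def completion_from_tgi(body: Dict[str, Any]) -> str:
    # TGI-style payload mocked in test_call_tgi: the text is in "generated_text".
    return body["generated_text"]

assert completion_from_vllm(
    {"choices": [{"index": 0, "text": "This is a completion."}]}
) == "This is a completion."
assert completion_from_tgi({"generated_text": "This is a completion."}) == "This is a completion."
```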