Mirror of https://github.com/hwchase17/langchain.git (synced 2025-09-09 15:03:21 +00:00)
Modify Anyscale integration to work with Anyscale Endpoint (#11569)
**Description:** Modify the Anyscale integration to work with [Anyscale Endpoint](https://docs.endpoints.anyscale.com/); it supports invoke, async invoke, stream, and async stream.

---------

Co-authored-by: Bagatur <baskaryan@gmail.com>
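For orientation, here is a minimal sketch of how the updated integration is intended to be used, based on the environment variables and constructor argument introduced in this diff. The API base URL, API key, model name, and prompts below are placeholder values, not values taken from this commit:

```python
import os

from langchain.llms import Anyscale

# New credentials for the Anyscale Endpoint API (these replace the old
# ANYSCALE_SERVICE_URL / ANYSCALE_SERVICE_ROUTE / ANYSCALE_SERVICE_TOKEN variables).
os.environ["ANYSCALE_API_BASE"] = "https://api.endpoints.anyscale.com/v1"  # placeholder
os.environ["ANYSCALE_API_KEY"] = "esecret_..."  # placeholder

# model_name must be a model served by your Anyscale Endpoint; this value is illustrative.
llm = Anyscale(model_name="meta-llama/Llama-2-7b-chat-hf")

# Synchronous completion.
print(llm.invoke("When was George Washington president?"))

# Token-by-token streaming.
for chunk in llm.stream("When was George Washington president?"):
    print(chunk, end="", flush=True)
```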
@@ -1,7 +1,6 @@
 {
  "cells": [
   {
-   "attachments": {},
    "cell_type": "markdown",
    "id": "9597802c",
    "metadata": {},
@@ -10,9 +9,7 @@
     "\n",
     "[Anyscale](https://www.anyscale.com/) is a fully-managed [Ray](https://www.ray.io/) platform, on which you can build, deploy, and manage scalable AI and Python applications\n",
     "\n",
-    "This example goes over how to use LangChain to interact with `Anyscale` [service](https://docs.anyscale.com/productionize/services-v2/get-started). \n",
-    "\n",
-    "It will send the requests to Anyscale Service endpoint, which is concatenate `ANYSCALE_SERVICE_URL` and `ANYSCALE_SERVICE_ROUTE`, with a token defined in `ANYSCALE_SERVICE_TOKEN`"
+    "This example goes over how to use LangChain to interact with [Anyscale Endpoint](https://app.endpoints.anyscale.com/). "
    ]
   },
   {
@@ -26,9 +23,8 @@
    "source": [
     "import os\n",
     "\n",
-    "os.environ[\"ANYSCALE_SERVICE_URL\"] = ANYSCALE_SERVICE_URL\n",
-    "os.environ[\"ANYSCALE_SERVICE_ROUTE\"] = ANYSCALE_SERVICE_ROUTE\n",
-    "os.environ[\"ANYSCALE_SERVICE_TOKEN\"] = ANYSCALE_SERVICE_TOKEN"
+    "os.environ[\"ANYSCALE_API_BASE\"] = ANYSCALE_API_BASE\n",
+    "os.environ[\"ANYSCALE_API_KEY\"] = ANYSCALE_API_KEY"
    ]
   },
   {
@@ -41,7 +37,8 @@
    "outputs": [],
    "source": [
     "from langchain.llms import Anyscale\n",
-    "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain"
+    "from langchain.prompts import PromptTemplate\n",
+    "from langchain.chains import LLMChain"
    ]
   },
   {
@@ -69,7 +66,7 @@
    },
    "outputs": [],
    "source": [
-    "llm = Anyscale()"
+    "llm = Anyscale(model_name=ANYSCALE_MODEL_NAME)"
    ]
   },
   {
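(Aside: given the PromptTemplate and LLMChain imports earlier in this notebook, the newly constructed llm would typically be wired up roughly as follows; the template text and question are illustrative, not taken from the notebook.)

```python
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate

# Illustrative template; the notebook's actual prompt may differ.
template = "Question: {question}\n\nAnswer:"
prompt = PromptTemplate(template=template, input_variables=["question"])

# Chain the prompt with the Anyscale-backed LLM constructed above.
llm_chain = LLMChain(prompt=prompt, llm=llm)

print(llm_chain.run("When was George Washington president?"))
```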
@@ -99,7 +96,6 @@
    ]
   },
   {
-   "attachments": {},
    "cell_type": "markdown",
    "id": "42f05b34-1a44-4cbd-8342-35c1572b6765",
    "metadata": {},
@@ -136,13 +132,11 @@
    "source": [
     "import ray\n",
     "\n",
-    "\n",
-    "@ray.remote\n",
+    "@ray.remote(num_cpus=0.1)\n",
     "def send_query(llm, prompt):\n",
     "    resp = llm(prompt)\n",
     "    return resp\n",
     "\n",
-    "\n",
     "futures = [send_query.remote(llm, prompt) for prompt in prompt_list]\n",
     "results = ray.get(futures)"
    ]
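(Aside: the last hunk's parallel-query cell roughly expands to the standalone sketch below; prompt_list and the model name are placeholders here, since the notebook defines its own values in earlier cells.)

```python
import ray

from langchain.llms import Anyscale

llm = Anyscale(model_name="meta-llama/Llama-2-7b-chat-hf")  # placeholder model name

# Placeholder prompts; the notebook builds its own prompt_list in an earlier cell.
prompt_list = [
    "When was George Washington president?",
    "Explain the difference between nuclear fission and fusion.",
    "Give me a list of 5 science fiction books I should read next.",
]

# num_cpus=0.1 lets many lightweight query tasks share a single CPU core,
# so the fan-out is limited by the remote endpoint rather than by local CPUs.
@ray.remote(num_cpus=0.1)
def send_query(llm, prompt):
    resp = llm(prompt)
    return resp

# Launch all queries concurrently and gather the completions.
futures = [send_query.remote(llm, prompt) for prompt in prompt_list]
results = ray.get(futures)
print(results)
```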