community[patch]: SambaNova integration API update (#21848)

- **Description:**
  - SambaStudio generic endpoint compatibility added
  - Improved error description and handling
  - Streaming examples added
Jorge Piedrahita Ortiz
2024-05-20 17:29:59 -05:00
committed by GitHub
parent c6da9533ac
commit e6207ad4f3
2 changed files with 248 additions and 70 deletions
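For reference, here is a minimal sketch of the streaming usage this commit adds, mirroring the new SambaStudio notebook cell in the diff below. It assumes the `SAMBASTUDIO_BASE_URL`, `SAMBASTUDIO_PROJECT_ID`, `SAMBASTUDIO_ENDPOINT_ID`, and `SAMBASTUDIO_API_KEY` environment variables are already set, as shown in the setup cell.

```python
# Minimal sketch of the streaming usage added by this commit; assumes the
# SAMBASTUDIO_* environment variables from the setup cell are already exported.
from langchain_community.llms.sambanova import SambaStudio

llm = SambaStudio(
    streaming=True,
    model_kwargs={
        "do_sample": True,
        "max_tokens_to_generate": 1000,
        "temperature": 0.01,
    },
)

# Print tokens as they arrive instead of waiting for the full completion.
for chunk in llm.stream("Why should I use open source models?"):
    print(chunk, end="", flush=True)
```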


@@ -99,6 +99,36 @@
"print(llm.invoke(\"Why should I use open source models?\"))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Streaming response\n",
"\n",
"from langchain_community.llms.sambanova import Sambaverse\n",
"\n",
"llm = Sambaverse(\n",
" sambaverse_model_name=\"Meta/llama-2-7b-chat-hf\",\n",
" streaming=True,\n",
" model_kwargs={\n",
" \"do_sample\": True,\n",
" \"max_tokens_to_generate\": 1000,\n",
" \"temperature\": 0.01,\n",
" \"process_prompt\": True,\n",
" \"select_expert\": \"llama-2-7b-chat-hf\",\n",
" # \"stop_sequences\": '\\\"sequence1\\\",\\\"sequence2\\\"',\n",
" # \"repetition_penalty\": 1.0,\n",
" # \"top_k\": 50,\n",
" # \"top_p\": 1.0\n",
" },\n",
")\n",
"\n",
"for chunk in llm.stream(\"Why should I use open source models?\"):\n",
" print(chunk, end=\"\", flush=True)"
]
},
{
"cell_type": "markdown",
"metadata": {},
@@ -147,12 +177,14 @@
"import os\n",
"\n",
"sambastudio_base_url = \"<Your SambaStudio environment URL>\"\n",
"# sambastudio_base_uri = \"<Your SambaStudio endpoint base URI>\" # optional, \"api/predict/nlp\" set as default\n",
"sambastudio_project_id = \"<Your SambaStudio project id>\"\n",
"sambastudio_endpoint_id = \"<Your SambaStudio endpoint id>\"\n",
"sambastudio_api_key = \"<Your SambaStudio endpoint API key>\"\n",
"\n",
"# Set the environment variables\n",
"os.environ[\"SAMBASTUDIO_BASE_URL\"] = sambastudio_base_url\n",
"# os.environ[\"SAMBASTUDIO_BASE_URI\"] = sambastudio_base_uri\n",
"os.environ[\"SAMBASTUDIO_PROJECT_ID\"] = sambastudio_project_id\n",
"os.environ[\"SAMBASTUDIO_ENDPOINT_ID\"] = sambastudio_endpoint_id\n",
"os.environ[\"SAMBASTUDIO_API_KEY\"] = sambastudio_api_key"
@@ -188,6 +220,33 @@
"\n",
"print(llm.invoke(\"Why should I use open source models?\"))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Streaming response\n",
"\n",
"from langchain_community.llms.sambanova import SambaStudio\n",
"\n",
"llm = SambaStudio(\n",
" streaming=True,\n",
" model_kwargs={\n",
" \"do_sample\": True,\n",
" \"max_tokens_to_generate\": 1000,\n",
" \"temperature\": 0.01,\n",
" # \"repetition_penalty\": 1.0,\n",
" # \"top_k\": 50,\n",
" # \"top_logprobs\": 0,\n",
" # \"top_p\": 1.0\n",
" },\n",
")\n",
"\n",
"for chunk in llm.stream(\"Why should I use open source models?\"):\n",
" print(chunk, end=\"\", flush=True)"
]
}
],
"metadata": {