mirror of
https://github.com/hwchase17/langchain.git
synced 2025-09-23 03:19:38 +00:00
community: update documentation and model IDs for FriendliAI provider (#28984)
### Description
- In the example, remove `llama-2-13b-chat` and `mixtral-8x7b-instruct-v0-1`.
- Fix the `llm` Friendli streaming implementation.
- Update examples in documentation and remove duplicates.

### Issue
N/A

### Dependencies
None

### Twitter handle
`@friendliai`
This commit is contained in:
@@ -16,14 +16,19 @@ from langchain_core.utils.utils import convert_to_secret_str
 from pydantic import Field, SecretStr
 
 
-def _stream_response_to_generation_chunk(stream_response: Any) -> GenerationChunk:
+def _stream_response_to_generation_chunk(
+    stream_response: Any,
+) -> GenerationChunk:
     """Convert a stream response to a generation chunk."""
-    if stream_response.event == "token_sampled":
-        return GenerationChunk(
-            text=stream_response.text,
-            generation_info={"token": str(stream_response.token)},
-        )
-    return GenerationChunk(text="")
+    if not stream_response.get("choices", None):
+        return GenerationChunk(text="")
+    return GenerationChunk(
+        text=stream_response.choices[0].text,
+        # generation_info=dict(
+        #     finish_reason=stream_response.choices[0].get("finish_reason", None),
+        #     logprobs=stream_response.choices[0].get("logprobs", None),
+        # ),
+    )
 
 
 class BaseFriendli(Serializable):
@@ -34,7 +39,7 @@ class BaseFriendli(Serializable):
     # Friendli Async client.
     async_client: Any = Field(default=None, exclude=True)
     # Model name to use.
-    model: str = "mixtral-8x7b-instruct-v0-1"
+    model: str = "meta-llama-3.1-8b-instruct"
     # Friendli personal access token to run as.
     friendli_token: Optional[SecretStr] = None
     # Friendli team ID to run as.
@@ -107,7 +112,7 @@ class Friendli(LLM, BaseFriendli):
             from langchain_community.llms import Friendli
 
             friendli = Friendli(
-                model="mixtral-8x7b-instruct-v0-1", friendli_token="YOUR FRIENDLI TOKEN"
+                model="meta-llama-3.1-8b-instruct", friendli_token="YOUR FRIENDLI TOKEN"
             )
     """
Reference in New Issue
Block a user