community[minor]: migrate bigdl-llm to ipex-llm (#19518)

- **Description**: The `bigdl-llm` library has been renamed to
[`ipex-llm`](https://github.com/intel-analytics/ipex-llm). This PR
migrates the `bigdl-llm` integration to `ipex-llm`.
- **Issue**: N/A. The original `bigdl-llm` integration was added in
https://github.com/langchain-ai/langchain/pull/17953
- **Dependencies**: `ipex-llm` library
- **Contribution maintainer**: @shane-huang

Updated doc: docs/docs/integrations/llms/ipex_llm.ipynb
Updated test: libs/community/tests/integration_tests/llms/test_ipex_llm.py
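
For context, a minimal usage sketch of the migrated integration, assuming the `IpexLLM` API exercised in the new integration test below (the model id and kwargs are simply the values used in that test):

```python
from langchain_community.llms.ipex_llm import IpexLLM

# Load a Hugging Face model through the ipex-llm integration;
# values mirror the integration test added in this commit.
llm = IpexLLM.from_model_id(
    model_id="lmsys/vicuna-7b-v1.5",
    model_kwargs={"temperature": 0, "max_length": 16, "trust_remote_code": True},
)

print(llm("Hello!"))  # LLM instances are directly callable in this version
```

Existing `BigdlLLM` code keeps a working import path: per the first diff below, the class now lives in `langchain_community.llms.bigdl_llm` rather than `langchain_community.llms.bigdl`.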
Author: Shengsheng Huang
Date: 2024-03-28 11:12:59 +08:00
Committed by: GitHub
Parent: a31f692f4e
Commit: ac1dd8ad94
8 changed files with 232 additions and 30 deletions


@@ -1,11 +1,11 @@
-"""Test BigDL LLM"""
+"""Test BigdlLLM"""
 from langchain_core.outputs import LLMResult

-from langchain_community.llms.bigdl import BigdlLLM
+from langchain_community.llms.bigdl_llm import BigdlLLM


 def test_call() -> None:
-    """Test valid call to baichuan."""
+    """Test valid call to bigdl-llm."""
     llm = BigdlLLM.from_model_id(
         model_id="lmsys/vicuna-7b-v1.5",
         model_kwargs={"temperature": 0, "max_length": 16, "trust_remote_code": True},
@@ -15,7 +15,7 @@ def test_call() -> None:


 def test_generate() -> None:
-    """Test valid call to baichuan."""
+    """Test valid call to bigdl-llm."""
     llm = BigdlLLM.from_model_id(
         model_id="lmsys/vicuna-7b-v1.5",
         model_kwargs={"temperature": 0, "max_length": 16, "trust_remote_code": True},


@@ -0,0 +1,25 @@
+"""Test IPEX LLM"""
+from langchain_core.outputs import LLMResult
+
+from langchain_community.llms.ipex_llm import IpexLLM
+
+
+def test_call() -> None:
+    """Test valid call to ipex-llm."""
+    llm = IpexLLM.from_model_id(
+        model_id="lmsys/vicuna-7b-v1.5",
+        model_kwargs={"temperature": 0, "max_length": 16, "trust_remote_code": True},
+    )
+    output = llm("Hello!")
+    assert isinstance(output, str)
+
+
+def test_generate() -> None:
+    """Test valid call to ipex-llm."""
+    llm = IpexLLM.from_model_id(
+        model_id="lmsys/vicuna-7b-v1.5",
+        model_kwargs={"temperature": 0, "max_length": 16, "trust_remote_code": True},
+    )
+    output = llm.generate(["Hello!"])
+    assert isinstance(output, LLMResult)
+    assert isinstance(output.generations, list)