huggingface: Add IPEX models support (#29179)

Co-authored-by: Erick Friis <erick@langchain.dev>
Ella Charlaix 2025-01-22 01:16:44 +01:00 committed by GitHub
parent d6a7aaa97d
commit 6f95db81b7
2 changed files with 186 additions and 60 deletions

@@ -9,6 +9,14 @@ from langchain_core.language_models.llms import BaseLLM
from langchain_core.outputs import Generation, GenerationChunk, LLMResult
from pydantic import ConfigDict, model_validator
from ..utils.import_utils import (
    IMPORT_ERROR,
    is_ipex_available,
    is_openvino_available,
    is_optimum_intel_available,
    is_optimum_intel_version,
)
DEFAULT_MODEL_ID = "gpt2"
DEFAULT_TASK = "text-generation"
VALID_TASKS = (
@@ -18,6 +26,8 @@ VALID_TASKS = (
"translation",
)
DEFAULT_BATCH_SIZE = 4
_MIN_OPTIMUM_VERSION = "1.21"
logger = logging.getLogger(__name__)
@@ -126,70 +136,70 @@ class HuggingFacePipeline(BaseLLM):
_model_kwargs["device_map"] = device_map
tokenizer = AutoTokenizer.from_pretrained(model_id, **_model_kwargs)
try:
if task == "text-generation":
if backend == "openvino":
try:
from optimum.intel.openvino import ( # type: ignore[import]
OVModelForCausalLM,
)
except ImportError:
raise ValueError(
"Could not import optimum-intel python package. "
"Please install it with: "
"pip install 'optimum[openvino,nncf]' "
)
try:
# use local model
model = OVModelForCausalLM.from_pretrained(
model_id, **_model_kwargs
)
except Exception:
# use remote model
model = OVModelForCausalLM.from_pretrained(
model_id, export=True, **_model_kwargs
)
else:
model = AutoModelForCausalLM.from_pretrained(
model_id, **_model_kwargs
)
elif task in ("text2text-generation", "summarization", "translation"):
if backend == "openvino":
try:
from optimum.intel.openvino import OVModelForSeq2SeqLM
except ImportError:
raise ValueError(
"Could not import optimum-intel python package. "
"Please install it with: "
"pip install 'optimum[openvino,nncf]' "
)
try:
# use local model
model = OVModelForSeq2SeqLM.from_pretrained(
model_id, **_model_kwargs
)
except Exception:
# use remote model
model = OVModelForSeq2SeqLM.from_pretrained(
model_id, export=True, **_model_kwargs
)
else:
model = AutoModelForSeq2SeqLM.from_pretrained(
model_id, **_model_kwargs
)
            else:
        if backend in {"openvino", "ipex"}:
            if task not in VALID_TASKS:
                raise ValueError(
                    f"Got invalid task {task}, "
                    f"currently only {VALID_TASKS} are supported"
                )
        except ImportError as e:
            raise ValueError(
                f"Could not load the {task} model due to missing dependencies."
            ) from e
            err_msg = f'Backend: {backend} {IMPORT_ERROR.format(f"optimum[{backend}]")}'
            if not is_optimum_intel_available():
                raise ImportError(err_msg)

            # TODO: upgrade _MIN_OPTIMUM_VERSION to 1.22 after release
            min_optimum_version = (
                "1.22"
                if backend == "ipex" and task != "text-generation"
                else _MIN_OPTIMUM_VERSION
            )

            if is_optimum_intel_version("<", min_optimum_version):
                raise ImportError(
                    f"Backend: {backend} requires optimum-intel>="
                    f"{min_optimum_version}. You can install it with pip: "
                    "`pip install --upgrade --upgrade-strategy eager "
                    f"`optimum[{backend}]`."
                )

            if backend == "openvino":
                if not is_openvino_available():
                    raise ImportError(err_msg)

                from optimum.intel import (  # type: ignore[import]
                    OVModelForCausalLM,
                    OVModelForSeq2SeqLM,
                )

                model_cls = (
                    OVModelForCausalLM
                    if task == "text-generation"
                    else OVModelForSeq2SeqLM
                )
            else:
                if not is_ipex_available():
                    raise ImportError(err_msg)

                if task == "text-generation":
                    from optimum.intel import (
                        IPEXModelForCausalLM,  # type: ignore[import]
                    )

                    model_cls = IPEXModelForCausalLM
                else:
                    from optimum.intel import (
                        IPEXModelForSeq2SeqLM,  # type: ignore[import]
                    )

                    model_cls = IPEXModelForSeq2SeqLM
        else:
            model_cls = (
                AutoModelForCausalLM
                if task == "text-generation"
                else AutoModelForSeq2SeqLM
            )

        model = model_cls.from_pretrained(model_id, **_model_kwargs)

        if tokenizer.pad_token is None:
            tokenizer.pad_token_id = model.config.eos_token_id

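For reference, here is a minimal usage sketch of the backend this commit enables (not part of the diff; it assumes langchain-huggingface is installed together with optimum[ipex] and intel_extension_for_pytorch, and it reuses the default gpt2 / text-generation combination from the constants above):

from langchain_huggingface import HuggingFacePipeline

# Sketch only: load a causal LM through the IPEX backend added by this commit.
# Per the version gate above, optimum-intel >= 1.21 is required
# (>= 1.22 for the seq2seq tasks).
llm = HuggingFacePipeline.from_model_id(
    model_id="gpt2",
    task="text-generation",
    backend="ipex",
    pipeline_kwargs={"max_new_tokens": 64},
)
print(llm.invoke("Intel Extension for PyTorch is"))

The second changed file, the new import_utils helper module referenced by the import block above, follows.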
@@ -0,0 +1,116 @@
import importlib.metadata
import importlib.util
import operator as op
from typing import Union

from packaging import version

STR_OPERATION_TO_FUNC = {
    ">": op.gt,
    ">=": op.ge,
    "==": op.eq,
    "!=": op.ne,
    "<=": op.le,
    "<": op.lt,
}

_optimum_available = importlib.util.find_spec("optimum") is not None
_optimum_version = "N/A"
if _optimum_available:
    try:
        _optimum_version = importlib.metadata.version("optimum")
    except importlib.metadata.PackageNotFoundError:
        _optimum_available = False

_optimum_intel_available = (
    _optimum_available and importlib.util.find_spec("optimum.intel") is not None
)
_optimum_intel_version = "N/A"
if _optimum_intel_available:
    try:
        _optimum_intel_version = importlib.metadata.version("optimum-intel")
    except importlib.metadata.PackageNotFoundError:
        _optimum_intel_available = False

_ipex_available = importlib.util.find_spec("intel_extension_for_pytorch") is not None
_openvino_available = importlib.util.find_spec("openvino") is not None


# This function was copied from: https://github.com/huggingface/accelerate/blob/874c4967d94badd24f893064cc3bef45f57cadf7/src/accelerate/utils/versions.py#L319
def compare_versions(
    library_or_version: Union[str, version.Version],
    operation: str,
    requirement_version: str,
) -> bool:
    """
    Compare a library version to some requirement using a given operation.

    Arguments:
        library_or_version (`str` or `packaging.version.Version`):
            A library name or a version to check.
        operation (`str`):
            A string representation of an operator, such as `">"` or `"<="`.
        requirement_version (`str`):
            The version to compare the library version against.
    """
    if operation not in STR_OPERATION_TO_FUNC.keys():
        raise ValueError(
            f"`operation` must be one of {list(STR_OPERATION_TO_FUNC.keys())}"
            f", received {operation}"
        )
    if isinstance(library_or_version, str):
        library_or_version = version.parse(
            importlib.metadata.version(library_or_version)
        )
    return STR_OPERATION_TO_FUNC[operation](
        library_or_version, version.parse(requirement_version)
    )


def is_optimum_available() -> bool:
    return _optimum_available


def is_optimum_intel_available() -> bool:
    return _optimum_intel_available


def is_ipex_available() -> bool:
    return _ipex_available


def is_openvino_available() -> bool:
    return _openvino_available


def is_optimum_version(operation: str, reference_version: str) -> bool:
    """
    Compare the current Optimum version to a given reference with an operation.
    """
    if not _optimum_version:
        return False
    return compare_versions(
        version.parse(_optimum_version), operation, reference_version
    )


def is_optimum_intel_version(operation: str, reference_version: str) -> bool:
    """
    Compare the current Optimum Intel version to a given reference with an operation.
    """
    if not _optimum_intel_version:
        return False
    return compare_versions(
        version.parse(_optimum_intel_version), operation, reference_version
    )


IMPORT_ERROR = """
requires the {0} library but it was not found in your environment.
You can install it with pip: `pip install {0}`.
Please note that you may need to restart your runtime after installation.
"""