mirror of
https://github.com/hwchase17/langchain.git
synced 2025-08-16 16:11:02 +00:00
add aleph alpha llm (#1207)
Integrate Aleph Alpha's client into LangChain to provide access to the Luminous models; more info on the latest benchmarks here: https://www.aleph-alpha.com/luminous-performance-benchmarks
This commit is contained in:
parent c6ab1bb3cb
commit 53c67e04d4
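In brief, the new wrapper slots into LangChain like any other LLM. A minimal end-to-end sketch, assembled from the notebook added in this commit (it assumes the aleph_alpha_client package is installed and ALEPH_ALPHA_API_KEY is set in the environment):

from langchain import PromptTemplate, LLMChain
from langchain.llms import AlephAlpha

# Prompt with a stop sequence so generation halts at the next "Q:".
template = """Q: {question}

A:"""
prompt = PromptTemplate(template=template, input_variables=["question"])

llm = AlephAlpha(model="luminous-extended", maximum_tokens=20, stop_sequences=["Q:"])
llm_chain = LLMChain(prompt=prompt, llm=llm)

print(llm_chain.run("What is AI?"))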
docs/modules/llms/integrations/aleph_alpha.ipynb (new file, 108 lines)
@@ -0,0 +1,108 @@
{
 "cells": [
  {
   "attachments": {},
   "cell_type": "markdown",
   "id": "9597802c",
   "metadata": {},
   "source": [
    "# Aleph Alpha\n",
    "This example goes over how to use LangChain to interact with Aleph Alpha models"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "6fb585dd",
   "metadata": {},
   "outputs": [],
   "source": [
    "from langchain.llms import AlephAlpha\n",
    "from langchain import PromptTemplate, LLMChain"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "f81a230d",
   "metadata": {},
   "outputs": [],
   "source": [
    "template = \"\"\"Q: {question}\n",
    "\n",
    "A:\"\"\"\n",
    "\n",
    "prompt = PromptTemplate(template=template, input_variables=[\"question\"])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "f0d26e48",
   "metadata": {},
   "outputs": [],
   "source": [
    "llm = AlephAlpha(model=\"luminous-extended\", maximum_tokens=20, stop_sequences=[\"Q:\"])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "6811d621",
   "metadata": {},
   "outputs": [],
   "source": [
    "llm_chain = LLMChain(prompt=prompt, llm=llm)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "3058e63f",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "' Artificial Intelligence (AI) is the simulation of human intelligence processes by machines, especially computer systems.\\n'"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "question = \"What is AI?\"\n",
    "\n",
    "llm_chain.run(question)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": ".venv",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.9"
  },
  "vscode": {
   "interpreter": {
    "hash": "2d002ec47225e662695b764370d7966aa11eeb4302edc2f497bbf96d49c8f899"
   }
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
langchain/llms/__init__.py
@@ -2,6 +2,7 @@
 from typing import Dict, Type

 from langchain.llms.ai21 import AI21
+from langchain.llms.aleph_alpha import AlephAlpha
 from langchain.llms.anthropic import Anthropic
 from langchain.llms.base import BaseLLM
 from langchain.llms.cerebriumai import CerebriumAI
@@ -20,6 +21,7 @@ from langchain.llms.self_hosted_hugging_face import SelfHostedHuggingFaceLLM

 __all__ = [
     "Anthropic",
+    "AlephAlpha",
     "CerebriumAI",
     "Cohere",
     "ForefrontAI",
@@ -39,6 +41,7 @@ __all__ = [

 type_to_cls_dict: Dict[str, Type[BaseLLM]] = {
     "ai21": AI21,
+    "aleph_alpha": AlephAlpha,
     "anthropic": Anthropic,
     "cerebriumai": CerebriumAI,
     "cohere": Cohere,
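With the new import and the "aleph_alpha" registry entry above, the class is importable from the package root and can be looked up by its short name. A quick sanity check, as a sketch rather than anything in this commit:

from langchain.llms import AlephAlpha, type_to_cls_dict

# The registry maps the short name used in serialized LLM configs to the class.
assert type_to_cls_dict["aleph_alpha"] is AlephAlpha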
langchain/llms/aleph_alpha.py (new file, 236 lines)
@@ -0,0 +1,236 @@
"""Wrapper around Aleph Alpha APIs."""
from typing import Any, Dict, List, Optional, Sequence

from pydantic import BaseModel, Extra, root_validator

from langchain.llms.base import LLM
from langchain.llms.utils import enforce_stop_tokens
from langchain.utils import get_from_dict_or_env


class AlephAlpha(LLM, BaseModel):
    """Wrapper around Aleph Alpha large language models.

    To use, you should have the ``aleph_alpha_client`` python package installed,
    and the environment variable ``ALEPH_ALPHA_API_KEY`` set with your API key,
    or pass it as a named parameter to the constructor.

    Parameters are explained in more depth here:
    https://github.com/Aleph-Alpha/aleph-alpha-client/blob/c14b7dd2b4325c7da0d6a119f6e76385800e097b/aleph_alpha_client/completion.py#L10

    Example:
        .. code-block:: python

            from langchain.llms import AlephAlpha
            aleph_alpha = AlephAlpha(aleph_alpha_api_key="my-api-key")
    """

    client: Any  #: :meta private:

    model: Optional[str] = "luminous-base"
    """Model name to use."""

    maximum_tokens: int = 64
    """The maximum number of tokens to be generated."""

    temperature: float = 0.0
    """A non-negative float that tunes the degree of randomness in generation."""

    top_k: int = 0
    """Number of most likely tokens to consider at each step."""

    top_p: float = 0.0
    """Total probability mass of tokens to consider at each step."""

    presence_penalty: float = 0.0
    """Penalizes repeated tokens."""

    frequency_penalty: float = 0.0
    """Penalizes repeated tokens according to frequency."""

    repetition_penalties_include_prompt: Optional[bool] = False
    """Flag deciding whether presence penalty or frequency penalty are
    updated from the prompt."""

    use_multiplicative_presence_penalty: Optional[bool] = False
    """Flag deciding whether presence penalty is applied
    multiplicatively (True) or additively (False)."""

    penalty_bias: Optional[str] = None
    """Penalty bias for the completion."""

    penalty_exceptions: Optional[List[str]] = None
    """List of strings that may be generated without penalty,
    regardless of other penalty settings."""

    penalty_exceptions_include_stop_sequences: Optional[bool] = None
    """Whether stop_sequences should be included in penalty_exceptions."""

    best_of: Optional[int] = None
    """Returns the one with the "best of" results
    (highest log probability per token)."""

    n: int = 1
    """How many completions to generate for each prompt."""

    logit_bias: Optional[Dict[int, float]] = None
    """The logit bias allows influencing the likelihood of generating tokens."""

    log_probs: Optional[int] = None
    """Number of top log probabilities to be returned for each generated token."""

    tokens: Optional[bool] = False
    """Return the tokens of the completion."""

    disable_optimizations: Optional[bool] = False
    """Whether to disable Aleph Alpha's prompt/completion optimizations."""

    minimum_tokens: Optional[int] = 0
    """Generate at least this number of tokens."""

    echo: bool = False
    """Echo the prompt in the completion."""

    use_multiplicative_frequency_penalty: bool = False
    """Flag deciding whether frequency penalty is applied
    multiplicatively (True) or additively (False)."""

    sequence_penalty: float = 0.0

    sequence_penalty_min_length: int = 2

    use_multiplicative_sequence_penalty: bool = False

    completion_bias_inclusion: Optional[Sequence[str]] = None

    completion_bias_inclusion_first_token_only: bool = False

    completion_bias_exclusion: Optional[Sequence[str]] = None

    completion_bias_exclusion_first_token_only: bool = False
    """Only consider the first token for the completion_bias_exclusion."""

    contextual_control_threshold: Optional[float] = None
    """If set to None, attention control parameters only apply to those tokens that
    have explicitly been set in the request.
    If set to a non-None value, control parameters are also applied to similar tokens.
    """

    control_log_additive: Optional[bool] = True
    """True: apply control by adding the log(control_factor) to attention scores.
    False: (attention_scores - attention_scores.min(-1)) * control_factor
    """

    repetition_penalties_include_completion: bool = True
    """Flag deciding whether presence penalty or frequency penalty
    are updated from the completion."""

    raw_completion: bool = False
    """Force the raw completion of the model to be returned."""

    aleph_alpha_api_key: Optional[str] = None
    """API key for Aleph Alpha API."""

    stop_sequences: Optional[List[str]] = None
    """Stop sequences to use."""

    class Config:
        """Configuration for this pydantic object."""

        extra = Extra.forbid

    @root_validator()
    def validate_environment(cls, values: Dict) -> Dict:
        """Validate that the api key and python package exist in the environment."""
        aleph_alpha_api_key = get_from_dict_or_env(
            values, "aleph_alpha_api_key", "ALEPH_ALPHA_API_KEY"
        )
        try:
            import aleph_alpha_client

            values["client"] = aleph_alpha_client.Client(token=aleph_alpha_api_key)
        except ImportError:
            raise ValueError(
                "Could not import aleph_alpha_client python package. "
                "Please install it with `pip install aleph_alpha_client`."
            )
        return values

    @property
    def _default_params(self) -> Dict[str, Any]:
        """Get the default parameters for calling the Aleph Alpha API."""
        return {
            "maximum_tokens": self.maximum_tokens,
            "temperature": self.temperature,
            "top_k": self.top_k,
            "top_p": self.top_p,
            "presence_penalty": self.presence_penalty,
            "frequency_penalty": self.frequency_penalty,
            "n": self.n,
            "repetition_penalties_include_prompt": self.repetition_penalties_include_prompt,  # noqa: E501
            "use_multiplicative_presence_penalty": self.use_multiplicative_presence_penalty,  # noqa: E501
            "penalty_bias": self.penalty_bias,
            "penalty_exceptions": self.penalty_exceptions,
            "penalty_exceptions_include_stop_sequences": self.penalty_exceptions_include_stop_sequences,  # noqa: E501
            "best_of": self.best_of,
            "logit_bias": self.logit_bias,
            "log_probs": self.log_probs,
            "tokens": self.tokens,
            "disable_optimizations": self.disable_optimizations,
            "minimum_tokens": self.minimum_tokens,
            "echo": self.echo,
            "use_multiplicative_frequency_penalty": self.use_multiplicative_frequency_penalty,  # noqa: E501
            "sequence_penalty": self.sequence_penalty,
            "sequence_penalty_min_length": self.sequence_penalty_min_length,
            "use_multiplicative_sequence_penalty": self.use_multiplicative_sequence_penalty,  # noqa: E501
            "completion_bias_inclusion": self.completion_bias_inclusion,
            "completion_bias_inclusion_first_token_only": self.completion_bias_inclusion_first_token_only,  # noqa: E501
            "completion_bias_exclusion": self.completion_bias_exclusion,
            "completion_bias_exclusion_first_token_only": self.completion_bias_exclusion_first_token_only,  # noqa: E501
            "contextual_control_threshold": self.contextual_control_threshold,
            "control_log_additive": self.control_log_additive,
            "repetition_penalties_include_completion": self.repetition_penalties_include_completion,  # noqa: E501
            "raw_completion": self.raw_completion,
        }

    @property
    def _identifying_params(self) -> Dict[str, Any]:
        """Get the identifying parameters."""
        return {**{"model": self.model}, **self._default_params}

    @property
    def _llm_type(self) -> str:
        """Return type of llm."""
        return "aleph_alpha"

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Call out to Aleph Alpha's completion endpoint.

        Args:
            prompt: The prompt to pass into the model.
            stop: Optional list of stop words to use when generating.

        Returns:
            The string generated by the model.

        Example:
            .. code-block:: python

                response = aleph_alpha("Tell me a joke.")
        """
        from aleph_alpha_client import CompletionRequest, Prompt

        params = self._default_params
        if self.stop_sequences is not None and stop is not None:
            raise ValueError(
                "stop sequences found in both the input and default params."
            )
        elif self.stop_sequences is not None:
            params["stop_sequences"] = self.stop_sequences
        else:
            params["stop_sequences"] = stop
        request = CompletionRequest(prompt=Prompt.from_text(prompt), **params)
        response = self.client.complete(model=self.model, request=request)
        text = response.completions[0].completion
        # If stop tokens are provided, Aleph Alpha's endpoint returns them.
        # In order to make this consistent with other endpoints, we strip them.
        if stop is not None or self.stop_sequences is not None:
            text = enforce_stop_tokens(text, params["stop_sequences"])
        return text
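One subtlety in _call above: Aleph Alpha's endpoint returns stop sequences along with the completion, which is why the wrapper post-processes with enforce_stop_tokens. For reference, that helper in langchain.llms.utils amounts to a regex split; a sketch of its behavior at the time (note the stop strings are joined unescaped into the pattern):

import re
from typing import List


def enforce_stop_tokens(text: str, stop: List[str]) -> str:
    """Cut off the text as soon as any stop word occurs."""
    return re.split("|".join(stop), text)[0]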
poetry.lock (generated, 128 lines changed)
@@ -12,6 +12,21 @@ files = [
     {file = "absl_py-1.4.0-py3-none-any.whl", hash = "sha256:0d3fe606adfa4f7db64792dd4c7aee4ee0c38ab75dfd353b7a83ed3e957fcb47"},
 ]

+[[package]]
+name = "aiodns"
+version = "3.0.0"
+description = "Simple DNS resolver for asyncio"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+    {file = "aiodns-3.0.0-py3-none-any.whl", hash = "sha256:2b19bc5f97e5c936638d28e665923c093d8af2bf3aa88d35c43417fa25d136a2"},
+    {file = "aiodns-3.0.0.tar.gz", hash = "sha256:946bdfabe743fceeeb093c8a010f5d1645f708a241be849e17edfb0e49e08cd6"},
+]
+
+[package.dependencies]
+pycares = ">=4.0.0"
+
 [[package]]
 name = "aiohttp"
 version = "3.8.3"
@@ -121,6 +136,21 @@ yarl = ">=1.0,<2.0"
 [package.extras]
 speedups = ["Brotli", "aiodns", "cchardet"]

+[[package]]
+name = "aiohttp-retry"
+version = "2.8.3"
+description = "Simple retry client for aiohttp"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "aiohttp_retry-2.8.3-py3-none-any.whl", hash = "sha256:3aeeead8f6afe48272db93ced9440cf4eda8b6fd7ee2abb25357b7eb28525b45"},
+    {file = "aiohttp_retry-2.8.3.tar.gz", hash = "sha256:9a8e637e31682ad36e1ff9f8bcba912fcfc7d7041722bc901a4b948da4d71ea9"},
+]
+
+[package.dependencies]
+aiohttp = "*"
+
 [[package]]
 name = "aiosignal"
 version = "1.3.1"
@@ -148,6 +178,32 @@ files = [
     {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"},
 ]

+[[package]]
+name = "aleph-alpha-client"
+version = "2.15.0"
+description = "python client to interact with Aleph Alpha api endpoints"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+    {file = "aleph-alpha-client-2.15.0.tar.gz", hash = "sha256:b50329572c319fbf0d17fab3e34a46c15f164931f40ff6eb2379c8276d4728fa"},
+    {file = "aleph_alpha_client-2.15.0-py3-none-any.whl", hash = "sha256:1cc0e17cd5f4a578f10e3cc22379f9e6765fed112108bcd84d5e3a77a094c58f"},
+]
+
+[package.dependencies]
+aiodns = ">=3.0.0"
+aiohttp = ">=3.8.3"
+aiohttp-retry = ">=2.8.3"
+requests = ">=2.28"
+tokenizers = ">=0.13.2"
+urllib3 = ">=1.26"
+
+[package.extras]
+dev = ["black", "ipykernel", "mypy", "nbconvert", "pytest", "pytest-aiohttp", "pytest-cov", "pytest-dotenv", "pytest-httpserver", "types-requests"]
+docs = ["sphinx", "sphinx-rtd-theme"]
+test = ["pytest", "pytest-aiohttp", "pytest-cov", "pytest-dotenv", "pytest-httpserver"]
+types = ["mypy", "types-requests"]
+
 [[package]]
 name = "anthropic"
 version = "0.2.2"
@@ -4066,6 +4122,74 @@ files = [
 [package.dependencies]
 pyasn1 = ">=0.4.6,<0.5.0"

+[[package]]
+name = "pycares"
+version = "4.3.0"
+description = "Python interface for c-ares"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+    {file = "pycares-4.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:19c9cdd3322d422931982939773e453e491dfc5c0b2e23d7266959315c7a0824"},
+    {file = "pycares-4.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9e56e9cdf46a092970dc4b75bbabddea9f480be5eeadc3fcae3eb5c6807c4136"},
+    {file = "pycares-4.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c75a6241c79b935048272cb77df498da64b8defc8c4b29fdf9870e43ba4cbb4"},
+    {file = "pycares-4.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24d8654fac3742791b8bef59d1fbb3e19ae6a5c48876a6d98659f7c66ee546c4"},
+    {file = "pycares-4.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ebf50b049a245880f1aa16a6f72c4408e0a65b49ea1d3bf13383a44a2cabd2bf"},
+    {file = "pycares-4.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:84daf560962763c0359fd79c750ef480f0fda40c08b57765088dbe362e8dc452"},
+    {file = "pycares-4.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:978d10da7ee74b9979c494afa8b646411119ad0186a29c7f13c72bb4295630c6"},
+    {file = "pycares-4.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c5b9d7fe52eb3d243f5ead58d5c0011884226d961df8360a34618c38c7515"},
+    {file = "pycares-4.3.0-cp310-cp310-win32.whl", hash = "sha256:da7c7089ae617317d2cbe38baefd3821387b3bfef7b3ee5b797b871cb1257974"},
+    {file = "pycares-4.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:7106dc683db30e1d851283b7b9df7a5ea4964d6bdd000d918d91d4b1f9bed329"},
+    {file = "pycares-4.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4e7a24ecef0b1933f2a3fdbf328d1b529a76cda113f8364fa0742e5b3bd76566"},
+    {file = "pycares-4.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e7abccc2aa4771c06994e4d9ed596453061e2b8846f887d9c98a64ccdaf4790a"},
+    {file = "pycares-4.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531fed46c5ed798a914c3207be4ae7b297c4d09e4183d3cf8fd9ee59a55d5080"},
+    {file = "pycares-4.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c9335175af0c64a1e0ba67bdd349eb62d4eea0ad02c235ccdf0d535fd20f323"},
+    {file = "pycares-4.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5f0e95535027d2dcd51e780410632b0d3ed7e9e5ceb25dc0fe937f2c2960079"},
+    {file = "pycares-4.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3692179ce5fb96908ba342e1e5303608d0c976f0d5d4619fa9d3d6d9d5a9a1b4"},
+    {file = "pycares-4.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c4cb6cc7fe8e0606d30b60367f59fe26d1472e88555d61e202db70dea5c8edb"},
+    {file = "pycares-4.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3215445396c74103e2054e6b349d9e85883ceda2006d0039fc2d58c9b11818a2"},
+    {file = "pycares-4.3.0-cp311-cp311-win32.whl", hash = "sha256:6a0c0c3a0adf490bba9dbb37dbd07ec81e4a6584f095036ac34f06a633710ffe"},
+    {file = "pycares-4.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:995cb37cc39bd40ca87bb16555a0f7724f3be30d9f9059a4caab2fde45b1b903"},
+    {file = "pycares-4.3.0-cp36-cp36m-win32.whl", hash = "sha256:4c9187be72449c975c11daa1d94d7ddcc494f8a4c37a6c18f977cd7024a531d9"},
+    {file = "pycares-4.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d7405ba10a2903a58b8b0faedcb54994c9ee002ad01963587fabf93e7e479783"},
+    {file = "pycares-4.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:40aaa12081495f879f11f4cfc95edfec1ea14711188563102f9e33fe98728fac"},
+    {file = "pycares-4.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4972cac24b66c5997f3a3e2cb608e408066d80103d443e36d626a88a287b9ae7"},
+    {file = "pycares-4.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35886dba7aa5b73affca8729aeb5a1f5e94d3d9a764adb1b7e75bafca44eeca5"},
+    {file = "pycares-4.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5cea6e1f3be016f155d60f27f16c1074d58b4d6e123228fdbc3326d076016af8"},
+    {file = "pycares-4.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3a9fd2665b053afb39226ac6f8137a60910ca7729358456df2fb94866f4297de"},
+    {file = "pycares-4.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e8e9195f869120e44e0aa0a6098bb5c19947f4753054365891f592e6f9eab3ef"},
+    {file = "pycares-4.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:674486ecf2afb25ee219171b07cdaba481a1aaa2dabb155779c7be9ded03eaa9"},
+    {file = "pycares-4.3.0-cp37-cp37m-win32.whl", hash = "sha256:1b6cd3161851499b6894d1e23bfd633e7b775472f5af35ae35409c4a47a2d45e"},
+    {file = "pycares-4.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:710120c97b9afdba443564350c3f5f72fd9aae74d95b73dc062ca8ac3d7f36d7"},
+    {file = "pycares-4.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9103649bd29d84bc6bcfaf09def9c0592bbc766018fad19d76d09989608b915d"},
+    {file = "pycares-4.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c072dbaf73cb5434279578dc35322867d8d5df053e14fdcdcc589994ba4804ae"},
+    {file = "pycares-4.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008531733f9c7a976b59c7760a3672b191159fd69ae76c01ca051f20b5e44164"},
+    {file = "pycares-4.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2aae02d97d77dcff840ab55f86cb8b99bf644acbca17e1edb7048408b9782088"},
+    {file = "pycares-4.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:257953ae6d400a934fd9193aeb20990ac84a78648bdf5978e998bd007a4045cd"},
+    {file = "pycares-4.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c28d481efae26936ec08cb6beea305f4b145503b152cf2c4dc68cc4ad9644f0e"},
+    {file = "pycares-4.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:976249b39037dbfb709ccf7e1c40d2785905a0065536385d501b94570cfed96d"},
+    {file = "pycares-4.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:98568c30cfab6b327d94ae1acdf85bbba4cffd415980804985d34ca07e6f4791"},
+    {file = "pycares-4.3.0-cp38-cp38-win32.whl", hash = "sha256:a2f3c4f49f43162f7e684419d9834c2c8ec165e54cb8dc47aa9dc0c2132701c0"},
+    {file = "pycares-4.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:1730ef93e33e4682fbbf0e7fb19df2ed9822779d17de8ea6e20d5b0d71c1d2be"},
+    {file = "pycares-4.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5a26b3f1684557025da26ce65d076619890c82b95e38cc7284ce51c3539a1ce8"},
+    {file = "pycares-4.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:86112cce01655b9f63c5e53b74722084e88e784a7a8ad138d373440337c591c9"},
+    {file = "pycares-4.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01465a191dc78e923884bb45cd63c7e012623e520cf7ed67e542413ee334804"},
+    {file = "pycares-4.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9fd5d6012f3ee8c8038cbfe16e988bbd17b2f21eea86650874bf63757ee6161"},
+    {file = "pycares-4.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa36b8ea91eae20b5c7205f3e6654423f066af24a1df02b274770a96cbcafaa7"},
+    {file = "pycares-4.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:61019151130557c1788cae52e4f2f388a7520c9d92574f3a0d61c974c6740db0"},
+    {file = "pycares-4.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:231962bb46274c52632469a1e686fab065dbd106dbef586de4f7fb101e297587"},
+    {file = "pycares-4.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6c979512fa51c7ccef5204fe10ed4e5c44c2bce5f335fe98a3e423f1672bd7d4"},
+    {file = "pycares-4.3.0-cp39-cp39-win32.whl", hash = "sha256:655cf0df862ce3847a60e1a106dafa2ba2c14e6636bac49e874347acdc7312dc"},
+    {file = "pycares-4.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:36f2251ad0f99a5ce13df45c94c3161d9734c9e9fa2b9b4cc163b853ca170dc5"},
+    {file = "pycares-4.3.0.tar.gz", hash = "sha256:c542696f6dac978e9d99192384745a65f80a7d9450501151e4a7563e06010d45"},
+]
+
+[package.dependencies]
+cffi = ">=1.5.0"
+
+[package.extras]
+idna = ["idna (>=2.1)"]
+
 [[package]]
 name = "pycodestyle"
 version = "2.10.0"
@@ -6177,7 +6301,7 @@ name = "tokenizers"
 version = "0.13.2"
 description = "Fast and Customizable Tokenizers"
 category = "main"
-optional = true
+optional = false
 python-versions = "*"
 files = [
     {file = "tokenizers-0.13.2-cp310-cp310-macosx_10_11_x86_64.whl", hash = "sha256:a6f36b1b499233bb4443b5e57e20630c5e02fba61109632f5e00dab970440157"},
@@ -7068,4 +7192,4 @@ llms = ["anthropic", "cohere", "openai", "nlpcloud", "huggingface_hub", "manifes
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.8.1,<4.0"
-content-hash = "7997201f64373247d8799baed84a5ad11ab3d92e26cc2114b26e734cfb9664a4"
+content-hash = "2f916a8467f87cb850664b564c317dab569c9fee490e05308ac85427ef3abadc"
pyproject.toml
@@ -48,6 +48,7 @@ sentence-transformers = {version = "^2", optional = true}
 aiohttp = "^3.8.3"
 pypdf = {version = "^3.4.0", optional = true}
 networkx = {version="^2.6.3", optional = true}
+aleph-alpha-client = "^2.15.0"

 [tool.poetry.group.docs.dependencies]
 autodoc_pydantic = "^1.8.0"
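For anyone setting this up outside the Poetry workflow, the class docstring implies the equivalent manual steps: install the client with "pip install aleph_alpha_client", then either export ALEPH_ALPHA_API_KEY or pass aleph_alpha_api_key to the constructor (commands illustrative).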
tests/integration_tests/llms/test_aleph_alpha.py (new file, 10 lines)
@@ -0,0 +1,10 @@
"""Test Aleph Alpha API wrapper."""

from langchain.llms.aleph_alpha import AlephAlpha


def test_aleph_alpha_call() -> None:
    """Test valid call to Aleph Alpha."""
    llm = AlephAlpha(maximum_tokens=10)
    output = llm("Say foo:")
    assert isinstance(output, str)
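With a valid key exported, the new test should run on its own, e.g. "pytest tests/integration_tests/llms/test_aleph_alpha.py" (command illustrative; it hits the live API, which is why it lives under integration_tests).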