Mirror of https://github.com/hwchase17/langchain.git (synced 2025-08-08 04:25:46 +00:00)
- [x] **PR title**: "community: adding langchain-predictionguard partner package documentation"
- [x] **PR message**:
  - **Description:** This PR adds documentation for the `langchain-predictionguard` partner package to the main LangChain repo and deprecates the current Prediction Guard LLMs integration in `langchain-community`. The LLMs integration was previously broken, so it was also updated one final time so that it keeps working from this point onward. This enables users to chat with LLMs through the Prediction Guard ecosystem (a usage sketch follows below).
  - **Package links:**
    - [PyPI](https://pypi.org/project/langchain-predictionguard/)
    - [GitHub repo](https://www.github.com/predictionguard/langchain-predictionguard)
  - **Issue:** None
  - **Dependencies:** None
  - **Twitter handle:** [@predictionguard](https://x.com/predictionguard)
- [x] **Add tests and docs**: All docs have been added for the partner package, and the current LLMs package test was updated to reflect the changes.
- [x] **Lint and test**: Linting tests are all passing.

---------

Co-authored-by: ccurme <chester.curme@gmail.com>
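For orientation, here is a minimal usage sketch of the partner package described in this PR. It assumes the `langchain-predictionguard` package exposes a `ChatPredictionGuard` chat model and reads credentials from a `PREDICTIONGUARD_API_KEY` environment variable; both names are assumptions based on common LangChain partner-package conventions, not something confirmed by this PR.

# Illustrative sketch only: the ChatPredictionGuard class name and the
# PREDICTIONGUARD_API_KEY environment variable are assumptions about the
# langchain-predictionguard partner package, not confirmed by this PR.
from langchain_predictionguard import ChatPredictionGuard

# Credentials are expected to be available in the environment, e.g.
#   export PREDICTIONGUARD_API_KEY="<your key>"
chat = ChatPredictionGuard(model="Hermes-3-Llama-3.1-8B")

response = chat.invoke("Tell me a short joke.")
print(response.content)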
"""Test Prediction Guard API wrapper."""
|
|
|
|
import pytest
|
|
|
|
from langchain_community.llms.predictionguard import PredictionGuard
|
|
|
|
|
|
def test_predictionguard_invoke() -> None:
|
|
"""Test valid call to prediction guard."""
|
|
llm = PredictionGuard(model="Hermes-3-Llama-3.1-8B") # type: ignore[call-arg]
|
|
output = llm.invoke("Tell a joke.")
|
|
assert isinstance(output, str)
|
|
|
|
|
|
def test_predictionguard_pii() -> None:
|
|
llm = PredictionGuard(
|
|
model="Hermes-3-Llama-3.1-8B",
|
|
predictionguard_input={"pii": "block"},
|
|
max_tokens=100,
|
|
temperature=1.0,
|
|
)
|
|
|
|
messages = [
|
|
"Hello, my name is John Doe and my SSN is 111-22-3333",
|
|
]
|
|
|
|
with pytest.raises(ValueError, match=r"Could not make prediction. pii detected"):
|
|
llm.invoke(messages)
|
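The PII check exercised by `test_predictionguard_pii` can be enabled the same way in application code. Below is a minimal sketch that reuses the community `PredictionGuard` class and the `predictionguard_input={"pii": "block"}` option exactly as they appear in the test above; the surrounding error handling is illustrative.

# Sketch: blocking prompts that contain PII, mirroring the integration test above.
from langchain_community.llms.predictionguard import PredictionGuard

llm = PredictionGuard(
    model="Hermes-3-Llama-3.1-8B",
    predictionguard_input={"pii": "block"},  # ask Prediction Guard to reject PII
    max_tokens=100,
)

try:
    llm.invoke("Hello, my name is John Doe and my SSN is 111-22-3333")
except ValueError as err:
    # The wrapper raises ValueError when the PII check blocks the request,
    # as asserted in test_predictionguard_pii above.
    print(f"Request blocked: {err}")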