mirror of
https://github.com/hwchase17/langchain.git
synced 2025-09-18 08:03:36 +00:00
community: adding langchain-predictionguard partner package documentation (#28832)
- [x] **PR title**: "community: adding langchain-predictionguard partner package documentation" - [x] **PR message**: - **Description:** This PR adds documentation for the langchain-predictionguard package to main langchain repo, along with deprecating current Prediction Guard LLMs package. The LLMs package was previously broken, so I also updated it one final time to allow it to continue working from this point onward. This enables users to chat with LLMs through the Prediction Guard ecosystem. - **Package Links**: - [PyPI](https://pypi.org/project/langchain-predictionguard/) - [Github Repo](https://www.github.com/predictionguard/langchain-predictionguard) - **Issue:** None - **Dependencies:** None - **Twitter handle:** [@predictionguard](https://x.com/predictionguard) - [x] **Add tests and docs**: All docs have been added for the partner package, and the current LLMs package test was updated to reflect changes. - [x] **Lint and test**: Linting tests are all passing. --------- Co-authored-by: ccurme <chester.curme@gmail.com>
This commit is contained in:
@@ -1,10 +1,28 @@
|
||||
"""Test Prediction Guard API wrapper."""
|
||||
|
||||
import pytest
|
||||
|
||||
from langchain_community.llms.predictionguard import PredictionGuard
|
||||
|
||||
|
||||
def test_predictionguard_invoke() -> None:
    """Test a valid invoke call through the Prediction Guard LLM wrapper.

    NOTE(review): this is an integration test — it performs a real network
    call to the Prediction Guard API and presumably requires an API key in
    the environment; it cannot run in isolation.
    """
    # Hermes-3-Llama-3.1-8B replaces the retired OpenAI-text-davinci-003
    # model used by the earlier test_predictionguard_call version.
    llm = PredictionGuard(model="Hermes-3-Llama-3.1-8B")  # type: ignore[call-arg]
    output = llm.invoke("Tell a joke.")
    # The wrapper should return plain text for a completion-style call.
    assert isinstance(output, str)
|
||||
|
||||
|
||||
def test_predictionguard_pii() -> None:
    """Test that Prediction Guard's PII input check blocks a prompt with PII.

    NOTE(review): this is an integration test — it performs a real network
    call to the Prediction Guard API and presumably requires an API key in
    the environment; it cannot run in isolation.
    """
    llm = PredictionGuard(
        model="Hermes-3-Llama-3.1-8B",
        # "block" asks the service to reject (rather than mask) any
        # personally identifiable information detected in the input.
        predictionguard_input={"pii": "block"},
        max_tokens=100,
        temperature=1.0,
    )

    # Prompt deliberately contains an SSN so the PII check fires.
    messages = [
        "Hello, my name is John Doe and my SSN is 111-22-3333",
    ]

    # The wrapper surfaces the API rejection as a ValueError whose message
    # reports the detected PII.
    with pytest.raises(ValueError, match=r"Could not make prediction. pii detected"):
        llm.invoke(messages)
|
||||
|
Reference in New Issue
Block a user