diff --git a/libs/community/langchain_community/llms/huggingface_endpoint.py b/libs/community/langchain_community/llms/huggingface_endpoint.py
index df25bf367e8..b4bbf96d820 100644
--- a/libs/community/langchain_community/llms/huggingface_endpoint.py
+++ b/libs/community/langchain_community/llms/huggingface_endpoint.py
@@ -46,9 +46,9 @@ class HuggingFaceEndpoint(LLM):
             print(llm("What is Deep Learning?"))
 
             # Streaming response example
-            from langchain_community.callbacks import streaming_stdout
+            from langchain_core.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
 
-            callbacks = [streaming_stdout.StreamingStdOutCallbackHandler()]
+            callbacks = [StreamingStdOutCallbackHandler()]
             llm = HuggingFaceEndpoint(
                 endpoint_url="http://localhost:8010/",
                 max_new_tokens=512,
@@ -63,7 +63,7 @@ class HuggingFaceEndpoint(LLM):
             )
             print(llm("What is Deep Learning?"))
 
-    """
+    """  # noqa: E501
 
     endpoint_url: Optional[str] = None
     """Endpoint URL to use."""
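
For reference, below is a minimal, self-contained sketch of the streaming example as it reads after this change, with the callback handler imported directly from `langchain_core.callbacks.streaming_stdout`. The endpoint URL, generation parameters, and API token are placeholders taken from the docstring context and assume a locally running text-generation-inference server; this is illustrative, not a verified snippet from the PR.

```python
# Sketch of the updated streaming example (placeholder endpoint/token,
# assumes a local text-generation-inference server on port 8010).
from langchain_core.callbacks.streaming_stdout import StreamingStdOutCallbackHandler
from langchain_community.llms import HuggingFaceEndpoint

callbacks = [StreamingStdOutCallbackHandler()]
llm = HuggingFaceEndpoint(
    endpoint_url="http://localhost:8010/",
    max_new_tokens=512,
    temperature=0.01,
    repetition_penalty=1.03,
    callbacks=callbacks,
    streaming=True,
    huggingfacehub_api_token="my-api-key",  # placeholder token
)
print(llm("What is Deep Learning?"))
```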