Mirror of https://github.com/hwchase17/langchain.git, synced 2025-09-11 16:01:33 +00:00
community[major], core[patch], langchain[patch], experimental[patch]: Create langchain-community (#14463)
Moved the following modules to the new package langchain-community in a backwards-compatible fashion:

```
mv langchain/langchain/adapters community/langchain_community
mv langchain/langchain/callbacks community/langchain_community/callbacks
mv langchain/langchain/chat_loaders community/langchain_community
mv langchain/langchain/chat_models community/langchain_community
mv langchain/langchain/document_loaders community/langchain_community
mv langchain/langchain/docstore community/langchain_community
mv langchain/langchain/document_transformers community/langchain_community
mv langchain/langchain/embeddings community/langchain_community
mv langchain/langchain/graphs community/langchain_community
mv langchain/langchain/llms community/langchain_community
mv langchain/langchain/memory/chat_message_histories community/langchain_community
mv langchain/langchain/retrievers community/langchain_community
mv langchain/langchain/storage community/langchain_community
mv langchain/langchain/tools community/langchain_community
mv langchain/langchain/utilities community/langchain_community
mv langchain/langchain/vectorstores community/langchain_community
mv langchain/langchain/agents/agent_toolkits community/langchain_community
mv langchain/langchain/cache.py community/langchain_community
```

Moved the following to core:

```
mv langchain/langchain/utils/json_schema.py core/langchain_core/utils
mv langchain/langchain/utils/html.py core/langchain_core/utils
mv langchain/langchain/utils/strings.py core/langchain_core/utils
cat langchain/langchain/utils/env.py >> core/langchain_core/utils/env.py
rm langchain/langchain/utils/env.py
```

See .scripts/community_split/script_integrations.sh for all changes.
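Because the move is backwards compatible, the old import paths keep working while the new langchain-community paths become the canonical ones. A minimal sketch of the two equivalent imports, using `HumanInputLLM` from the file in this diff as an example (the old path re-exports from the new package and may warn or be deprecated in later releases):

```python
# New, canonical location after this split (assumes langchain-community is installed):
from langchain_community.llms import HumanInputLLM

# Old location, kept working for backwards compatibility by re-exporting
# from langchain_community:
from langchain.llms import HumanInputLLM  # noqa: F811
```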
libs/community/langchain_community/llms/human.py (new file, 85 lines)
@@ -0,0 +1,85 @@
```python
from typing import Any, Callable, List, Mapping, Optional

from langchain_core.callbacks import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
from langchain_core.pydantic_v1 import Field

from langchain_community.llms.utils import enforce_stop_tokens


def _display_prompt(prompt: str) -> None:
    """Display the given prompt to the user."""
    print(f"\n{prompt}")


def _collect_user_input(
    separator: Optional[str] = None, stop: Optional[List[str]] = None
) -> str:
    """Collect and return user input as a single string."""
    separator = separator or "\n"
    lines = []

    while True:
        line = input()
        if not line:
            break
        lines.append(line)

        if stop and any(seq in line for seq in stop):
            break
    # Combine all lines into a single string
    multi_line_input = separator.join(lines)
    return multi_line_input


class HumanInputLLM(LLM):
    """LLM wrapper that returns the user's own input as the response."""

    input_func: Callable = Field(default_factory=lambda: _collect_user_input)
    prompt_func: Callable[[str], None] = Field(default_factory=lambda: _display_prompt)
    separator: str = "\n"
    input_kwargs: Mapping[str, Any] = {}
    prompt_kwargs: Mapping[str, Any] = {}

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Return an empty dictionary, as there are no identifying parameters."""
        return {}

    @property
    def _llm_type(self) -> str:
        """Return the type of LLM."""
        return "human-input"

    def _call(
        self,
        prompt: str,
        stop: Optional[List[str]] = None,
        run_manager: Optional[CallbackManagerForLLMRun] = None,
        **kwargs: Any,
    ) -> str:
        """
        Display the prompt to the user and return their input as the response.

        Args:
            prompt (str): The prompt to be displayed to the user.
            stop (Optional[List[str]]): A list of stop strings.
            run_manager (Optional[CallbackManagerForLLMRun]): Currently not used.

        Returns:
            str: The user's input as the response.
        """
        self.prompt_func(prompt, **self.prompt_kwargs)
        user_input = self.input_func(
            separator=self.separator, stop=stop, **self.input_kwargs
        )

        if stop is not None:
            # Enforce stop tokens here, since a human will not stop
            # generating at them on their own.
            user_input = enforce_stop_tokens(user_input, stop)
        return user_input
```
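As a quick illustration of what this moved module does, a minimal usage sketch (assuming langchain-community is installed and the script runs in an interactive terminal; the prompt text is only an example):

```python
from langchain_community.llms import HumanInputLLM

# The "model" is a human at the terminal: the prompt is printed, and whatever
# the user types (terminated by an empty line) is returned as the completion.
llm = HumanInputLLM()

response = llm.invoke("Summarize the plot of Hamlet in one sentence.")
print(response)
```

This kind of wrapper is typically used to debug chains or agents interactively, with a person standing in for the model to see exactly what would be asked of it.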