Mirror of https://github.com/hwchase17/langchain.git, synced 2025-10-30 23:29:54 +00:00
Moved the following modules to the new package langchain-community in a backwards compatible fashion:

```
mv langchain/langchain/adapters community/langchain_community
mv langchain/langchain/callbacks community/langchain_community/callbacks
mv langchain/langchain/chat_loaders community/langchain_community
mv langchain/langchain/chat_models community/langchain_community
mv langchain/langchain/document_loaders community/langchain_community
mv langchain/langchain/docstore community/langchain_community
mv langchain/langchain/document_transformers community/langchain_community
mv langchain/langchain/embeddings community/langchain_community
mv langchain/langchain/graphs community/langchain_community
mv langchain/langchain/llms community/langchain_community
mv langchain/langchain/memory/chat_message_histories community/langchain_community
mv langchain/langchain/retrievers community/langchain_community
mv langchain/langchain/storage community/langchain_community
mv langchain/langchain/tools community/langchain_community
mv langchain/langchain/utilities community/langchain_community
mv langchain/langchain/vectorstores community/langchain_community
mv langchain/langchain/agents/agent_toolkits community/langchain_community
mv langchain/langchain/cache.py community/langchain_community
```

Moved the following to core:

```
mv langchain/langchain/utils/json_schema.py core/langchain_core/utils
mv langchain/langchain/utils/html.py core/langchain_core/utils
mv langchain/langchain/utils/strings.py core/langchain_core/utils
cat langchain/langchain/utils/env.py >> core/langchain_core/utils/env.py
rm langchain/langchain/utils/env.py
```

See .scripts/community_split/script_integrations.sh for all changes
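The "backwards compatible" part of such a move usually relies on thin re-export shims left at the old import paths. Below is a minimal sketch of that pattern, assuming a PEP 562 module-level `__getattr__`; the module name, deprecation message, and mechanism are illustrative only, and the actual shims are generated by the split script referenced above:

```python
# Hypothetical shim left at an old path (e.g. langchain/llms/__init__.py)
# so that `from langchain.llms import OpenAI` keeps working after the split.
# This is a sketch of the pattern, not the code produced by the migration script.
import warnings
from typing import Any


def __getattr__(name: str) -> Any:
    """Lazily re-export symbols that now live in langchain_community."""
    import langchain_community.llms as _new_home

    if hasattr(_new_home, name):
        warnings.warn(
            f"Importing {name} from langchain.llms is deprecated; "
            "import it from langchain_community.llms instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return getattr(_new_home, name)
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
```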
124 lines · 3.6 KiB · Python
| """Integration tests for the langchain tracer module."""
 | |
| import asyncio
 | |
| import os
 | |
| 
 | |
| from aiohttp import ClientSession
 | |
| 
 | |
| from langchain_community.callbacks import wandb_tracing_enabled
 | |
| from langchain_community.llms import OpenAI
 | |
| 
 | |
| questions = [
 | |
|     (
 | |
|         "Who won the US Open men's final in 2019? "
 | |
|         "What is his age raised to the 0.334 power?"
 | |
|     ),
 | |
|     (
 | |
|         "Who is Olivia Wilde's boyfriend? "
 | |
|         "What is his current age raised to the 0.23 power?"
 | |
|     ),
 | |
|     (
 | |
|         "Who won the most recent formula 1 grand prix? "
 | |
|         "What is their age raised to the 0.23 power?"
 | |
|     ),
 | |
|     (
 | |
|         "Who won the US Open women's final in 2019? "
 | |
|         "What is her age raised to the 0.34 power?"
 | |
|     ),
 | |
|     ("Who is Beyonce's husband? " "What is his age raised to the 0.19 power?"),
 | |
| ]
 | |
| 
 | |
| 
 | |
def test_tracing_sequential() -> None:
    """Trace sequential agent runs via the W&B tracing environment variables."""
    from langchain.agents import AgentType, initialize_agent, load_tools

    os.environ["LANGCHAIN_WANDB_TRACING"] = "true"
    os.environ["WANDB_PROJECT"] = "langchain-tracing"

    for q in questions[:3]:
        llm = OpenAI(temperature=0)
        tools = load_tools(
            ["llm-math", "serpapi"],
            llm=llm,
        )
        agent = initialize_agent(
            tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True
        )
        agent.run(q)


def test_tracing_session_env_var() -> None:
    """Trace a single agent run with only LANGCHAIN_WANDB_TRACING set."""
    from langchain.agents import AgentType, initialize_agent, load_tools

    os.environ["LANGCHAIN_WANDB_TRACING"] = "true"

    llm = OpenAI(temperature=0)
    tools = load_tools(
        ["llm-math", "serpapi"],
        llm=llm,
    )
    agent = initialize_agent(
        tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True
    )
    agent.run(questions[0])


async def test_tracing_concurrent() -> None:
    """Trace concurrent agent runs driven through asyncio.gather."""
    from langchain.agents import AgentType, initialize_agent, load_tools

    os.environ["LANGCHAIN_WANDB_TRACING"] = "true"
    aiosession = ClientSession()
    llm = OpenAI(temperature=0)
    async_tools = load_tools(
        ["llm-math", "serpapi"],
        llm=llm,
        aiosession=aiosession,
    )
    agent = initialize_agent(
        async_tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True
    )
    tasks = [agent.arun(q) for q in questions[:3]]
    await asyncio.gather(*tasks)
    await aiosession.close()


def test_tracing_context_manager() -> None:
    """Trace only the run made inside the wandb_tracing_enabled context manager."""
    from langchain.agents import AgentType, initialize_agent, load_tools

    llm = OpenAI(temperature=0)
    tools = load_tools(
        ["llm-math", "serpapi"],
        llm=llm,
    )
    agent = initialize_agent(
        tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True
    )
    if "LANGCHAIN_WANDB_TRACING" in os.environ:
        del os.environ["LANGCHAIN_WANDB_TRACING"]
    with wandb_tracing_enabled():
        agent.run(questions[0])  # this should be traced

    agent.run(questions[0])  # this should not be traced


async def test_tracing_context_manager_async() -> None:
    """Trace only async runs started inside the tracing context manager."""
    from langchain.agents import AgentType, initialize_agent, load_tools

    llm = OpenAI(temperature=0)
    async_tools = load_tools(
        ["llm-math", "serpapi"],
        llm=llm,
    )
    agent = initialize_agent(
        async_tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True
    )
    if "LANGCHAIN_WANDB_TRACING" in os.environ:
        del os.environ["LANGCHAIN_WANDB_TRACING"]

    # start a background task
    task = asyncio.create_task(agent.arun(questions[0]))  # this should not be traced
    with wandb_tracing_enabled():
        tasks = [agent.arun(q) for q in questions[1:4]]  # these should be traced
        await asyncio.gather(*tasks)

    await task
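A note for running these integration tests: they call real services (an OpenAI LLM, the serpapi tool, and Weights & Biases for tracing), so credentials must be present before invoking pytest on this file. A small pre-flight sketch follows; the exact variable set is an assumption based on the clients the tests use, not something this file defines:

```python
# Hypothetical pre-flight check to run before `pytest` on this test module.
# The required variables are assumptions based on the services exercised here
# (OpenAI LLM, the serpapi tool, W&B tracing); adjust them to your environment.
import os
import sys

REQUIRED_ENV_VARS = ["OPENAI_API_KEY", "SERPAPI_API_KEY", "WANDB_API_KEY"]

missing = [name for name in REQUIRED_ENV_VARS if not os.environ.get(name)]
if missing:
    sys.exit(f"Missing environment variables: {', '.join(missing)}")
print("Environment looks ready for the W&B tracing integration tests.")
```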