Merge branch 'master' into sqldocstore_postgres_compat
commit 88b08849f4
@@ -62,23 +62,25 @@
    },
    "outputs": [],
    "source": [
-    "from langchain.agents import AgentType, initialize_agent, load_tools\n",
+    "from langchain.agents import AgentExecutor, OpenAIFunctionsAgent\n",
+    "from langchain.tools import Tool\n",
     "from langchain_openai import OpenAI\n",
     "\n",
     "llm = OpenAI(temperature=0)\n",
     "\n",
-    "tools = load_tools(\n",
-    "    [\"awslambda\"],\n",
-    "    awslambda_tool_name=\"email-sender\",\n",
-    "    awslambda_tool_description=\"sends an email with the specified content to test@testing123.com\",\n",
-    "    function_name=\"testFunction1\",\n",
+    "tools = Tool(\n",
+    "    name=\"email-sender\",\n",
+    "    description=\"Sends an email with the specified content to test@testing123.com\",\n",
+    "    func=lambda input_text: f\"Email sent to test@testing123.com with content: {input_text}\",\n",
     ")\n",
     "\n",
-    "agent = initialize_agent(\n",
-    "    tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True\n",
-    ")\n",
+    "agent = OpenAIFunctionsAgent(llm=llm, tools=[tools])\n",
     "\n",
-    "agent.run(\"Send an email to test@testing123.com saying hello world.\")"
+    "agent_executor = AgentExecutor(agent=agent, tools=[tools], verbose=True)\n",
+    "\n",
+    "agent_executor.invoke(\n",
+    "    {\"input\": \" Send an email to test@testing123.com saying hello world.\"}\n",
+    ")"
    ]
   },
   {
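
For a quick sanity check outside the notebook, the stub Tool defined in the updated cell can be invoked on its own before any agent is wired around it. This is a minimal sketch, not part of the commit, and assumes only langchain itself:

    from langchain.tools import Tool

    # Stub tool mirroring the updated cell: no AWS Lambda call, just a canned reply.
    email_tool = Tool(
        name="email-sender",
        description="Sends an email with the specified content to test@testing123.com",
        func=lambda input_text: f"Email sent to test@testing123.com with content: {input_text}",
    )

    # Exercise the tool directly; an agent is not required for this check.
    print(email_tool.invoke("hello world"))
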
@@ -45,24 +45,24 @@ class BaseImageBlobParser(BaseBlobParser):
         """
         try:
             from PIL import Image as Img
-
-            with blob.as_bytes_io() as buf:
-                if blob.mimetype == "application/x-npy":
-                    img = Img.fromarray(numpy.load(buf))
-                else:
-                    img = Img.open(buf)
-                content = self._analyze_image(img)
-                logger.debug("Image text: %s", content.replace("\n", "\\n"))
-                yield Document(
-                    page_content=content,
-                    metadata={**blob.metadata, **{"source": blob.source}},
-                )
         except ImportError:
             raise ImportError(
                 "`Pillow` package not found, please install it with "
                 "`pip install Pillow`"
             )
+
+        with blob.as_bytes_io() as buf:
+            if blob.mimetype == "application/x-npy":
+                img = Img.fromarray(numpy.load(buf))
+            else:
+                img = Img.open(buf)
+            content = self._analyze_image(img)
+            logger.debug("Image text: %s", content.replace("\n", "\\n"))
+            yield Document(
+                page_content=content,
+                metadata={**blob.metadata, **{"source": blob.source}},
+            )
 
 
 class RapidOCRBlobParser(BaseImageBlobParser):
     """Parser for extracting text from images using the RapidOCR library.
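
The hunk above narrows the try/except so that only the optional Pillow import is guarded; errors raised while reading or analyzing the image are no longer misreported as a missing dependency. A minimal sketch of the same guarded-import pattern (the file name is illustrative):

    try:
        from PIL import Image
    except ImportError:
        raise ImportError(
            "`Pillow` package not found, please install it with `pip install Pillow`"
        )

    # Anything that fails past this point is a real processing error,
    # not a missing optional dependency.
    with open("example.png", "rb") as f:  # illustrative path
        img = Image.open(f)
        img.load()  # decode while the file handle is still open
        print(img.size)
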
@@ -88,8 +88,6 @@ from typing_extensions import Self
 
 from langchain_groq.version import __version__
 
-WARNED_DEFAULT_MODEL = False
-
 
 class ChatGroq(BaseChatModel):
     """`Groq` Chat large language models API.
@@ -305,7 +303,7 @@ class ChatGroq(BaseChatModel):
 
     client: Any = Field(default=None, exclude=True)  #: :meta private:
     async_client: Any = Field(default=None, exclude=True)  #: :meta private:
-    model_name: str = Field(default="mixtral-8x7b-32768", alias="model")
+    model_name: str = Field(alias="model")
     """Model name to use."""
     temperature: float = 0.7
     """What sampling temperature to use."""
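
With the default removed, ChatGroq must be given a model explicitly. A usage sketch under that assumption (the model name is only an example, and GROQ_API_KEY must be set in the environment):

    from langchain_groq import ChatGroq

    # `model` is now a required argument; the name below is just an example.
    llm = ChatGroq(model="llama-3.3-70b-versatile", temperature=0)
    print(llm.invoke("Say hello in one word.").content)
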
@@ -353,27 +351,6 @@
         populate_by_name=True,
     )
 
-    @model_validator(mode="before")
-    @classmethod
-    def warn_default_model(cls, values: Dict[str, Any]) -> Any:
-        """Warning anticipating removal of default model."""
-        # TODO(ccurme): remove this warning in 0.3.0 when default model is removed
-        global WARNED_DEFAULT_MODEL
-        if (
-            "model" not in values
-            and "model_name" not in values
-            and not WARNED_DEFAULT_MODEL
-        ):
-            warnings.warn(
-                "Groq is retiring the default model for ChatGroq, mixtral-8x7b-32768, "
-                "on March 20, 2025. Requests with the default model will start failing "
-                "on that date. Version 0.3.0 of langchain-groq will remove the "
-                "default. Please specify `model` explicitly, e.g., "
-                "`model='mistral-saba-24b'` or `model='llama-3.3-70b-versatile'`.",
-            )
-            WARNED_DEFAULT_MODEL = True
-        return values
-
     @model_validator(mode="before")
     @classmethod
     def build_extra(cls, values: Dict[str, Any]) -> Any:
@@ -2,7 +2,6 @@
 
 import json
 import os
-import warnings
 from typing import Any
 from unittest.mock import AsyncMock, MagicMock, patch
 
@@ -280,23 +279,3 @@ def test_groq_serialization() -> None:
 
     # Ensure a None was preserved
     assert llm.groq_api_base == llm2.groq_api_base
-
-
-def test_groq_warns_default_model() -> None:
-    """Test that a warning is raised if a default model is used."""
-
-    # Delete this test in 0.3 release, when the default model is removed.
-
-    # Test no warning if model is specified
-    with warnings.catch_warnings():
-        warnings.simplefilter("error")
-        ChatGroq(model="foo")
-
-    # Test warns if default model is used
-    with pytest.warns(match="default model"):
-        ChatGroq()
-
-    # Test only warns once
-    with warnings.catch_warnings():
-        warnings.simplefilter("error")
-        ChatGroq()
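
With the warning path deleted, a plausible follow-up check (hypothetical, not part of this commit) is that omitting the model now fails validation instead of warning:

    import pytest
    from pydantic import ValidationError

    from langchain_groq import ChatGroq


    def test_model_is_required() -> None:
        # Hypothetical test: with no default model, constructing ChatGroq without
        # `model` should raise a validation error rather than emit a warning.
        with pytest.raises(ValidationError):
            ChatGroq()  # type: ignore[call-arg]
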