Mirror of https://github.com/hwchase17/langchain.git (synced 2025-04-28 03:51:50 +00:00)

Commit dffb2b1fc9 ("update")
Parent: b8c04d0ece
@@ -32,6 +32,7 @@ from urllib.parse import urlparse
 import certifi
 import openai
 import tiktoken
+from langchain_core._api.beta_decorator import warn_beta
 from langchain_core._api.deprecation import deprecated
 from langchain_core.callbacks import (
     AsyncCallbackManagerForLLMRun,
@@ -416,6 +417,7 @@ def _handle_openai_bad_request(e: openai.BadRequestError) -> None:
 
 
 _MessageContent = Union[str, list[Union[str, dict]]]
+WARNED_IMAGE_GEN_BETA = False
 
 
 def _get_image_bytes_from_content(
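WARNED_IMAGE_GEN_BETA is module-level state, so the method in the next hunk has to declare it with global before rebinding it. A minimal standalone sketch of that warn-once pattern; the names below are illustrative, not langchain internals:

    import warnings

    _WARNED = False  # module-level flag, analogous to WARNED_IMAGE_GEN_BETA

    def do_beta_thing() -> None:
        global _WARNED  # rebinding a module global requires the declaration
        if not _WARNED:
            warnings.warn("this feature is in beta", stacklevel=2)
            _WARNED = True

    do_beta_thing()  # emits the warning once
    do_beta_thing()  # silent on later calls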
@@ -1279,10 +1281,20 @@ class BaseChatOpenAI(BaseChatModel):
         run_manager: Optional[CallbackManagerForLLMRun] = None,
         **kwargs: Any,
     ) -> ChatResult:
+        global WARNED_IMAGE_GEN_BETA
+        if not WARNED_IMAGE_GEN_BETA:
+            warn_beta(message="Image generation via ChatOpenAI is in beta.")
+            WARNED_IMAGE_GEN_BETA = True
+
         prompt = messages[-1].text()
-        images = []
-        for message in messages:
-            images.extend(_get_image_bytes_from_content(message.content))
+
+        # Get last set of images
+        for message in reversed(messages):
+            images = _get_image_bytes_from_content(message.content)
+            if images:
+                break
+        else:
+            images = []
         if images:
             result: ImagesResponse = self.root_client.images.edit(
                 model=self.model_name, image=images, prompt=prompt, **kwargs
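For reference, Python runs a loop's else block only when the loop completes without hitting break, so the new loop above falls back to an empty list when no message carries image bytes. A small self-contained sketch of the same pattern; the message list and helper below are made up for illustration:

    def extract_images(content: str) -> list[bytes]:
        # Stand-in for _get_image_bytes_from_content: pretend only
        # messages containing "<img>" carry image bytes.
        return [b"fake-png-bytes"] if "<img>" in content else []

    history = ["hello", "here is a picture <img>", "please edit it"]

    for message in reversed(history):
        images = extract_images(message)
        if images:
            break  # most recent message with images wins
    else:
        images = []  # loop never broke: nothing in the history had images

    print(len(images))  # -> 1, taken from the middle message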
@@ -1291,7 +1303,27 @@ class BaseChatOpenAI(BaseChatModel):
             result = self.root_client.images.generate(
                 model=self.model_name, prompt=prompt, **kwargs
             )
-        output_message = AIMessage(content="", response_metadata=result.model_dump())
+        image_blocks = []
+        if result.data:
+            for image in result.data:
+                if image.b64_json:
+                    image_blocks.append(
+                        {
+                            "type": "image",
+                            "source_type": "base64",
+                            "data": image.b64_json,
+                            "mime_type": "image/png",
+                        }
+                    )
+        if result.usage:
+            usage_metadata = _create_usage_metadata_responses(result.usage.model_dump())
+        else:
+            usage_metadata = None
+        output_message = AIMessage(
+            content=image_blocks or "",  # type: ignore[arg-type]
+            response_metadata={"created": result.created},
+            usage_metadata=usage_metadata,
+        )
         return ChatResult(generations=[ChatGeneration(message=output_message)])
 
     def _get_encoding_model(self) -> tuple[str, tiktoken.Encoding]:
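Given the content blocks assembled above, a caller receives the generated image as a base64 block on the AIMessage. A hedged usage sketch; the model name and the assumption that a plain invoke routes through this image path are illustrative, not taken from this diff:

    import base64

    from langchain_openai import ChatOpenAI

    # Assumption: an image-capable model name; substitute whatever image
    # model your account exposes.
    llm = ChatOpenAI(model="gpt-image-1")
    response = llm.invoke("A watercolor lighthouse at dusk")

    # Per the diff, image results come back as content blocks shaped like
    # {"type": "image", "source_type": "base64", "data": ..., "mime_type": "image/png"}.
    for block in response.content:
        if isinstance(block, dict) and block.get("type") == "image":
            with open("lighthouse.png", "wb") as f:
                f.write(base64.b64decode(block["data"]))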
@@ -8,7 +8,7 @@ license = { text = "MIT" }
 requires-python = "<4.0,>=3.9"
 dependencies = [
     "langchain-core<1.0.0,>=0.3.53",
-    "openai<2.0.0,>=1.68.2",
+    "openai<2.0.0,>=1.76.0",
     "tiktoken<1,>=0.7",
 ]
 name = "langchain-openai"
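To sanity-check an existing environment against the raised floor, a standard-library sketch; it assumes a plain X.Y.Z version string, and the bound is copied from the constraint above:

    from importlib import metadata

    required = (1, 76, 0)  # lower bound from "openai<2.0.0,>=1.76.0"
    installed = tuple(int(p) for p in metadata.version("openai").split(".")[:3])

    if installed < required:
        raise RuntimeError(
            f"openai {'.'.join(map(str, installed))} is installed, "
            "but langchain-openai now requires >= 1.76.0"
        )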
@@ -463,7 +463,7 @@ wheels = [
 
 [[package]]
 name = "langchain-core"
-version = "0.3.53"
+version = "0.3.55"
 source = { editable = "../../core" }
 dependencies = [
     { name = "jsonpatch" },
@@ -571,7 +571,7 @@ typing = [
 [package.metadata]
 requires-dist = [
     { name = "langchain-core", editable = "../../core" },
-    { name = "openai", specifier = ">=1.68.2,<2.0.0" },
+    { name = "openai", specifier = ">=1.76.0,<2.0.0" },
     { name = "tiktoken", specifier = ">=0.7,<1" },
 ]
 
@@ -829,7 +829,7 @@ wheels = [
 
 [[package]]
 name = "openai"
-version = "1.68.2"
+version = "1.76.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "anyio" },
@@ -841,9 +841,9 @@ dependencies = [
     { name = "tqdm" },
     { name = "typing-extensions" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/3f/6b/6b002d5d38794645437ae3ddb42083059d556558493408d39a0fcea608bc/openai-1.68.2.tar.gz", hash = "sha256:b720f0a95a1dbe1429c0d9bb62096a0d98057bcda82516f6e8af10284bdd5b19", size = 413429 }
+sdist = { url = "https://files.pythonhosted.org/packages/84/51/817969ec969b73d8ddad085670ecd8a45ef1af1811d8c3b8a177ca4d1309/openai-1.76.0.tar.gz", hash = "sha256:fd2bfaf4608f48102d6b74f9e11c5ecaa058b60dad9c36e409c12477dfd91fb2", size = 434660 }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/fd/34/cebce15f64eb4a3d609a83ac3568d43005cc9a1cba9d7fde5590fd415423/openai-1.68.2-py3-none-any.whl", hash = "sha256:24484cb5c9a33b58576fdc5acf0e5f92603024a4e39d0b99793dfa1eb14c2b36", size = 606073 },
+    { url = "https://files.pythonhosted.org/packages/59/aa/84e02ab500ca871eb8f62784426963a1c7c17a72fea3c7f268af4bbaafa5/openai-1.76.0-py3-none-any.whl", hash = "sha256:a712b50e78cf78e6d7b2a8f69c4978243517c2c36999756673e07a14ce37dc0a", size = 661201 },
 ]
 
 [[package]]
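The lock entries pin exact artifact hashes. As an illustration of what the lock guarantees, a hedged sketch that downloads the pinned sdist and checks its sha256 against the value recorded above (needs network access; not part of the commit):

    import hashlib
    import urllib.request

    # URL and expected digest copied from the uv.lock entry for openai 1.76.0.
    URL = "https://files.pythonhosted.org/packages/84/51/817969ec969b73d8ddad085670ecd8a45ef1af1811d8c3b8a177ca4d1309/openai-1.76.0.tar.gz"
    EXPECTED = "fd2bfaf4608f48102d6b74f9e11c5ecaa058b60dad9c36e409c12477dfd91fb2"

    data = urllib.request.urlopen(URL).read()
    digest = hashlib.sha256(data).hexdigest()
    assert digest == EXPECTED, f"sha256 mismatch: {digest}"
    print(f"ok: {len(data)} bytes, digest matches uv.lock")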