Mirror of https://github.com/hwchase17/langchain.git (synced 2025-04-28 20:05:58 +00:00)

Commit a4857bf09b: update openai
Parent: 7de02c5997
@@ -886,8 +886,13 @@ class BaseChatOpenAI(BaseChatModel):
         _, encoding_model = self._get_encoding_model()
         return encoding_model.encode(text)
 
-    # TODO: Count bound tools as part of input.
-    def get_num_tokens_from_messages(self, messages: List[BaseMessage]) -> int:
+    def get_num_tokens_from_messages(
+        self,
+        messages: List[BaseMessage],
+        tools: Optional[
+            Sequence[Union[Dict[str, Any], Type, Callable, BaseTool]]
+        ] = None,
+    ) -> int:
         """Calculate num tokens for gpt-3.5-turbo and gpt-4 with tiktoken package.
 
         **Requirements**: You must have the ``pillow`` installed if you want to count
@@ -897,7 +902,16 @@ class BaseChatOpenAI(BaseChatModel):
         counting.
 
         OpenAI reference: https://github.com/openai/openai-cookbook/blob/
-        main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb"""
+        main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb
+
+        Args:
+            messages: The message inputs to tokenize.
+            tools: If provided, sequence of dict, BaseModel, function, or BaseTools
+                to be converted to tool schemas.
+        """
+        # TODO: Count bound tools as part of input.
+        if tools is not None:
+            warnings.warn("Counting tokens in tool schemas is not yet supported.")
         if sys.version_info[1] <= 7:
            return super().get_num_tokens_from_messages(messages)
        model, encoding = self._get_encoding_model()
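
For context, a minimal usage sketch of the signature added in this commit, assuming ChatOpenAI from langchain_openai and an OPENAI_API_KEY set in the environment; the get_weather tool and the chosen model name are illustrative only, not part of the diff.

from langchain_core.messages import HumanMessage, SystemMessage
from langchain_core.tools import tool
from langchain_openai import ChatOpenAI


@tool
def get_weather(city: str) -> str:
    """Look up the weather for a city (illustrative tool only)."""
    return f"Sunny in {city}"


llm = ChatOpenAI(model="gpt-3.5-turbo")
messages = [
    SystemMessage(content="You are a helpful assistant."),
    HumanMessage(content="What's the weather in Paris?"),
]

# Counting tokens for the messages alone uses tiktoken locally.
print(llm.get_num_tokens_from_messages(messages))

# After this commit the tools argument is accepted, but per the diff it only
# emits a warning: tool schemas are not yet counted toward the total.
print(llm.get_num_tokens_from_messages(messages, tools=[get_weather]))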