mirror of https://github.com/hwchase17/langchain.git (synced 2025-06-24 07:35:18 +00:00)
community
commit 29ba5fd1f1 (parent 62a5f993f2)
@@ -5,10 +5,22 @@ from __future__ import annotations
 
 import logging
 import os
 import sys
-from typing import TYPE_CHECKING, Any, Dict, Optional, Set
+import warnings
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Dict,
+    Optional,
+    Sequence,
+    Set,
+    Type,
+    Union,
+)
 
 import requests
 from langchain_core.messages import BaseMessage
+from langchain_core.tools import BaseTool
 from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
 from pydantic import Field, SecretStr, model_validator
@@ -197,10 +209,18 @@ class ChatAnyscale(ChatOpenAI):
             encoding = tiktoken_.get_encoding(model)
         return model, encoding
 
-    def get_num_tokens_from_messages(self, messages: list[BaseMessage]) -> int:
+    def get_num_tokens_from_messages(
+        self,
+        messages: list[BaseMessage],
+        tools: Optional[
+            Sequence[Union[Dict[str, Any], Type, Callable, BaseTool]]
+        ] = None,
+    ) -> int:
         """Calculate num tokens with tiktoken package.
 
         Official documentation: https://github.com/openai/openai-cookbook/blob/main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb
         """
+        if tools is not None:
+            warnings.warn("Counting tokens in tool schemas is not yet supported.")
         if sys.version_info[1] <= 7:
             return super().get_num_tokens_from_messages(messages)
         model, encoding = self._get_encoding_model()
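For context, a brief usage sketch (not part of the diff): after this change, ChatAnyscale.get_num_tokens_from_messages accepts an optional tools sequence, warns that tool schemas are not yet counted, and then tokenizes the messages as before. The model name, tool schema, and credentials below are illustrative; tiktoken and an ANYSCALE_API_KEY environment variable are assumed.

from langchain_community.chat_models import ChatAnyscale
from langchain_core.messages import HumanMessage

# Illustrative model name; the Anyscale key is read from ANYSCALE_API_KEY.
chat = ChatAnyscale(model_name="meta-llama/Llama-2-7b-chat-hf")
messages = [HumanMessage(content="What is the weather in Paris?")]

# An OpenAI-style function schema. With this patch it is accepted but not
# counted: a warning is emitted and only the messages are tokenized.
weather_tool = {
    "type": "function",
    "function": {
        "name": "get_weather",
        "description": "Look up the current weather for a city.",
        "parameters": {
            "type": "object",
            "properties": {"city": {"type": "string"}},
            "required": ["city"],
        },
    },
}

num_tokens = chat.get_num_tokens_from_messages(messages, tools=[weather_tool])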
@@ -4,9 +4,21 @@ from __future__ import annotations
 
 import logging
 import sys
-from typing import TYPE_CHECKING, Any, Dict, Optional, Set
+import warnings
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Dict,
+    Optional,
+    Sequence,
+    Set,
+    Type,
+    Union,
+)
 
 from langchain_core.messages import BaseMessage
+from langchain_core.tools import BaseTool
 from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env
 from pydantic import Field, model_validator
 
@@ -138,11 +150,19 @@ class ChatEverlyAI(ChatOpenAI):
             encoding = tiktoken_.get_encoding(model)
         return model, encoding
 
-    def get_num_tokens_from_messages(self, messages: list[BaseMessage]) -> int:
+    def get_num_tokens_from_messages(
+        self,
+        messages: list[BaseMessage],
+        tools: Optional[
+            Sequence[Union[Dict[str, Any], Type, Callable, BaseTool]]
+        ] = None,
+    ) -> int:
         """Calculate num tokens with tiktoken package.
 
         Official documentation: https://github.com/openai/openai-cookbook/blob/
         main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb"""
+        if tools is not None:
+            warnings.warn("Counting tokens in tool schemas is not yet supported.")
         if sys.version_info[1] <= 7:
             return super().get_num_tokens_from_messages(messages)
         model, encoding = self._get_encoding_model()
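A similar sketch for ChatEverlyAI (again not part of the diff), showing that passing tools currently changes nothing except the emitted warning; the model name and EVERLYAI_API_KEY setup are assumptions.

import warnings

from langchain_community.chat_models import ChatEverlyAI
from langchain_core.messages import HumanMessage

chat = ChatEverlyAI(model_name="meta-llama/Llama-2-7b-chat-hf")  # illustrative model
messages = [HumanMessage(content="Hello!")]

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    with_tools = chat.get_num_tokens_from_messages(
        messages, tools=[{"type": "function", "function": {"name": "noop"}}]
    )

without_tools = chat.get_num_tokens_from_messages(messages)

# Tool schemas are ignored for now, so both counts match and the recorded
# warning says that counting tokens in tool schemas is not yet supported.
assert with_tools == without_tools
assert "not yet supported" in str(caught[-1].message)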
@@ -46,6 +46,7 @@ from langchain_core.messages import (
 )
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
 from langchain_core.runnables import Runnable
+from langchain_core.tools import BaseTool
 from langchain_core.utils import (
     get_from_dict_or_env,
     get_pydantic_field_names,
@@ -644,11 +645,19 @@ class ChatOpenAI(BaseChatModel):
         _, encoding_model = self._get_encoding_model()
         return encoding_model.encode(text)
 
-    def get_num_tokens_from_messages(self, messages: List[BaseMessage]) -> int:
+    def get_num_tokens_from_messages(
+        self,
+        messages: List[BaseMessage],
+        tools: Optional[
+            Sequence[Union[Dict[str, Any], Type, Callable, BaseTool]]
+        ] = None,
+    ) -> int:
         """Calculate num tokens for gpt-3.5-turbo and gpt-4 with tiktoken package.
 
         Official documentation: https://github.com/openai/openai-cookbook/blob/
         main/examples/How_to_format_inputs_to_ChatGPT_models.ipynb"""
+        if tools is not None:
+            warnings.warn("Counting tokens in tool schemas is not yet supported.")
         if sys.version_info[1] <= 7:
             return super().get_num_tokens_from_messages(messages)
         model, encoding = self._get_encoding_model()
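Finally, a sketch of the kinds of entries the new tools annotation admits for the community ChatOpenAI: dict schemas, plain callables or types, or BaseTool instances (here built with the langchain_core @tool decorator). All are currently ignored for counting; the tool names and the default model are illustrative, and an OPENAI_API_KEY is assumed.

from langchain_community.chat_models import ChatOpenAI
from langchain_core.messages import HumanMessage
from langchain_core.tools import tool


@tool
def get_weather(city: str) -> str:
    """Look up the current weather for a city."""
    return f"Sunny in {city}"


chat = ChatOpenAI()  # defaults to gpt-3.5-turbo
messages = [HumanMessage(content="What's the weather in Paris?")]

# A BaseTool (from @tool) and a raw dict schema are both accepted; counting
# still covers only the messages, with a warning about the tool schemas.
n = chat.get_num_tokens_from_messages(
    messages,
    tools=[get_weather, {"type": "function", "function": {"name": "noop"}}],
)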