bump version to 151 (#3658)

Author: Harrison Chase (committed by GitHub), 2023-04-27 09:02:39 -07:00
Parent: 3b609642ae
Commit: 0cf890eed4
2 changed files with 10 additions and 7 deletions

langchain/utilities/powerbi.py

@@ -4,13 +4,11 @@ from __future__ import annotations
 import logging
 import os
-from typing import Any, Dict, Iterable, List, Optional, Union
+from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Union

 import aiohttp
 import requests
 from aiohttp import ServerTimeoutError
-from azure.core.credentials import TokenCredential
-from azure.core.exceptions import ClientAuthenticationError
 from pydantic import BaseModel, Field, root_validator
 from requests.exceptions import Timeout
@@ -20,6 +18,9 @@ _LOGGER = logging.getLogger(__name__)
 BASE_URL = os.getenv("POWERBI_BASE_URL", "https://api.powerbi.com/v1.0/myorg/datasets/")

+if TYPE_CHECKING:
+    from azure.core.credentials import TokenCredential
+
 class PowerBIDataset(BaseModel):
     """Create PowerBI engine from dataset ID and credential or token.
@@ -62,6 +63,8 @@ class PowerBIDataset(BaseModel):
     @property
     def headers(self) -> Dict[str, str]:
         """Get the token."""
+        from azure.core.exceptions import ClientAuthenticationError
+
         token = None
         if self.token:
             token = self.token
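The net effect of the powerbi.py change is that azure-core is no longer a hard import-time dependency: the TokenCredential import now runs only under TYPE_CHECKING, and ClientAuthenticationError is imported inside the method that actually uses it. A minimal sketch of that pattern follows; the ExampleClient class and the Power BI scope string are illustrative, not the library's actual code.

from __future__ import annotations

from typing import TYPE_CHECKING, Dict, Optional

if TYPE_CHECKING:
    # Evaluated only by static type checkers; azure-core does not need to be
    # installed at runtime just to import this module.
    from azure.core.credentials import TokenCredential


class ExampleClient:
    """Illustrative client that accepts an optional azure-core credential."""

    def __init__(self, credential: Optional["TokenCredential"] = None) -> None:
        self.credential = credential

    @property
    def headers(self) -> Dict[str, str]:
        # Deferred import: azure-core is only required on this code path.
        from azure.core.exceptions import ClientAuthenticationError

        if self.credential is None:
            raise ClientAuthenticationError("No credential or token supplied.")
        token = self.credential.get_token(
            "https://analysis.windows.net/powerbi/api/.default"  # illustrative scope
        )
        return {"Authorization": f"Bearer {token.token}"}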

pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "langchain"
-version = "0.0.150"
+version = "0.0.151"
 description = "Building applications with LLMs through composability"
 authors = []
 license = "MIT"
@@ -17,7 +17,7 @@ SQLAlchemy = ">1.3,<3"
 requests = "^2"
 PyYAML = ">=5.4.1"
 numpy = "^1"
-azure-core = {version = "^1.26.4"}
+azure-core = {version = "^1.26.4", optional = true}
 tqdm = {version = ">=4.48.0", optional = true}
 openapi-schema-pydantic = "^1.2"
 faiss-cpu = {version = "^1", optional = true}
@@ -149,8 +149,8 @@ qdrant = ["qdrant-client"]
 openai = ["openai"]
 cohere = ["cohere"]
 embeddings = ["sentence-transformers"]
-azure = ["azure-identity", "azure-cosmos", "openai"]
-all = ["anthropic", "cohere", "openai", "nlpcloud", "huggingface_hub", "jina", "manifest-ml", "elasticsearch", "opensearch-py", "google-search-results", "faiss-cpu", "sentence-transformers", "transformers", "spacy", "nltk", "wikipedia", "beautifulsoup4", "tiktoken", "torch", "jinja2", "pinecone-client", "pinecone-text", "weaviate-client", "redis", "google-api-python-client", "wolframalpha", "qdrant-client", "tensorflow-text", "pypdf", "networkx", "nomic", "aleph-alpha-client", "deeplake", "pgvector", "psycopg2-binary", "boto3", "pyowm", "pytesseract", "html2text", "atlassian-python-api", "gptcache", "duckduckgo-search", "arxiv", "azure-identity", "clickhouse-connect", "azure-cosmos"]
+azure = ["azure-identity", "azure-cosmos", "openai", "azure-core"]
+all = ["anthropic", "cohere", "openai", "nlpcloud", "huggingface_hub", "jina", "manifest-ml", "elasticsearch", "opensearch-py", "google-search-results", "faiss-cpu", "sentence-transformers", "transformers", "spacy", "nltk", "wikipedia", "beautifulsoup4", "tiktoken", "torch", "jinja2", "pinecone-client", "pinecone-text", "weaviate-client", "redis", "google-api-python-client", "wolframalpha", "qdrant-client", "tensorflow-text", "pypdf", "networkx", "nomic", "aleph-alpha-client", "deeplake", "pgvector", "psycopg2-binary", "boto3", "pyowm", "pytesseract", "html2text", "atlassian-python-api", "gptcache", "duckduckgo-search", "arxiv", "azure-identity", "clickhouse-connect", "azure-cosmos", "lancedb", "lark"]

 [tool.ruff]
 select = [
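On the packaging side, azure-core is now declared optional and exposed through the azure extra (alongside azure-identity, azure-cosmos, and openai), which matches the deferred imports above. A hedged sketch of the runtime guard such an optional dependency typically gets; the helper name and error message are illustrative, not the library's actual code.

def _require_azure_core():
    """Fail with an actionable message when the optional azure extra is missing."""
    try:
        from azure.core.credentials import TokenCredential
    except ImportError as exc:
        raise ImportError(
            "azure-core is not installed. It is an optional dependency; "
            "install it with: pip install 'langchain[azure]'"
        ) from exc
    return TokenCredential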