Update poetry lock (#1209)

* Update the version of llama_index used to fix transient openai errors

* Update poetry.lock file

* Make `local` mode the default mode
lopagela
2023-11-11 22:44:19 +01:00
committed by GitHub
parent a22969ad1f
commit a579c9bdc5
14 changed files with 313 additions and 268 deletions


@@ -1,3 +1,5 @@
+from typing import Literal
 from fastapi import APIRouter
 from pydantic import BaseModel, Field
@@ -16,8 +18,8 @@ class ChunksBody(BaseModel):
 class ChunksResponse(BaseModel):
-    object: str = Field(enum=["list"])
-    model: str = Field(enum=["private-gpt"])
+    object: Literal["list"]
+    model: Literal["private-gpt"]
     data: list[Chunk]
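
A quick sketch (not part of this diff) of why the switch from Field(enum=[...]) to Literal matters: pydantic enforces Literal values at validation time, while the enum= keyword argument was only carried through as JSON-schema metadata. ChunksResponseExample below is a hypothetical stand-in for ChunksResponse.

from typing import Literal

from pydantic import BaseModel, ValidationError


class ChunksResponseExample(BaseModel):
    # Hypothetical stand-in mirroring the fields changed in the hunk above.
    object: Literal["list"]
    model: Literal["private-gpt"]


ChunksResponseExample(object="list", model="private-gpt")  # accepted

try:
    ChunksResponseExample(object="array", model="private-gpt")
except ValidationError as err:
    print(err)  # rejected: "array" is not the literal "list"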


@@ -1,4 +1,4 @@
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Literal
 from injector import inject, singleton
 from llama_index import ServiceContext, StorageContext, VectorStoreIndex
@@ -19,7 +19,7 @@ if TYPE_CHECKING:
 class Chunk(BaseModel):
-    object: str = Field(enum=["context.chunk"])
+    object: Literal["context.chunk"]
     score: float = Field(examples=[0.023])
     document: IngestedDoc
     text: str = Field(examples=["Outbound sales increased 20%, driven by new leads."])
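For illustration only, a hedged usage sketch of a Literal-typed field like the one above; ChunkSketch is a reduced, hypothetical model (the document: IngestedDoc field is omitted), not the actual class from the service.

from typing import Literal

from pydantic import BaseModel, Field


class ChunkSketch(BaseModel):
    # Reduced, hypothetical version of Chunk; IngestedDoc left out for brevity.
    object: Literal["context.chunk"]
    score: float = Field(examples=[0.023])
    text: str


chunk = ChunkSketch(
    object="context.chunk",  # must match the Literal exactly
    score=0.87,
    text="Outbound sales increased 20%, driven by new leads.",
)
print(chunk.model_dump())  # pydantic v2; on v1 this would be chunk.dict()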