Fixed bug in AnalyticDB Vector Store caused by SQLAlchemy version upgrade (#6736)

Richy Wang 2023-06-26 20:35:25 +08:00 committed by GitHub
parent d84a3bcf7a
commit ec8247ec59
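The change throughout this diff is to stop calling conn.commit() explicitly and instead wrap the work in a `with conn.begin():` block, which commits on success and rolls back on error. For context, a minimal sketch of the two SQLAlchemy transaction styles (illustrative only; the engine URL and statement are placeholders, not from this repository). Connection.commit() is only available on SQLAlchemy 1.4+, whereas the begin() context manager also works on older versions, which is presumably why the fix switches to it:

from sqlalchemy import create_engine, text

engine = create_engine("postgresql+psycopg2://user:pass@host:5432/db")  # placeholder URL

# "Commit as you go": requires Connection.commit(), added in SQLAlchemy 1.4.
with engine.connect() as conn:
    conn.execute(text("SELECT 1"))
    conn.commit()

# "Begin once", as used by this fix: the block commits when it exits
# without error and rolls back otherwise.
with engine.connect() as conn:
    with conn.begin():
        conn.execute(text("SELECT 1"))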


@@ -80,6 +80,7 @@ class AnalyticDB(VectorStore):
             extend_existing=True,
         )
         with self.engine.connect() as conn:
+            with conn.begin():
                 # Create the table
                 Base.metadata.create_all(conn)
@@ -107,7 +108,6 @@ class AnalyticDB(VectorStore):
                     """
                 )
                 conn.execute(index_statement)
-            conn.commit()

     def create_collection(self) -> None:
         if self.pre_delete_collection:
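Since whitespace-only re-indentation is hidden in the hunks above, here is a small, self-contained sketch of the pattern create_table_if_not_exists now follows: table DDL and index creation issued inside a single transaction block (the class, table, index, and SQLite engine below are placeholders purely for illustration; the real code targets AnalyticDB/Postgres):

from sqlalchemy import Column, Integer, Text, create_engine, text
from sqlalchemy.orm import declarative_base  # SQLAlchemy 1.4+

Base = declarative_base()

class DemoChunk(Base):
    __tablename__ = "demo_collection"  # placeholder, not the real table name
    id = Column(Integer, primary_key=True)
    document = Column(Text)

engine = create_engine("sqlite:///:memory:")  # placeholder engine
with engine.connect() as conn:
    with conn.begin():
        # Create the table, then the index, inside one transaction.
        Base.metadata.create_all(conn)
        conn.execute(
            text("CREATE INDEX IF NOT EXISTS demo_idx ON demo_collection (document)")
        )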
@@ -118,8 +118,8 @@ class AnalyticDB(VectorStore):
         self.logger.debug("Trying to delete collection")
         drop_statement = text(f"DROP TABLE IF EXISTS {self.collection_name};")
         with self.engine.connect() as conn:
+            with conn.begin():
                 conn.execute(drop_statement)
-            conn.commit()

     def add_texts(
         self,
@@ -160,6 +160,7 @@ class AnalyticDB(VectorStore):
         chunks_table_data = []
         with self.engine.connect() as conn:
+            with conn.begin():
                 for document, metadata, chunk_id, embedding in zip(
                     texts, metadatas, ids, embeddings
                 ):
@@ -182,9 +183,6 @@ class AnalyticDB(VectorStore):
                 if chunks_table_data:
                     conn.execute(insert(chunks_table).values(chunks_table_data))
-            # Commit the transaction only once after all records have been inserted
-            conn.commit()

         return ids

     def similarity_search(
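The removed comment above ("Commit the transaction only once after all records have been inserted") is made redundant by the begin() block: rows are still accumulated first and written with one bulk INSERT. A rough, self-contained sketch of that batch-insert pattern (table, columns, and data are placeholders, not the real schema):

from sqlalchemy import Column, MetaData, String, Table, create_engine, insert

metadata = MetaData()
chunks_table = Table(
    "demo_chunks",  # placeholder name
    metadata,
    Column("id", String, primary_key=True),
    Column("document", String),
)

engine = create_engine("sqlite:///:memory:")  # placeholder engine
chunks_table_data = [
    {"id": "1", "document": "first chunk"},
    {"id": "2", "document": "second chunk"},
]

with engine.connect() as conn:
    with conn.begin():
        metadata.create_all(conn)
        if chunks_table_data:
            # One multi-row INSERT, committed when the begin() block exits.
            conn.execute(insert(chunks_table).values(chunks_table_data))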
@@ -333,9 +331,9 @@ class AnalyticDB(VectorStore):
     ) -> AnalyticDB:
         """
         Return VectorStore initialized from texts and embeddings.
-        Postgres connection string is required
+        Postgres Connection string is required
         Either pass it as a parameter
-        or set the PGVECTOR_CONNECTION_STRING environment variable.
+        or set the PG_CONNECTION_STRING environment variable.
         """

         connection_string = cls.get_connection_string(kwargs)
@@ -363,7 +361,7 @@ class AnalyticDB(VectorStore):
             raise ValueError(
                 "Postgres connection string is required"
                 "Either pass it as a parameter"
-                "or set the PGVECTOR_CONNECTION_STRING environment variable."
+                "or set the PG_CONNECTION_STRING environment variable."
             )

         return connection_string
@@ -381,9 +379,9 @@ class AnalyticDB(VectorStore):
     ) -> AnalyticDB:
         """
         Return VectorStore initialized from documents and embeddings.
-        Postgres connection string is required
+        Postgres Connection string is required
         Either pass it as a parameter
-        or set the PGVECTOR_CONNECTION_STRING environment variable.
+        or set the PG_CONNECTION_STRING environment variable.
         """

         texts = [d.page_content for d in documents]
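Per the corrected docstrings, the connection string can be passed directly or supplied through the PG_CONNECTION_STRING environment variable. A hedged usage sketch follows; the connection_string keyword, the import paths, and OpenAIEmbeddings are assumptions for illustration, not taken from this diff:

import os

from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import AnalyticDB

# Option 1: environment variable read by get_connection_string().
os.environ["PG_CONNECTION_STRING"] = "postgresql+psycopg2://user:pass@host:5432/db"
store = AnalyticDB.from_texts(texts=["hello world"], embedding=OpenAIEmbeddings())

# Option 2: pass the connection string as a parameter instead
# (keyword name assumed for illustration).
store = AnalyticDB.from_texts(
    texts=["hello world"],
    embedding=OpenAIEmbeddings(),
    connection_string="postgresql+psycopg2://user:pass@host:5432/db",
)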