mirror of https://github.com/imartinez/privateGPT.git
Better error message if .env is empty/does not exist.
@@ -6,6 +6,8 @@ load_dotenv()
 
 # Define the folder for storing database
 PERSIST_DIRECTORY = os.environ.get('PERSIST_DIRECTORY')
+if PERSIST_DIRECTORY is None:
+    raise Exception("Please set the PERSIST_DIRECTORY environment variable")
 
 # Define the Chroma settings
 CHROMA_SETTINGS = Settings(
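For context, os.environ.get() returns None when a variable is unset, so the added guard turns a silent misconfiguration into an immediate, explicit error. A minimal standalone sketch of the same pattern (not the repo's full constants module, which also builds the Chroma settings):

import os

PERSIST_DIRECTORY = os.environ.get('PERSIST_DIRECTORY')
if PERSIST_DIRECTORY is None:
    # Fail fast with an actionable message instead of passing None further down.
    raise Exception("Please set the PERSIST_DIRECTORY environment variable")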
@@ -24,12 +24,13 @@ from langchain.text_splitter import RecursiveCharacterTextSplitter
 from langchain.vectorstores import Chroma
 from langchain.embeddings import HuggingFaceEmbeddings
 from langchain.docstore.document import Document
 
+if not load_dotenv():
+    print("Could not load .env file or it is empty. Please check if it exists and is readable.")
+    exit(1)
+
 from constants import CHROMA_SETTINGS
 
-load_dotenv()
-
-
 # Load environment variables
 persist_directory = os.environ.get('PERSIST_DIRECTORY')
 source_directory = os.environ.get('SOURCE_DIRECTORY', 'source_documents')
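The bare load_dotenv() call is replaced with a check of its return value: per the commit message, it is falsy when the .env file does not exist or is empty, so the script can stop with a readable message before constants.py tries to read PERSIST_DIRECTORY at import time. A minimal sketch, assuming python-dotenv is installed:

import sys
from dotenv import load_dotenv

if not load_dotenv():
    # No .env found, or it contributed nothing: exit before any env lookups run.
    print("Could not load .env file or it is empty. Please check if it exists and is readable.")
    sys.exit(1)  # the diff uses the built-in exit(1); sys.exit is equivalent here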
@@ -9,7 +9,9 @@ import os
 import argparse
 import time
 
-load_dotenv()
+if not load_dotenv():
+    print("Could not load .env file or it is empty. Please check if it exists and is readable.")
+    exit(1)
 
 embeddings_model_name = os.environ.get("EMBEDDINGS_MODEL_NAME")
 persist_directory = os.environ.get('PERSIST_DIRECTORY')
@@ -39,7 +41,7 @@ def main():
         case _default:
             # raise exception if model_type is not supported
             raise Exception(f"Model type {model_type} is not supported. Please choose one of the following: LlamaCpp, GPT4All")
 
     qa = RetrievalQA.from_chain_type(llm=llm, chain_type="stuff", retriever=retriever, return_source_documents= not args.hide_source)
     # Interactive questions and answers
     while True:
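The surrounding context follows the same fail-fast style for an unsupported MODEL_TYPE. A minimal sketch of that dispatch, using hypothetical placeholders instead of the real LlamaCpp/GPT4All constructors (requires Python 3.10+ for match):

import os

def build_llm(model_type: str):
    match model_type:
        case "LlamaCpp":
            return "llamacpp-llm"  # placeholder: the script builds langchain.llms.LlamaCpp here
        case "GPT4All":
            return "gpt4all-llm"   # placeholder: the script builds langchain.llms.GPT4All here
        case _:
            # Actionable error naming the supported options, as in the hunk above.
            raise Exception(f"Model type {model_type} is not supported. Please choose one of the following: LlamaCpp, GPT4All")

llm = build_llm(os.environ.get('MODEL_TYPE', 'GPT4All'))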