add falcon to chatllm::serialize

Aaron Miller 2023-06-27 08:19:33 -07:00 committed by AT
parent 198b5e4832
commit e22dd164d8
2 changed files with 3 additions and 1 deletion


@@ -19,6 +19,7 @@
 #define GPTJ_INTERNAL_STATE_VERSION 0
 #define REPLIT_INTERNAL_STATE_VERSION 0
 #define LLAMA_INTERNAL_STATE_VERSION 0
+#define FALCON_INTERNAL_STATE_VERSION 0
 
 class LLModelStore {
 public:
@@ -570,6 +571,7 @@ bool ChatLLM::serialize(QDataStream &stream, int version)
         case MPT_: stream << MPT_INTERNAL_STATE_VERSION; break;
         case GPTJ_: stream << GPTJ_INTERNAL_STATE_VERSION; break;
         case LLAMA_: stream << LLAMA_INTERNAL_STATE_VERSION; break;
+        case FALCON_: stream << FALCON_INTERNAL_STATE_VERSION; break;
         default: Q_UNREACHABLE();
         }
     }
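For orientation, here is a minimal, self-contained sketch of the pattern this hunk extends: a per-model version tag is written into the QDataStream ahead of the serialized model state so it can be checked on restore. The internalStateVersion() helper, the main() driver, and the read-back check are illustrative assumptions, not code from this commit.

// Illustrative sketch only; names other than LLModelType/QDataStream are hypothetical.
#include <QByteArray>
#include <QDataStream>
#include <QIODevice>

enum LLModelType { MPT_, GPTJ_, LLAMA_, CHATGPT_, REPLIT_, FALCON_ };

// Assumed helper: maps a model type to its internal state version,
// analogous to the *_INTERNAL_STATE_VERSION defines above.
static qint32 internalStateVersion(LLModelType t)
{
    switch (t) {
    case FALCON_: return 0; // FALCON_INTERNAL_STATE_VERSION
    default:      return 0; // all versions are currently 0
    }
}

int main()
{
    QByteArray buffer;

    // Serialize: write the version tag first; the model state would follow it.
    {
        QDataStream out(&buffer, QIODevice::WriteOnly);
        out << internalStateVersion(FALCON_);
    }

    // Deserialize: read the tag back and verify it before restoring any state.
    QDataStream in(buffer);
    qint32 version = 0;
    in >> version;
    return version == internalStateVersion(FALCON_) ? 0 : 1;
}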


@@ -15,7 +15,7 @@ enum LLModelType {
     LLAMA_,
     CHATGPT_,
     REPLIT_,
-    FALCON_
+    FALCON_,
 };
 
 struct LLModelInfo {
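One reason a gap like this is easy to miss, sketched below with illustrative names rather than repository code: because the serialize switch above ends in default: Q_UNREACHABLE(), compilers that would otherwise flag an unhandled enumerator under -Wswitch stay silent, so every new LLModelType value has to be wired into that switch by hand, which is what this commit does for FALCON_.

// Sketch of the -Wswitch behaviour described above; stateVersion() is hypothetical.
enum ModelType { MPT, GPTJ, LLAMA, FALCON };

int stateVersion(ModelType t)
{
    switch (t) {
    case MPT:   return 0;
    case GPTJ:  return 0;
    case LLAMA: return 0;
    // FALCON is not handled. Without the default arm, -Wall/-Wswitch would
    // warn about the missing enumerator; with it, the compiler stays quiet.
    default:    return -1;
    }
}

int main()
{
    return stateVersion(FALCON); // quietly takes the default arm
}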