Revert "cmake: do not modify gpt4all.app after signing it (#3413)"

This reverts commit c01ac7fa93.

Signed-off-by: Jared Van Bortel <jared@nomic.ai>
This commit is contained in:
Jared Van Bortel 2025-01-24 13:21:34 -05:00
parent c01ac7fa93
commit f4f7de51e7
4 changed files with 28 additions and 29 deletions

View File

@@ -9,7 +9,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
 ### Fixed
 - Fix regression while using localdocs with server API ([#3410](https://github.com/nomic-ai/gpt4all/pull/3410))
 - Don't show system messages in server chat view ([#3411](https://github.com/nomic-ai/gpt4all/pull/3411))
-- Fix `codesign --verify` failure on macOS ([#3413](https://github.com/nomic-ai/gpt4all/pull/3413))
 
 ## [3.7.0] - 2025-01-21
@@ -271,7 +270,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/).
 - Fix several Vulkan resource management issues ([#2694](https://github.com/nomic-ai/gpt4all/pull/2694))
 - Fix crash/hang when some models stop generating, by showing special tokens ([#2701](https://github.com/nomic-ai/gpt4all/pull/2701))
 
-[Unreleased]: https://github.com/nomic-ai/gpt4all/compare/v3.7.0...HEAD
 [3.7.0]: https://github.com/nomic-ai/gpt4all/compare/v3.6.1...v3.7.0
 [3.6.1]: https://github.com/nomic-ai/gpt4all/compare/v3.6.0...v3.6.1
 [3.6.0]: https://github.com/nomic-ai/gpt4all/compare/v3.5.3...v3.6.0

View File

@@ -445,18 +445,12 @@ endif()
 
 # -- install --
 
-if (APPLE)
-    set(GPT4ALL_LIB_DEST bin/gpt4all.app/Contents/Frameworks)
-else()
-    set(GPT4ALL_LIB_DEST lib)
-endif()
-
 install(TARGETS chat DESTINATION bin COMPONENT ${COMPONENT_NAME_MAIN})
 install(
     TARGETS llmodel
-    LIBRARY DESTINATION ${GPT4ALL_LIB_DEST} COMPONENT ${COMPONENT_NAME_MAIN} # .so/.dylib
+    LIBRARY DESTINATION lib COMPONENT ${COMPONENT_NAME_MAIN} # .so/.dylib
     RUNTIME DESTINATION bin COMPONENT ${COMPONENT_NAME_MAIN} # .dll
 )
 
 # We should probably iterate through the list of the cmake for backend, but these need to be installed
@@ -479,8 +473,8 @@ endif()
 install(
     TARGETS ${MODEL_IMPL_TARGETS}
-    LIBRARY DESTINATION ${GPT4ALL_LIB_DEST} COMPONENT ${COMPONENT_NAME_MAIN} # .so/.dylib
+    LIBRARY DESTINATION lib COMPONENT ${COMPONENT_NAME_MAIN} # .so/.dylib
     RUNTIME DESTINATION lib COMPONENT ${COMPONENT_NAME_MAIN} # .dll
 )
 
 if(APPLE AND GPT4ALL_SIGN_INSTALL)
if(APPLE AND GPT4ALL_SIGN_INSTALL) if(APPLE AND GPT4ALL_SIGN_INSTALL)
@@ -509,7 +503,7 @@ if (LLMODEL_CUDA)
         TARGETS llamamodel-mainline-cuda
                 llamamodel-mainline-cuda-avxonly
         RUNTIME_DEPENDENCY_SET llama-cuda-deps
-        LIBRARY DESTINATION lib COMPONENT ${COMPONENT_NAME_MAIN} # .so
+        LIBRARY DESTINATION lib COMPONENT ${COMPONENT_NAME_MAIN} # .so/.dylib
         RUNTIME DESTINATION lib COMPONENT ${COMPONENT_NAME_MAIN} # .dll
     )
     if (WIN32)
@@ -525,11 +519,11 @@ endif()
 if (NOT GPT4ALL_USING_QTPDF)
     # Install PDFium
-    install(
-        FILES ${PDFium_LIBRARY}
-        LIBRARY DESTINATION ${GPT4ALL_LIB_DEST} COMPONENT ${COMPONENT_NAME_MAIN} # .so/.dylib
-        RUNTIME DESTINATION bin COMPONENT ${COMPONENT_NAME_MAIN} # .dll
-    )
+    if (WIN32)
+        install(FILES "${PDFium_LIBRARY}" DESTINATION bin COMPONENT ${COMPONENT_NAME_MAIN})
+    else()
+        install(FILES "${PDFium_LIBRARY}" DESTINATION lib COMPONENT ${COMPONENT_NAME_MAIN})
+    endif()
 endif()
 
 if (NOT APPLE)

View File

@@ -8,6 +8,12 @@ if (GPT4ALL_SIGN_INSTALL)
     set(MAC_NOTARIZE -sign-for-notarization=${GPT4ALL_SIGNING_ID})
 endif()
 execute_process(COMMAND ${MACDEPLOYQT} ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/bin/gpt4all.app -qmldir=${CMAKE_CURRENT_SOURCE_DIR} -verbose=2 ${MAC_NOTARIZE})
+file(GLOB MYLLAMALIBS ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/lib/libllama*)
+file(GLOB MYLLMODELLIBS ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/lib/libllmodel.*)
+file(COPY ${MYLLAMALIBS}
+    DESTINATION ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/bin/gpt4all.app/Contents/Frameworks)
+file(COPY ${MYLLMODELLIBS}
+    DESTINATION ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data/bin/gpt4all.app/Contents/Frameworks)
 file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/icons/gpt4all-32.png"
     DESTINATION ${CPACK_TEMPORARY_INSTALL_DIRECTORY}/packages/${COMPONENT_NAME_MAIN}/data)
 file(COPY "${CMAKE_CURRENT_SOURCE_DIR}/icons/gpt4all-48.png"

View File

@@ -88,18 +88,19 @@ int main(int argc, char *argv[])
 #endif
 
     // set search path before constructing the MySettings instance, which relies on this
-    {
-        auto appDirPath = QCoreApplication::applicationDirPath();
-        QStringList searchPaths {
-#ifdef Q_OS_DARWIN
-            u"%1/../Frameworks"_s.arg(appDirPath),
-#else
-            appDirPath,
-            u"%1/../lib"_s.arg(appDirPath),
+    QString llmodelSearchPaths = QCoreApplication::applicationDirPath();
+    const QString libDir = QCoreApplication::applicationDirPath() + "/../lib/";
+    if (LLM::directoryExists(libDir))
+        llmodelSearchPaths += ";" + libDir;
+#if defined(Q_OS_MAC)
+    const QString binDir = QCoreApplication::applicationDirPath() + "/../../../";
+    if (LLM::directoryExists(binDir))
+        llmodelSearchPaths += ";" + binDir;
+    const QString frameworksDir = QCoreApplication::applicationDirPath() + "/../Frameworks/";
+    if (LLM::directoryExists(frameworksDir))
+        llmodelSearchPaths += ";" + frameworksDir;
 #endif
-        };
-        LLModel::Implementation::setImplementationsSearchPath(searchPaths.join(u';').toStdString());
-    }
+    LLModel::Implementation::setImplementationsSearchPath(llmodelSearchPaths.toStdString());
 
     // Set the local and language translation before the qml engine has even been started. This will
     // use the default system locale unless the user has explicitly set it to use a different one.