mirror of https://github.com/imartinez/privateGPT.git
synced 2025-09-06 17:51:03 +00:00

adds UI changes for MTU
1  .gitignore  (vendored)
@@ -29,3 +29,4 @@ __pycache__/

 # macOS
 .DS_Store
+nohup.out
Binary file not shown (Before: 15 KiB, After: 16 KiB)
40  private_gpt/ui/avatar-bot.svg  Normal file
@@ -0,0 +1,40 @@
<?xml version="1.0" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 20010904//EN"
 "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
<svg version="1.0" xmlns="http://www.w3.org/2000/svg"
 width="474.000000pt" height="248.000000pt" viewBox="0 0 474.000000 248.000000"
 preserveAspectRatio="xMidYMid meet">

<g transform="translate(0.000000,248.000000) scale(0.100000,-0.100000)"
fill="#000000" stroke="none">
<path d="M2232 2112 c-27 -37 -68 -95 -92 -128 l-42 -62 -62 -6 c-33 -4 -106
-16 -161 -27 -490 -100 -836 -305 -966 -574 -149 -309 30 -630 470 -842 133
-64 504 -183 520 -167 3 2 -41 93 -96 202 -92 179 -104 199 -139 216 -63 32
-143 46 -272 46 l-123 0 18 31 c29 51 171 197 239 245 62 44 225 127 276 139
25 6 29 2 53 -47 32 -62 101 -158 115 -158 6 0 10 8 10 18 0 32 58 141 99 185
23 24 59 57 81 72 48 33 50 45 10 45 -47 0 -35 15 27 34 53 17 56 19 38 32
-10 8 -24 14 -29 14 -21 0 19 20 41 20 20 0 21 4 16 58 -7 71 14 128 52 142
15 6 51 10 82 10 49 0 54 2 42 16 -20 24 -69 47 -129 59 -53 11 -55 13 -68 56
-24 78 4 201 61 274 16 20 17 19 56 -25 75 -84 112 -175 138 -334 l8 -49 70
-19 c85 -23 272 -114 339 -165 44 -34 51 -45 71 -113 12 -41 25 -83 30 -92 6
-15 2 -18 -21 -18 -44 0 -21 -13 141 -80 83 -33 185 -76 228 -93 73 -30 77
-34 72 -58 -3 -14 1 -45 10 -67 l17 -41 -66 -61 c-72 -68 -135 -120 -145 -120
-3 0 -62 10 -131 21 -78 13 -195 23 -310 26 -167 5 -240 1 -435 -23 l-50 -6
-23 41 c-30 54 -52 158 -52 256 l0 80 -20 -25 c-61 -78 -86 -250 -56 -381 10
-41 19 -79 21 -86 3 -9 18 -7 59 7 65 22 216 54 223 47 3 -3 -15 -16 -39 -28
-95 -48 -214 -205 -234 -306 l-6 -33 163 0 c551 0 1059 162 1327 423 310 303
248 669 -161 946 -27 19 -104 59 -170 91 -172 81 -326 129 -535 166 l-104 18
-30 68 c-17 37 -37 68 -45 68 -24 0 -89 -38 -128 -75 -21 -19 -39 -34 -40 -33
-1 2 -13 23 -27 48 -30 51 -116 155 -146 176 -19 13 -23 9 -70 -54z m-182
-294 c-1 -7 -9 -28 -18 -47 -16 -33 -21 -35 -95 -48 -42 -7 -94 -20 -114 -28
-33 -14 -35 -16 -18 -28 15 -12 5 -17 -86 -46 -243 -76 -472 -194 -638 -330
l-44 -36 23 45 c69 135 205 256 395 350 147 73 269 114 445 151 161 33 150 32
150 17z m878 -38 c525 -128 845 -428 793 -743 -39 -237 -286 -451 -661 -572
-163 -52 -279 -75 -514 -99 l-39 -4 6 52 c9 89 70 186 140 222 47 25 145 27
374 9 122 -10 233 -15 247 -11 53 13 310 253 322 300 10 42 -5 58 -82 86 -134
50 -361 168 -384 200 -26 36 -64 132 -74 183 -6 33 -22 48 -130 133 l-123 95
-12 67 c-6 37 -14 77 -17 90 -8 29 3 29 154 -8z"/>
<path d="M2630 1360 c0 -4 7 -11 15 -14 12 -4 14 -14 10 -39 -10 -52 19 -71
113 -75 l77 -3 -38 35 c-21 18 -55 46 -76 60 -40 28 -101 50 -101 36z"/>
</g>
</svg>
1  private_gpt/ui/images copy.py  Normal file
@@ -0,0 +1 @@
logo_svg = "data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iODYxIiBoZWlnaHQ9Ijk4IiB2aWV3Qm94PSIwIDAgODYxIDk4IiBmaWxsPSJub25lIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciPgo8cGF0aCBkPSJNNDguMTM0NSAwLjE1NzkxMUMzNi44Mjk5IDEuMDM2NTQgMjYuMTIwNSA1LjU1MzI4IDE3LjYyNTYgMTMuMDI1QzkuMTMwNDYgMjAuNDk2NyAzLjMxMTcgMzAuNTE2OSAxLjA0OTUyIDQxLjU3MDVDLTEuMjEyNzMgNTIuNjIzOCAwLjIwNDQxOSA2NC4xMDk0IDUuMDg2MiA3NC4yOTA1QzkuOTY4NjggODQuNDcxNiAxOC4wNTAzIDkyLjc5NDMgMjguMTA5OCA5OEwzMy43MDI2IDgyLjU5MDdMMzUuNDU0MiA3Ny43NjU2QzI5LjgzODcgNzQuMTY5MiAyNS41NDQ0IDY4Ljg2MDcgMjMuMjE0IDYyLjYzNDRDMjAuODgyMiA1Ni40MDg2IDIwLjYzOSA0OS41OTkxIDIyLjUyMDQgNDMuMjI0M0MyNC40MDI5IDM2Ljg0OTUgMjguMzA5NiAzMS4yNTI1IDMzLjY1NjEgMjcuMjcwNkMzOS4wMDIgMjMuMjg4MyA0NS41MDAzIDIxLjEzNSA1Mi4xNzg5IDIxLjEzM0M1OC44NTczIDIxLjEzMDMgNjUuMzU3MSAyMy4yNzgzIDcwLjcwNjUgMjcuMjU1OEM3Ni4wNTU0IDMxLjIzNCA3OS45NjY0IDM2LjgyNzcgODEuODU0MyA0My4yMDA2QzgzLjc0MjkgNDkuNTczNiA4My41MDYyIDU2LjM4MzYgODEuMTgwMSA2Mi42MTE3Qzc4Ljg1NDUgNjguODM5NiA3NC41NjUgNzQuMTUxNCA2OC45NTI5IDc3Ljc1MjhMNzAuNzA3NCA4Mi41OTA3TDc2LjMwMDIgOTcuOTk3MUM4Ni45Nzg4IDkyLjQ3MDUgOTUuNDA4OCA4My40NDE5IDEwMC4xNjMgNzIuNDQwNEMxMDQuOTE3IDYxLjQzOTQgMTA1LjcwNCA0OS4xNDE3IDEwMi4zODkgMzcuNjNDOTkuMDc0NiAyNi4xMTc5IDkxLjg2MjcgMTYuMDk5MyA4MS45NzQzIDkuMjcwNzlDNzIuMDg2MSAyLjQ0MTkxIDYwLjEyOTEgLTAuNzc3MDg2IDQ4LjEyODYgMC4xNTg5MzRMNDguMTM0NSAwLjE1NzkxMVoiIGZpbGw9IiMxRjFGMjkiLz4KPGcgY2xpcC1wYXRoPSJ1cmwoI2NsaXAwXzVfMTkpIj4KPHBhdGggZD0iTTIyMC43NzIgMTIuNzUyNEgyNTIuNjM5QzI2Ny4yNjMgMTIuNzUyNCAyNzcuNzM5IDIxLjk2NzUgMjc3LjczOSAzNS40MDUyQzI3Ny43MzkgNDYuNzg3IDI2OS44ODEgNTUuMzUwOCAyNTguMzE0IDU3LjQxMDdMMjc4LjgzIDg1LjM3OTRIMjYxLjM3TDI0Mi4wNTQgNTcuOTUzM0gyMzUuNTA2Vjg1LjM3OTRIMjIwLjc3NEwyMjAuNzcyIDEyLjc1MjRaTTIzNS41MDQgMjYuMzAyOFY0NC40MDdIMjUyLjYzMkMyNTguOTYyIDQ0LjQwNyAyNjIuOTk5IDQwLjgyOTggMjYyLjk5OSAzNS40MTAyQzI2Mi45OTkgMjkuODgwOSAyNTguOTYyIDI2LjMwMjggMjUyLjYzMiAyNi4zMDI4SDIzNS41MDRaIiBmaWxsPSIjMUYxRjI5Ii8+CjxwYXRoIGQ9Ik0yOTUuMTc2IDg1LjM4NDRWMTIuNzUyNEgzMDkuOTA5Vjg1LjM4NDRIMjk1LjE3NloiIGZpbGw9IiMxRjFGMjkiLz4KPHBhdGggZD0iTTM2My43OTUgNjUuNzYzTDM4NS42MiAxMi43NTI0SDQwMS40NDRMMzcxLjIxNSA4NS4zODQ0SDM1Ni40ODNMMzI2LjI1NCAxMi43NTI0SDM0Mi4wNzhMMzYzLjc5NSA2NS43NjNaIiBmaWxsPSIjMUYxRjI5Ii8+CjxwYXRoIGQ9Ik00NDguMzI3IDcyLjA1MDRINDE1LjY5OEw0MTAuMjQxIDg1LjM4NDRIMzk0LjQxOEw0MjQuNjQ3IDEyLjc1MjRINDM5LjM3OUw0NjkuNjA4IDg1LjM4NDRINDUzLjc4M0w0NDguMzI3IDcyLjA1MDRaTTQ0Mi43NjEgNTguNUw0MzIuMDY2IDMyLjM3NDhMNDIxLjI2MiA1OC41SDQ0Mi43NjFaIiBmaWxsPSIjMUYxRjI5Ii8+CjxwYXRoIGQ9Ik00NjUuMjIxIDEyLjc1MjRINTMwLjU5MlYyNi4zMDI4SDUwNS4yNzVWODUuMzg0NEg0OTAuNTM5VjI2LjMwMjhINDY1LjIyMVYxMi43NTI0WiIgZmlsbD0iIzFGMUYyOSIvPgo8cGF0aCBkPSJNNTk1LjE5MyAxMi43NTI0VjI2LjMwMjhINTYyLjEyOFY0MS4xNTUxSDU5NS4xOTNWNTQuNzA2NUg1NjIuMTI4VjcxLjgzNEg1OTUuMTkzVjg1LjM4NDRINTQ3LjM5NVYxMi43NTI0SDU5NS4xOTNaIiBmaWxsPSIjMUYxRjI5Ii8+CjxwYXRoIGQ9Ik0xNjcuMjAxIDU3LjQxNThIMTg2LjUzNkMxOTAuODg2IDU3LjQ2NjIgMTk1LjE2OCA1Ni4zMzQ4IDE5OC45MTggNTQuMTQzN0MyMDIuMTc5IDUyLjIxOTkgMjA0Ljg2OSA0OS40NzM2IDIwNi43MTYgNDYuMTgzNUMyMDguNTYyIDQyLjg5MzQgMjA5LjUgMzkuMTc2NiAyMDkuNDMzIDM1LjQxMDJDMjA5LjQzMyAyMS45Njc1IDE5OC45NTggMTIuNzU3NCAxODQuMzM0IDEyLjc1NzRIMTUyLjQ2OFY4NS4zODk0SDE2Ny4yMDFWNTcuNDIwN1Y1Ny40MTU4Wk0xNjcuMjAxIDI2LjMwNThIMTg0LjMyOUMxOTAuNjU4IDI2LjMwNTggMTk0LjY5NiAyOS44ODQgMTk0LjY5NiAzNS40MTMzQzE5NC42OTYgNDAuODMyOSAxOTAuNjU4IDQ0LjQwOTkgMTg0LjMyOSA0NC40MDk5SDE2Ny4yMDFWMjYuMzA1OFoiIGZpbGw9IiMxRjFGMjkiLz4KPHBhdGggZD0iTTc5NC44MzUgMTIuNzUyNEg4NjAuMjA2VjI2LjMwMjhIODM0Ljg4OVY4NS4zODQ0SDgyMC4xNTZWMjYuMzAyOEg3OTQuODM1VjEyLjc1MjRaIiBmaWxsPSIjMUYxRjI5Ii8+CjxwYXRoIGQ9Ik03NDEuOTA3IDU3LjQxNThINzYxLjI0MUM3NjUuNTkyIDU3LjQ2NjEgNzY5Ljg3NCA1Ni4zMzQ3IDc3My42MjQgNTQuMTQzN0M3NzYuODg0IDUyLjIxOTkgNzc5LjU3NSA0OS40NzM2IDc4MS40MjEgNDYuMTgzNUM3ODMuMjY4IDQyLjg5MzQgNzg0LjIwNiAzOS4xNzY2IDc4NC4xMzkgMzUuNDEwMkM3ODQuMTM5IDIxLjk2NzUgNzczLjY2NCAxMi43NTc0IDc1OS4wMzkgMTIuNzU3NEg3MjcuMTc1Vjg1LjM4OTRINzQxLjkwN1Y1Ny40MjA3VjU3LjQxNThaTTc0MS45MDcgMjYuMzA1OEg3NTkuMDM1Qzc2NS4zNjUgMjYuMzA1OCA3NjkuNDAzIDI5Ljg4NCA3NjkuNDAzIDM1LjQxMzNDNzY5LjQwMyA0MC44MzI5IDc2NS4zNjUgNDQuNDA5OSA3NTkuMDM1IDQ0LjQwOTlINzQxLjkwN1YyNi4zMDU4WiIgZmlsbD0iIzFGMUYyOSIvPgo8cGF0aCBkPSJNNjgxLjA2OSA0Ny4wMTE1VjU5LjAxMjVINjk1LjM3OVY3MS42NzE5QzY5Mi41MjYgNzMuNDM2OCA2ODguNTI0IDc0LjMzMTkgNjgzLjQ3NyA3NC4zMzE5QzY2Ni4wMDMgNzQuMzMxOSA2NTguMDQ1IDYxLjgxMjQgNjU4LjA0NSA1MC4xOEM2NTguMDQ1IDMzLjk2MDUgNjcxLjAwOCAyNS40NzMyIDY4My44MTIgMjUuNDczMkM2OTAuNDI1IDI1LjQ2MjggNjk2LjkwOSAyNy4yODA0IDcwMi41NDEgMzAuNzIyNkw3MDMuMTU3IDMxLjEyNTRMNzA1Ljk1OCAxOC4xODZMNzA1LjY2MyAxNy45OTc3QzcwMC4wNDYgMTQuNDAwNCA2OTEuMjkxIDEyLjI1OSA2ODIuMjUxIDEyLjI1OUM2NjMuMTk3IDEyLjI1OSA2NDIuOTQ5IDI1LjM5NjcgNjQyLjk0OSA0OS43NDVDNjQyLjk0OSA2MS4wODQ1IDY0Ny4yOTMgNzAuNzE3NCA2NTUuNTExIDc3LjYwMjlDNjYzLjIyNCA4My44MjQ1IDY3Mi44NzQgODcuMTg5IDY4Mi44MDkgODcuMTIwMUM2OTQuMzYzIDg3LjEyMDEgNzAzLjA2MSA4NC42NDk1IDcwOS40MDIgNzkuNTY5Mkw3MDkuNTg5IDc5LjQxODFWNDcuMDExNUg2ODEuMDY5WiIgZmlsbD0iIzFGMUYyOSIvPgo8L2c+CjxkZWZzPgo8Y2xpcFBhdGggaWQ9ImNsaXAwXzVfMTkiPgo8cmVjdCB3aWR0aD0iNzA3Ljc3OCIgaGVpZ2h0PSI3NC44NjExIiBmaWxsPSJ3aGl0ZSIgdHJhbnNmb3JtPSJ0cmFuc2xhdGUoMTUyLjQ0NCAxMi4yNSkiLz4KPC9jbGlwUGF0aD4KPC9kZWZzPgo8L3N2Zz4K"
1  private_gpt/ui/images_icon.py  Normal file
@@ -0,0 +1 @@
avatar_svg = "data:image/svg+xml;base64,PD94bWwgdmVyc2lvbj0iMS4wIiBzdGFuZGFsb25lPSJubyI/Pgo8IURPQ1RZUEUgc3ZnIFBVQkxJQyAiLS8vVzNDLy9EVEQgU1ZHIDIwMDEwOTA0Ly9FTiIKICJodHRwOi8vd3d3LnczLm9yZy9UUi8yMDAxL1JFQy1TVkctMjAwMTA5MDQvRFREL3N2ZzEwLmR0ZCI+CjxzdmcgdmVyc2lvbj0iMS4wIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciCiB3aWR0aD0iNDc0LjAwMDAwMHB0IiBoZWlnaHQ9IjI0OC4wMDAwMDBwdCIgdmlld0JveD0iMCAwIDQ3NC4wMDAwMDAgMjQ4LjAwMDAwMCIKIHByZXNlcnZlQXNwZWN0UmF0aW89InhNaWRZTWlkIG1lZXQiPgoKPGcgdHJhbnNmb3JtPSJ0cmFuc2xhdGUoMC4wMDAwMDAsMjQ4LjAwMDAwMCkgc2NhbGUoMC4xMDAwMDAsLTAuMTAwMDAwKSIKZmlsbD0iIzAwMDAwMCIgc3Ryb2tlPSJub25lIj4KPHBhdGggZD0iTTIyMzIgMjExMiBjLTI3IC0zNyAtNjggLTk1IC05MiAtMTI4IGwtNDIgLTYyIC02MiAtNiBjLTMzIC00IC0xMDYKLTE2IC0xNjEgLTI3IC00OTAgLTEwMCAtODM2IC0zMDUgLTk2NiAtNTc0IC0xNDkgLTMwOSAzMCAtNjMwIDQ3MCAtODQyIDEzMwotNjQgNTA0IC0xODMgNTIwIC0xNjcgMyAyIC00MSA5MyAtOTYgMjAyIC05MiAxNzkgLTEwNCAxOTkgLTEzOSAyMTYgLTYzIDMyCi0xNDMgNDYgLTI3MiA0NiBsLTEyMyAwIDE4IDMxIGMyOSA1MSAxNzEgMTk3IDIzOSAyNDUgNjIgNDQgMjI1IDEyNyAyNzYgMTM5CjI1IDYgMjkgMiA1MyAtNDcgMzIgLTYyIDEwMSAtMTU4IDExNSAtMTU4IDYgMCAxMCA4IDEwIDE4IDAgMzIgNTggMTQxIDk5IDE4NQoyMyAyNCA1OSA1NyA4MSA3MiA0OCAzMyA1MCA0NSAxMCA0NSAtNDcgMCAtMzUgMTUgMjcgMzQgNTMgMTcgNTYgMTkgMzggMzIKLTEwIDggLTI0IDE0IC0yOSAxNCAtMjEgMCAxOSAyMCA0MSAyMCAyMCAwIDIxIDQgMTYgNTggLTcgNzEgMTQgMTI4IDUyIDE0MgoxNSA2IDUxIDEwIDgyIDEwIDQ5IDAgNTQgMiA0MiAxNiAtMjAgMjQgLTY5IDQ3IC0xMjkgNTkgLTUzIDExIC01NSAxMyAtNjggNTYKLTI0IDc4IDQgMjAxIDYxIDI3NCAxNiAyMCAxNyAxOSA1NiAtMjUgNzUgLTg0IDExMiAtMTc1IDEzOCAtMzM0IGw4IC00OSA3MAotMTkgYzg1IC0yMyAyNzIgLTExNCAzMzkgLTE2NSA0NCAtMzQgNTEgLTQ1IDcxIC0xMTMgMTIgLTQxIDI1IC04MyAzMCAtOTIgNgotMTUgMiAtMTggLTIxIC0xOCAtNDQgMCAtMjEgLTEzIDE0MSAtODAgODMgLTMzIDE4NSAtNzYgMjI4IC05MyA3MyAtMzAgNzcKLTM0IDcyIC01OCAtMyAtMTQgMSAtNDUgMTAgLTY3IGwxNyAtNDEgLTY2IC02MSBjLTcyIC02OCAtMTM1IC0xMjAgLTE0NSAtMTIwCi0zIDAgLTYyIDEwIC0xMzEgMjEgLTc4IDEzIC0xOTUgMjMgLTMxMCAyNiAtMTY3IDUgLTI0MCAxIC00MzUgLTIzIGwtNTAgLTYKLTIzIDQxIGMtMzAgNTQgLTUyIDE1OCAtNTIgMjU2IGwwIDgwIC0yMCAtMjUgYy02MSAtNzggLTg2IC0yNTAgLTU2IC0zODEgMTAKLTQxIDE5IC03OSAyMSAtODYgMyAtOSAxOCAtNyA1OSA3IDY1IDIyIDIxNiA1NCAyMjMgNDcgMyAtMyAtMTUgLTE2IC0zOSAtMjgKLTk1IC00OCAtMjE0IC0yMDUgLTIzNCAtMzA2IGwtNiAtMzMgMTYzIDAgYzU1MSAwIDEwNTkgMTYyIDEzMjcgNDIzIDMxMCAzMDMKMjQ4IDY2OSAtMTYxIDk0NiAtMjcgMTkgLTEwNCA1OSAtMTcwIDkxIC0xNzIgODEgLTMyNiAxMjkgLTUzNSAxNjYgbC0xMDQgMTgKLTMwIDY4IGMtMTcgMzcgLTM3IDY4IC00NSA2OCAtMjQgMCAtODkgLTM4IC0xMjggLTc1IC0yMSAtMTkgLTM5IC0zNCAtNDAgLTMzCi0xIDIgLTEzIDIzIC0yNyA0OCAtMzAgNTEgLTExNiAxNTUgLTE0NiAxNzYgLTE5IDEzIC0yMyA5IC03MCAtNTR6IG0tMTgyCi0yOTQgYy0xIC03IC05IC0yOCAtMTggLTQ3IC0xNiAtMzMgLTIxIC0zNSAtOTUgLTQ4IC00MiAtNyAtOTQgLTIwIC0xMTQgLTI4Ci0zMyAtMTQgLTM1IC0xNiAtMTggLTI4IDE1IC0xMiA1IC0xNyAtODYgLTQ2IC0yNDMgLTc2IC00NzIgLTE5NCAtNjM4IC0zMzAKbC00NCAtMzYgMjMgNDUgYzY5IDEzNSAyMDUgMjU2IDM5NSAzNTAgMTQ3IDczIDI2OSAxMTQgNDQ1IDE1MSAxNjEgMzMgMTUwIDMyCjE1MCAxN3ogbTg3OCAtMzggYzUyNSAtMTI4IDg0NSAtNDI4IDc5MyAtNzQzIC0zOSAtMjM3IC0yODYgLTQ1MSAtNjYxIC01NzIKLTE2MyAtNTIgLTI3OSAtNzUgLTUxNCAtOTkgbC0zOSAtNCA2IDUyIGM5IDg5IDcwIDE4NiAxNDAgMjIyIDQ3IDI1IDE0NSAyNwozNzQgOSAxMjIgLTEwIDIzMyAtMTUgMjQ3IC0xMSA1MyAxMyAzMTAgMjUzIDMyMiAzMDAgMTAgNDIgLTUgNTggLTgyIDg2IC0xMzQKNTAgLTM2MSAxNjggLTM4NCAyMDAgLTI2IDM2IC02NCAxMzIgLTc0IDE4MyAtNiAzMyAtMjIgNDggLTEzMCAxMzMgbC0xMjMgOTUKLTEyIDY3IGMtNiAzNyAtMTQgNzcgLTE3IDkwIC04IDI5IDMgMjkgMTU0IC04eiIvPgo8cGF0aCBkPSJNMjYzMCAxMzYwIGMwIC00IDcgLTExIDE1IC0xNCAxMiAtNCAxNCAtMTQgMTAgLTM5IC0xMCAtNTIgMTkgLTcxCjExMyAtNzUgbDc3IC0zIC0zOCAzNSBjLTIxIDE4IC01NSA0NiAtNzYgNjAgLTQwIDI4IC0xMDEgNTAgLTEwMSAzNnoiLz4KPC9nPgo8L3N2Zz4K"
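For reference, the avatar_svg value above is just private_gpt/ui/avatar-bot.svg (added earlier in this commit) encoded as a base64 data URI. A minimal sketch of how such a value can be regenerated, assuming it is run from the repository root:

import base64
from pathlib import Path

# Read the raw SVG bytes and wrap them in a data URI usable as an <img> src
svg_bytes = Path("private_gpt/ui/avatar-bot.svg").read_bytes()
avatar_svg = "data:image/svg+xml;base64," + base64.b64encode(svg_bytes).decode("ascii")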
BIN  private_gpt/ui/logo.png  Normal file
Binary file not shown (After: 16 KiB)
454  private_gpt/ui/ui copy.py  Normal file
@@ -0,0 +1,454 @@
"""This file should be imported only and only if you want to run the UI locally."""

import itertools
import logging
import time
from collections.abc import Iterable
from pathlib import Path
from typing import Any

import gradio as gr  # type: ignore
from fastapi import FastAPI
from gradio.themes.utils.colors import slate  # type: ignore
from injector import inject, singleton
from llama_index.core.llms import ChatMessage, ChatResponse, MessageRole
from pydantic import BaseModel

from private_gpt.constants import PROJECT_ROOT_PATH
from private_gpt.di import global_injector
from private_gpt.open_ai.extensions.context_filter import ContextFilter
from private_gpt.server.chat.chat_service import ChatService, CompletionGen
from private_gpt.server.chunks.chunks_service import Chunk, ChunksService
from private_gpt.server.ingest.ingest_service import IngestService
from private_gpt.settings.settings import settings
from private_gpt.ui.images import logo_svg

logger = logging.getLogger(__name__)

THIS_DIRECTORY_RELATIVE = Path(__file__).parent.relative_to(PROJECT_ROOT_PATH)
# Should be "private_gpt/ui/avatar-bot.ico"
AVATAR_BOT = THIS_DIRECTORY_RELATIVE / "avatar-bot.ico"

UI_TAB_TITLE = "My Private GPT"

SOURCES_SEPARATOR = "\n\n Sources: \n"

MODES = ["Query Files", "Search Files", "LLM Chat (no context from files)"]


class Source(BaseModel):
    file: str
    page: str
    text: str

    class Config:
        frozen = True

    @staticmethod
    def curate_sources(sources: list[Chunk]) -> list["Source"]:
        curated_sources = []

        for chunk in sources:
            doc_metadata = chunk.document.doc_metadata

            file_name = doc_metadata.get("file_name", "-") if doc_metadata else "-"
            page_label = doc_metadata.get("page_label", "-") if doc_metadata else "-"

            source = Source(file=file_name, page=page_label, text=chunk.text)
            curated_sources.append(source)
        curated_sources = list(
            dict.fromkeys(curated_sources).keys()
        )  # Unique sources only

        return curated_sources
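    # Note: `Config.frozen = True` above makes Source instances immutable and
    # therefore hashable, which is what lets `dict.fromkeys(curated_sources)`
    # deduplicate them while preserving insertion order, e.g.:
    #   >>> s = Source(file="a", page="1", text="t")
    #   >>> list(dict.fromkeys([s, s]))
    #   [Source(file='a', page='1', text='t')]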


@singleton
class PrivateGptUi:
    @inject
    def __init__(
        self,
        ingest_service: IngestService,
        chat_service: ChatService,
        chunks_service: ChunksService,
    ) -> None:
        self._ingest_service = ingest_service
        self._chat_service = chat_service
        self._chunks_service = chunks_service

        # Cache the UI blocks
        self._ui_block = None

        self._selected_filename = None

        # Initialize system prompt based on default mode
        self.mode = MODES[0]
        self._system_prompt = self._get_default_system_prompt(self.mode)

    def _chat(self, message: str, history: list[list[str]], mode: str, *_: Any) -> Any:
        def yield_deltas(completion_gen: CompletionGen) -> Iterable[str]:
            full_response: str = ""
            stream = completion_gen.response
            for delta in stream:
                if isinstance(delta, str):
                    full_response += str(delta)
                elif isinstance(delta, ChatResponse):
                    full_response += delta.delta or ""
                yield full_response
                time.sleep(0.02)

            if completion_gen.sources:
                full_response += SOURCES_SEPARATOR
                cur_sources = Source.curate_sources(completion_gen.sources)
                sources_text = "\n\n\n"
                used_files = set()
                for index, source in enumerate(cur_sources, start=1):
                    if (source.file + "-" + source.page) not in used_files:
                        sources_text = (
                            sources_text
                            + f"{index}. {source.file} (page {source.page}) \n\n"
                        )
                        used_files.add(source.file + "-" + source.page)
                full_response += sources_text
            yield full_response

        def build_history() -> list[ChatMessage]:
            history_messages: list[ChatMessage] = list(
                itertools.chain(
                    *[
                        [
                            ChatMessage(content=interaction[0], role=MessageRole.USER),
                            ChatMessage(
                                # Remove from history content the Sources information
                                content=interaction[1].split(SOURCES_SEPARATOR)[0],
                                role=MessageRole.ASSISTANT,
                            ),
                        ]
                        for interaction in history
                    ]
                )
            )

            # max 20 messages to try to avoid context overflow
            return history_messages[:20]
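            # NB: the slice above keeps the *first* 20 messages of the history;
            # keeping the 20 most recent ones would be history_messages[-20:].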

        new_message = ChatMessage(content=message, role=MessageRole.USER)
        all_messages = [*build_history(), new_message]
        # If a system prompt is set, add it as a system message
        if self._system_prompt:
            all_messages.insert(
                0,
                ChatMessage(
                    content=self._system_prompt,
                    role=MessageRole.SYSTEM,
                ),
            )
        match mode:
            case "Query Files":

                # Use only the selected file for the query
                context_filter = None
                if self._selected_filename is not None:
                    docs_ids = []
                    for ingested_document in self._ingest_service.list_ingested():
                        if (
                            ingested_document.doc_metadata["file_name"]
                            == self._selected_filename
                        ):
                            docs_ids.append(ingested_document.doc_id)
                    context_filter = ContextFilter(docs_ids=docs_ids)

                query_stream = self._chat_service.stream_chat(
                    messages=all_messages,
                    use_context=True,
                    context_filter=context_filter,
                )
                yield from yield_deltas(query_stream)
            case "LLM Chat (no context from files)":
                llm_stream = self._chat_service.stream_chat(
                    messages=all_messages,
                    use_context=False,
                )
                yield from yield_deltas(llm_stream)

            case "Search Files":
                response = self._chunks_service.retrieve_relevant(
                    text=message, limit=4, prev_next_chunks=0
                )

                sources = Source.curate_sources(response)

                yield "\n\n\n".join(
                    f"{index}. **{source.file} "
                    f"(page {source.page})**\n "
                    f"{source.text}"
                    for index, source in enumerate(sources, start=1)
                )

    # On initialization and on mode change, this function set the system prompt
    # to the default prompt based on the mode (and user settings).
    @staticmethod
    def _get_default_system_prompt(mode: str) -> str:
        p = ""
        match mode:
            # For query chat mode, obtain default system prompt from settings
            case "Query Files":
                p = settings().ui.default_query_system_prompt
            # For chat mode, obtain default system prompt from settings
            case "LLM Chat (no context from files)":
                p = settings().ui.default_chat_system_prompt
            # For any other mode, clear the system prompt
            case _:
                p = ""
        return p

    def _set_system_prompt(self, system_prompt_input: str) -> None:
        logger.info(f"Setting system prompt to: {system_prompt_input}")
        self._system_prompt = system_prompt_input

    def _set_current_mode(self, mode: str) -> Any:
        self.mode = mode
        self._set_system_prompt(self._get_default_system_prompt(mode))
        # Update placeholder and allow interaction if default system prompt is set
        if self._system_prompt:
            return gr.update(placeholder=self._system_prompt, interactive=True)
        # Update placeholder and disable interaction if no default system prompt is set
        else:
            return gr.update(placeholder=self._system_prompt, interactive=False)

    def _list_ingested_files(self) -> list[list[str]]:
        files = set()
        for ingested_document in self._ingest_service.list_ingested():
            if ingested_document.doc_metadata is None:
                # Skipping documents without metadata
                continue
            file_name = ingested_document.doc_metadata.get(
                "file_name", "[FILE NAME MISSING]"
            )
            files.add(file_name)
        return [[row] for row in files]

    def _upload_file(self, files: list[str]) -> None:
        logger.debug("Loading count=%s files", len(files))
        paths = [Path(file) for file in files]

        # remove all existing Documents with name identical to a new file upload:
        file_names = [path.name for path in paths]
        doc_ids_to_delete = []
        for ingested_document in self._ingest_service.list_ingested():
            if (
                ingested_document.doc_metadata
                and ingested_document.doc_metadata["file_name"] in file_names
            ):
                doc_ids_to_delete.append(ingested_document.doc_id)
        if len(doc_ids_to_delete) > 0:
            logger.info(
                "Uploading file(s) which were already ingested: %s document(s) will be replaced.",
                len(doc_ids_to_delete),
            )
            for doc_id in doc_ids_to_delete:
                self._ingest_service.delete(doc_id)

        self._ingest_service.bulk_ingest([(str(path.name), path) for path in paths])

    def _delete_all_files(self) -> Any:
        ingested_files = self._ingest_service.list_ingested()
        logger.debug("Deleting count=%s files", len(ingested_files))
        for ingested_document in ingested_files:
            self._ingest_service.delete(ingested_document.doc_id)
        return [
            gr.List(self._list_ingested_files()),
            gr.components.Button(interactive=False),
            gr.components.Button(interactive=False),
            gr.components.Textbox("All files"),
        ]

    def _delete_selected_file(self) -> Any:
        logger.debug("Deleting selected %s", self._selected_filename)
        # Note: keep looping for pdf's (each page became a Document)
        for ingested_document in self._ingest_service.list_ingested():
            if (
                ingested_document.doc_metadata
                and ingested_document.doc_metadata["file_name"]
                == self._selected_filename
            ):
                self._ingest_service.delete(ingested_document.doc_id)
        return [
            gr.List(self._list_ingested_files()),
            gr.components.Button(interactive=False),
            gr.components.Button(interactive=False),
            gr.components.Textbox("All files"),
        ]

    def _deselect_selected_file(self) -> Any:
        self._selected_filename = None
        return [
            gr.components.Button(interactive=False),
            gr.components.Button(interactive=False),
            gr.components.Textbox("All files"),
        ]

    def _selected_a_file(self, select_data: gr.SelectData) -> Any:
        self._selected_filename = select_data.value
        return [
            gr.components.Button(interactive=True),
            gr.components.Button(interactive=True),
            gr.components.Textbox(self._selected_filename),
        ]

    def _build_ui_blocks(self) -> gr.Blocks:
        logger.debug("Creating the UI blocks")
        with gr.Blocks(
            title=UI_TAB_TITLE,
            theme=gr.themes.Soft(primary_hue=slate),
            css=".logo { "
            "display:flex;"
            "background-color: #C7BAFF;"
            "height: 80px;"
            "border-radius: 8px;"
            "align-content: center;"
            "justify-content: center;"
            "align-items: center;"
            "}"
            ".logo img { height: 25% }"
            ".contain { display: flex !important; flex-direction: column !important; }"
            "#component-0, #component-3, #component-10, #component-8 { height: 100% !important; }"
            "#chatbot { flex-grow: 1 !important; overflow: auto !important;}"
            "#col { height: calc(100vh - 112px - 16px) !important; }",
        ) as blocks:
            with gr.Row():
                gr.HTML(f"<div class='logo'/><img src={logo_svg} alt=PrivateGPT></div")

            with gr.Row(equal_height=False):
                with gr.Column(scale=3):
                    mode = gr.Radio(
                        MODES,
                        label="Mode",
                        value="Query Files",
                    )
                    upload_button = gr.components.UploadButton(
                        "Upload File(s)",
                        type="filepath",
                        file_count="multiple",
                        size="sm",
                    )
                    ingested_dataset = gr.List(
                        self._list_ingested_files,
                        headers=["File name"],
                        label="Ingested Files",
                        height=235,
                        interactive=False,
                        render=False,  # Rendered under the button
                    )
                    upload_button.upload(
                        self._upload_file,
                        inputs=upload_button,
                        outputs=ingested_dataset,
                    )
                    ingested_dataset.change(
                        self._list_ingested_files,
                        outputs=ingested_dataset,
                    )
                    ingested_dataset.render()
                    deselect_file_button = gr.components.Button(
                        "De-select selected file", size="sm", interactive=False
                    )
                    selected_text = gr.components.Textbox(
                        "All files", label="Selected for Query or Deletion", max_lines=1
                    )
                    delete_file_button = gr.components.Button(
                        "🗑️ Delete selected file",
                        size="sm",
                        visible=settings().ui.delete_file_button_enabled,
                        interactive=False,
                    )
                    delete_files_button = gr.components.Button(
                        "⚠️ Delete ALL files",
                        size="sm",
                        visible=settings().ui.delete_all_files_button_enabled,
                    )
                    deselect_file_button.click(
                        self._deselect_selected_file,
                        outputs=[
                            delete_file_button,
                            deselect_file_button,
                            selected_text,
                        ],
                    )
                    ingested_dataset.select(
                        fn=self._selected_a_file,
                        outputs=[
                            delete_file_button,
                            deselect_file_button,
                            selected_text,
                        ],
                    )
                    delete_file_button.click(
                        self._delete_selected_file,
                        outputs=[
                            ingested_dataset,
                            delete_file_button,
                            deselect_file_button,
                            selected_text,
                        ],
                    )
                    delete_files_button.click(
                        self._delete_all_files,
                        outputs=[
                            ingested_dataset,
                            delete_file_button,
                            deselect_file_button,
                            selected_text,
                        ],
                    )
                    system_prompt_input = gr.Textbox(
                        placeholder=self._system_prompt,
                        label="System Prompt",
                        lines=2,
                        interactive=True,
                        render=False,
                    )
                    # When mode changes, set default system prompt
                    mode.change(
                        self._set_current_mode, inputs=mode, outputs=system_prompt_input
                    )
                    # On blur, set system prompt to use in queries
                    system_prompt_input.blur(
                        self._set_system_prompt,
                        inputs=system_prompt_input,
                    )

                with gr.Column(scale=7, elem_id="col"):
                    _ = gr.ChatInterface(
                        self._chat,
                        chatbot=gr.Chatbot(
                            label=f"LLM: {settings().llm.mode}",
                            show_copy_button=True,
                            elem_id="chatbot",
                            render=False,
                            avatar_images=(
                                None,
                                AVATAR_BOT,
                            ),
                        ),
                        additional_inputs=[mode, upload_button, system_prompt_input],
                    )
        return blocks

    def get_ui_blocks(self) -> gr.Blocks:
        if self._ui_block is None:
            self._ui_block = self._build_ui_blocks()
        return self._ui_block

    def mount_in_app(self, app: FastAPI, path: str) -> None:
        blocks = self.get_ui_blocks()
        blocks.queue()
        logger.info("Mounting the gradio UI, at path=%s", path)
        gr.mount_gradio_app(app, blocks, path=path)


if __name__ == "__main__":
    ui = global_injector.get(PrivateGptUi)
    _blocks = ui.get_ui_blocks()
    _blocks.queue()
    _blocks.launch(debug=False, show_api=False)
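When PrivateGPT runs as a server, the same blocks are attached to FastAPI through mount_in_app rather than launched standalone. A minimal sketch of that wiring (the app object and mount path here are illustrative, not part of this commit):

from fastapi import FastAPI

from private_gpt.di import global_injector
from private_gpt.ui.ui import PrivateGptUi

# Build an app and mount the Gradio UI on it, mirroring what mount_in_app does
app = FastAPI()
ui = global_injector.get(PrivateGptUi)
ui.mount_in_app(app, path="/")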
private_gpt/ui/ui.py
@@ -22,6 +22,7 @@ from private_gpt.server.chunks.chunks_service import Chunk, ChunksService
 from private_gpt.server.ingest.ingest_service import IngestService
 from private_gpt.settings.settings import settings
 from private_gpt.ui.images import logo_svg
+from private_gpt.ui.images_icon import avatar_svg

 logger = logging.getLogger(__name__)

@@ -86,6 +87,8 @@ class PrivateGptUi:
         self._system_prompt = self._get_default_system_prompt(self.mode)

     def _chat(self, message: str, history: list[list[str]], mode: str, *_: Any) -> Any:
+        # Modify the _chat method to set the mode to "Query Docs" by default
+        mode = "Query Docs"
         def yield_deltas(completion_gen: CompletionGen) -> Iterable[str]:
             full_response: str = ""
             stream = completion_gen.response
@@ -96,20 +99,21 @@ class PrivateGptUi:
                     full_response += delta.delta or ""
                 yield full_response
                 time.sleep(0.02)

-            if completion_gen.sources:
-                full_response += SOURCES_SEPARATOR
-                cur_sources = Source.curate_sources(completion_gen.sources)
-                sources_text = "\n\n\n"
-                used_files = set()
-                for index, source in enumerate(cur_sources, start=1):
-                    if (source.file + "-" + source.page) not in used_files:
-                        sources_text = (
-                            sources_text
-                            + f"{index}. {source.file} (page {source.page}) \n\n"
-                        )
-                        used_files.add(source.file + "-" + source.page)
-                full_response += sources_text
+            #commented the display sources method.
+            #if completion_gen.sources:
+            #    full_response += SOURCES_SEPARATOR
+            #    cur_sources = Source.curate_sources(completion_gen.sources)
+            #    sources_text = "\n\n\n"
+            #    used_files = set()
+            #    for index, source in enumerate(cur_sources, start=1):
+            #        if (source.file + "-" + source.page) not in used_files:
+            #            sources_text = (
+            #                sources_text
+            #                + f"{index}. {source.file} (page {source.page}) \n\n"
+            #            )
+            #            used_files.add(source.file + "-" + source.page)
+            #    full_response += sources_text
             yield full_response

         def build_history() -> list[ChatMessage]:
@@ -301,122 +305,133 @@ class PrivateGptUi:
         with gr.Blocks(
             title=UI_TAB_TITLE,
             theme=gr.themes.Soft(primary_hue=slate),
-            css=".logo { "
-            "display:flex;"
-            "background-color: #C7BAFF;"
-            "height: 80px;"
-            "border-radius: 8px;"
-            "align-content: center;"
-            "justify-content: center;"
-            "align-items: center;"
-            "}"
-            ".logo img { height: 25% }"
-            ".contain { display: flex !important; flex-direction: column !important; }"
-            "#component-0, #component-3, #component-10, #component-8 { height: 100% !important; }"
-            "#chatbot { flex-grow: 1 !important; overflow: auto !important;}"
-            "#col { height: calc(100vh - 112px - 16px) !important; }",
+            css="""footer {visibility: hidden}
+            <head>
+            <title>MTU Teaching Assistant</title>
+            <link rel="icon" type="image/png" href={avatar_svg}>
+            </head>
+            .title {
+            white-space: pre; /* This preserves whitespace as is */}
+            .logo {
+            display:flex;
+            background-color: #000000;
+            height: 80px;
+            border-radius: 8px;
+            align-content: center;
+            justify-content: center;
+            align-items: center;
+            }
+            .logo img { height: 25% }
+            .contain { display: flex !important; flex-direction: column !important; }"
+            #component-0, #component-3, #component-10, #component-8 { height: 100% !important; }
+            #chatbot { flex-grow: 1 !important; overflow: auto !important;}
+            #col { height: calc(100vh - 112px - 16px) !important; }
+            """,
+
+
         ) as blocks:
             with gr.Row():
-                gr.HTML(f"<div class='logo'/><img src={logo_svg} alt=PrivateGPT></div")
+                gr.HTML(f"<div class='logo'/><img src={logo_svg} alt=MTU Teaching Assistant></div"),
+                gr.HTML(f"<head><link rel='icon' type='image/png' href={avatar_svg}></head")

-            with gr.Row(equal_height=False):
-                with gr.Column(scale=3):
-                    mode = gr.Radio(
-                        MODES,
-                        label="Mode",
-                        value="Query Files",
-                    )
-                    upload_button = gr.components.UploadButton(
-                        "Upload File(s)",
-                        type="filepath",
-                        file_count="multiple",
-                        size="sm",
-                    )
-                    ingested_dataset = gr.List(
-                        self._list_ingested_files,
-                        headers=["File name"],
-                        label="Ingested Files",
-                        height=235,
-                        interactive=False,
-                        render=False,  # Rendered under the button
-                    )
-                    upload_button.upload(
-                        self._upload_file,
-                        inputs=upload_button,
-                        outputs=ingested_dataset,
-                    )
-                    ingested_dataset.change(
-                        self._list_ingested_files,
-                        outputs=ingested_dataset,
-                    )
-                    ingested_dataset.render()
-                    deselect_file_button = gr.components.Button(
-                        "De-select selected file", size="sm", interactive=False
-                    )
-                    selected_text = gr.components.Textbox(
-                        "All files", label="Selected for Query or Deletion", max_lines=1
-                    )
-                    delete_file_button = gr.components.Button(
-                        "🗑️ Delete selected file",
-                        size="sm",
-                        visible=settings().ui.delete_file_button_enabled,
-                        interactive=False,
-                    )
-                    delete_files_button = gr.components.Button(
-                        "⚠️ Delete ALL files",
-                        size="sm",
-                        visible=settings().ui.delete_all_files_button_enabled,
-                    )
-                    deselect_file_button.click(
-                        self._deselect_selected_file,
-                        outputs=[
-                            delete_file_button,
-                            deselect_file_button,
-                            selected_text,
-                        ],
-                    )
-                    ingested_dataset.select(
-                        fn=self._selected_a_file,
-                        outputs=[
-                            delete_file_button,
-                            deselect_file_button,
-                            selected_text,
-                        ],
-                    )
-                    delete_file_button.click(
-                        self._delete_selected_file,
-                        outputs=[
-                            ingested_dataset,
-                            delete_file_button,
-                            deselect_file_button,
-                            selected_text,
-                        ],
-                    )
-                    delete_files_button.click(
-                        self._delete_all_files,
-                        outputs=[
-                            ingested_dataset,
-                            delete_file_button,
-                            deselect_file_button,
-                            selected_text,
-                        ],
-                    )
-                    system_prompt_input = gr.Textbox(
-                        placeholder=self._system_prompt,
-                        label="System Prompt",
-                        lines=2,
-                        interactive=True,
-                        render=False,
-                    )
-                    # When mode changes, set default system prompt
-                    mode.change(
-                        self._set_current_mode, inputs=mode, outputs=system_prompt_input
-                    )
-                    # On blur, set system prompt to use in queries
-                    system_prompt_input.blur(
-                        self._set_system_prompt,
-                        inputs=system_prompt_input,
-                    )
+            # with gr.Row(equal_height=False):
+            #     with gr.Column(scale=3):
+            #         mode = gr.Radio(
+            #             MODES,
+            #             label="Mode",
+            #             value="Query Files",
+            #         )
+            #         upload_button = gr.components.UploadButton(
+            #             "Upload File(s)",
+            #             type="filepath",
+            #             file_count="multiple",
+            #             size="sm",
+            #         )
+            #         ingested_dataset = gr.List(
+            #             self._list_ingested_files,
+            #             headers=["File name"],
+            #             label="Ingested Files",
+            #             height=235,
+            #             interactive=False,
+            #             render=False,  # Rendered under the button
+            #         )
+            #         upload_button.upload(
+            #             self._upload_file,
+            #             inputs=upload_button,
+            #             outputs=ingested_dataset,
+            #         )
+            #         ingested_dataset.change(
+            #             self._list_ingested_files,
+            #             outputs=ingested_dataset,
+            #         )
+            #         ingested_dataset.render()
+            #         deselect_file_button = gr.components.Button(
+            #             "De-select selected file", size="sm", interactive=False
+            #         )
+            #         selected_text = gr.components.Textbox(
+            #             "All files", label="Selected for Query or Deletion", max_lines=1
+            #         )
+            #         delete_file_button = gr.components.Button(
+            #             "🗑️ Delete selected file",
+            #             size="sm",
+            #             visible=settings().ui.delete_file_button_enabled,
+            #             interactive=False,
+            #         )
+            #         delete_files_button = gr.components.Button(
+            #             "⚠️ Delete ALL files",
+            #             size="sm",
+            #             visible=settings().ui.delete_all_files_button_enabled,
+            #         )
+            #         deselect_file_button.click(
+            #             self._deselect_selected_file,
+            #             outputs=[
+            #                 delete_file_button,
+            #                 deselect_file_button,
+            #                 selected_text,
+            #             ],
+            #         )
+            #         ingested_dataset.select(
+            #             fn=self._selected_a_file,
+            #             outputs=[
+            #                 delete_file_button,
+            #                 deselect_file_button,
+            #                 selected_text,
+            #             ],
+            #         )
+            #         delete_file_button.click(
+            #             self._delete_selected_file,
+            #             outputs=[
+            #                 ingested_dataset,
+            #                 delete_file_button,
+            #                 deselect_file_button,
+            #                 selected_text,
+            #             ],
+            #         )
+            #         delete_files_button.click(
+            #             self._delete_all_files,
+            #             outputs=[
+            #                 ingested_dataset,
+            #                 delete_file_button,
+            #                 deselect_file_button,
+            #                 selected_text,
+            #             ],
+            #         )
+            #         system_prompt_input = gr.Textbox(
+            #             placeholder=self._system_prompt,
+            #             label="System Prompt",
+            #             lines=2,
+            #             interactive=True,
+            #             render=False,
+            #         )
+            #         # When mode changes, set default system prompt
+            #         mode.change(
+            #             self._set_current_mode, inputs=mode, outputs=system_prompt_input
+            #         )
+            #         # On blur, set system prompt to use in queries
+            #         system_prompt_input.blur(
+            #             self._set_system_prompt,
+            #             inputs=system_prompt_input,
+            #         )

                 with gr.Column(scale=7, elem_id="col"):
                     _ = gr.ChatInterface(
@@ -431,7 +446,7 @@ class PrivateGptUi:
                                 AVATAR_BOT,
                             ),
                         ),
-                        additional_inputs=[mode, upload_button, system_prompt_input],
+                        #additional_inputs=[mode, upload_button, system_prompt_input],
                     )
         return blocks

settings.yaml
@@ -3,7 +3,7 @@
 # Syntax in `private_pgt/settings/settings.py`
 server:
   env_name: ${APP_ENV:prod}
-  port: ${PORT:8001}
+  port: ${PORT:8002}
 cors:
   enabled: false
   allow_origins: ["*"]
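As the in-file comment notes, the ${...} placeholders are resolved by the settings loader, so ${PORT:8002} still honours a PORT environment variable and only falls back to 8002 when it is unset. A hypothetical launch overriding the new default:

PORT=9000 python -m private_gpt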