Compare commits

...

200 Commits

Author SHA1 Message Date
ibuler
6235442e0f perf: change google authenticator apk download 2025-05-15 17:42:00 +08:00
Bryan
d0cb9e5432 Merge pull request #15412 from jumpserver/dev
v4.10.0
2025-05-15 17:11:43 +08:00
feng
9969395500 fix: perm del node and user group 500 2025-05-15 15:55:32 +08:00
wangruidong
e1f03a194b fix: Asset list gather account raise connection already closed 2025-05-15 15:48:19 +08:00
Eric
aa0125385a perf: fix NoneType error 2025-05-15 15:01:57 +08:00
feng
8e8579bebe perf: translate 2025-05-15 14:41:19 +08:00
feng
ad5ce5d4cf perf: translate 2025-05-15 14:01:51 +08:00
ibuler
4f009504ad perf: load custom protocols 2025-05-15 11:41:33 +08:00
ibuler
986bc926fc perf: iframe set to sameorigin 2025-05-14 19:29:16 +08:00
feng
6aafb0f01a perf: Translate 2025-05-14 18:18:45 +08:00
feng
43775096d1 perf: Login switch language 2025-05-14 17:40:09 +08:00
ibuler
f826f43495 perf: simplify db using 2025-05-14 15:11:46 +08:00
ibuler
e9ff988d8c perf: db connection close if needed 2025-05-14 14:43:14 +08:00
feng
a72e6456d9 perf: Connect method exclude face 2025-05-14 14:02:27 +08:00
ibuler
941a784a5b perf: 修改 migrations 2025-05-13 19:15:01 +08:00
ibuler
edaf9bb0b2 perf: domain enabled to gateway enabled 2025-05-13 16:36:56 +08:00
feng
e8ca177fe4 perf: translate 2025-05-13 14:50:14 +08:00
ewall555
a88ebeff15 feat: Set the default expiration days for adding user and asset permissions 2025-05-13 10:35:21 +08:00
ibuler
bd0c50a3e4 fix: account username has domain, then set again 2025-05-12 18:36:26 +08:00
feng
9f121723c4 perf: auditor add asset user view perm 2025-05-12 17:57:48 +08:00
feng
245ed79b17 perf: Translate 2025-05-12 17:05:37 +08:00
ibuler
01c07a834b perf: 修改 adhoc 翻译 2025-05-12 10:50:43 +08:00
ibuler
4fb61e0af6 perf: org id error 2025-05-12 10:24:33 +08:00
Emmanuel Ferdman
19b7be33ae Resolve warnings of logger library
Signed-off-by: Emmanuel Ferdman <emmanuelferdman@gmail.com>
2025-05-12 09:51:30 +08:00
feng
7797c76032 perf: migrate 2025-05-09 18:25:41 +08:00
老广
eb777854d4 Pr@dev@fix django version (#15374)
* fix: Downgrade django and djangorestframework versions for compatibility

* perf: Update Dockerfile with new base image tag

* perf: Update Dockerfile with new base image tag

---------

Co-authored-by: wangruidong <940853815@qq.com>
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2025-05-09 17:50:11 +08:00
Eric
22d4bd5292 perf: optimize file removal 2025-05-09 17:21:24 +08:00
ibuler
47d0d8b7a0 perf: stop dep check 2025-05-09 17:20:33 +08:00
wangruidong
5e298e5749 fix: Downgrade django and djangorestframework versions for compatibility 2025-05-09 16:10:49 +08:00
w940853815
ad3ba5034e Revert "fix: Upgrading django version causes json serialization problems"
This reverts commit ad56845d89.
2025-05-09 16:04:58 +08:00
feng
f1a1c3660d perf: Luna en translate 2025-05-09 15:14:56 +08:00
feng
eaf5bc5eb3 perf: I18n api 2025-05-09 14:46:48 +08:00
feng
30e680ad91 fix: ES the calculated quantity is 0 2025-05-08 17:23:15 +08:00
wangruidong
c9f281e8f7 fix: Handle exceptions in leak password check 2025-05-08 17:13:23 +08:00
wangruidong
ad56845d89 fix: Upgrading django version causes json serialization problems 2025-05-08 17:12:53 +08:00
feng
c3dceec3c7 perf: Connectivity add rdp error 2025-05-08 16:07:30 +08:00
wangruidong
100dad75f1 fix: i18n error 2025-05-08 14:50:54 +08:00
feng
aa52060f24 perf: Upgrade bootstrap js to 5.3.6 2025-05-08 14:12:17 +08:00
wangruidong
089a5f50f4 feat: Add LeakPasswords config 2025-05-07 17:47:22 +08:00
feng626
0bdbb6fd84 Merge pull request #15352 from jumpserver/pr@dev@connectivity_choice
perf: Connectivity choice
2025-05-07 17:30:48 +08:00
feng626
dd5bcab4ff Merge branch 'dev' into pr@dev@connectivity_choice 2025-05-07 17:30:30 +08:00
feng
49f0e51769 perf: Connectivity choice 2025-05-07 17:25:10 +08:00
wangruidong
5577e39f21 perf: Support watermark customization 2025-05-07 16:52:58 +08:00
老广
e2830ecdd6 perf: passkey auth auto mfa 2025-05-07 16:24:39 +08:00
feng
8065e04f26 perf: Translate 2025-05-07 15:50:04 +08:00
feng
fe70b60e95 perf: Users lina translate 2025-05-07 11:31:22 +08:00
feng
1f7836353a perf: translate 2025-05-07 10:32:08 +08:00
feng
a4296b3129 perf: Clean push record period 2025-05-06 18:32:56 +08:00
feng
ffc92fa7b4 perf: Clean push record period 2025-05-06 18:22:54 +08:00
github-actions[bot]
f94e032858 perf: Update Dockerfile with new base image tag 2025-04-30 11:13:34 +08:00
ibuler
25429e30ba perf: update requirements 2025-04-30 11:13:34 +08:00
CaptainB
5c7d539c6f chore: Configure Dependabot to group Python dependencies 2025-04-30 11:04:21 +08:00
fit2bot
a8a6e03428 perf: update deps 2025-04-30 10:58:19 +08:00
wangruidong
71b9b2df74 fix: Add AdminConnectionToken to operate log exclude_models 2025-04-30 10:51:25 +08:00
Eric
f1bc69b253 perf: add luna i18n 2025-04-29 18:12:10 +08:00
wangruidong
282ca25504 perf: Skip alert if login city seen in past 7 days 2025-04-29 17:46:10 +08:00
fit2bot
1bb44e783a perf: some i18n (#15312)
Co-authored-by: ibuler <ibuler@qq.com>
2025-04-29 17:45:29 +08:00
feng
a64fe4b0be perf: Account translate 2025-04-29 14:44:22 +08:00
feng
a75faf8da6 perf: Discover account translate 2025-04-29 14:36:13 +08:00
Bryan
537a9325a3 Update README.md (#15305)
* Update README.md

* Update README.md

* Update README.md

* Update README.md

* Update README.md

* Update README.md

* Update README.md

* Update README.md

* Update README.md
2025-04-28 11:36:05 +08:00
feng
a9d455e867 perf: ntlm_err 2025-04-27 18:53:43 +08:00
feng
d06d26ac54 perf: Display asset/account connectivity error message 2025-04-27 18:50:00 +08:00
fit2bot
e992c44e11 perf: change lfs files download (#15293)
* perf: change lfs files download

* perf: clean unused ansible module

* perf: update lfs download

* perf: Update Dockerfile with new base image tag

* perf: change download path

* perf: Update Dockerfile with new base image tag

---------

Co-authored-by: ibuler <ibuler@qq.com>
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2025-04-27 14:35:10 +08:00
feng
24fe058fd9 perf: lina translate 2025-04-25 18:07:09 +08:00
feng
a3fef9cc54 perf: Update the key when the integration-application is updated or created 2025-04-25 17:29:44 +08:00
ibuler
471053e62a perf: change mcp integrate 2025-04-25 17:28:03 +08:00
jiangweidong
dc6308b030 perf: if the apply-asset-ticket name is 128 characters long, will raise 500 2025-04-25 17:27:13 +08:00
feng
f016ae6161 perf: add sftplog command models field index 2025-04-25 15:21:55 +08:00
feng
14a8d877e0 perf: ko translate 2025-04-25 15:04:03 +08:00
feng
ddf20570a1 perf: device support ad 2025-04-23 19:38:01 +08:00
feng
1ad9616b7f perf: gather facts gpu info 2025-04-22 17:48:21 +08:00
刘瑞斌
d7bc6bb201 chore: use uv as package-ecosystem 2025-04-21 13:36:24 +08:00
feng
f855043468 perf: luna ru translate 2025-04-21 11:34:51 +08:00
fit2bot
3159a4e794 perf: change domain to zone (#15255)
* perf: change domain to zone

* perf: change domain to zone

* perf: change some word

* perf: update gateway enabled i18n

* perf: change migrations

---------

Co-authored-by: ibuler <ibuler@qq.com>
2025-04-21 10:30:18 +08:00
feng
57fcebfdd3 fix: No data found for the carrying organization 2025-04-18 16:50:07 +08:00
feng626
c500bb4e4c Revert "Revert "perf:Stored command records in ES support accurate searching.""
This reverts commit 6bc1c5bd50.
2025-04-18 16:50:07 +08:00
feng
fd062b0da6 perf: ru translate 2025-04-18 14:52:44 +08:00
ibuler
bcb112d5c6 perf: user profile api 2025-04-18 14:11:56 +08:00
fit2bot
533dbf316c perf: add ali rds dependencies (#15247)
* perf: add ali rds dependencies

* perf: Update Dockerfile with new base image tag

---------

Co-authored-by: Eric <xplzv@126.com>
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2025-04-18 12:11:51 +08:00
github-actions[bot]
9cce94b709 perf: Update Dockerfile with new base image tag 2025-04-18 11:53:40 +08:00
Eric
8b815d812b perf: modify Dockerfile-base 2025-04-18 11:53:40 +08:00
github-actions[bot]
a168fc8a62 perf: Update Dockerfile with new base image tag 2025-04-18 11:29:07 +08:00
Eric
faae1a09d1 perf: lint dependencies 2025-04-18 11:29:07 +08:00
github-actions[bot]
26e819e120 perf: Update Dockerfile with new base image tag 2025-04-18 11:29:07 +08:00
Eric
79579654a1 perf: use uv tool
perf: add Homepage
perf: add env
2025-04-18 11:29:07 +08:00
老广
558188da90 merge: dev to master
Ready to release
2025-04-17 20:24:45 +08:00
feng626
6bc1c5bd50 Revert "perf:Stored command records in ES support accurate searching."
This reverts commit 3d6d2af268.
2025-04-17 20:16:06 +08:00
ibuler
36f312b943 perf: page queryset mixin 2025-04-17 19:47:51 +08:00
ibuler
11811c453b perf: page queryset mixin 2025-04-17 19:47:51 +08:00
ibuler
12fadeec58 perf: revert terminal api 2025-04-17 19:34:23 +08:00
ibuler
b49fd21e08 perf: 虚拟账号 api 2025-04-17 17:50:42 +08:00
feng
9b982eb592 perf: change secret api perm 2025-04-17 17:15:06 +08:00
wangruidong
31652ef5b1 fix: include openid in source validation logic 2025-04-17 16:24:00 +08:00
feng
8fef18b991 perf: gather account windows playbook failed_when: false 2025-04-17 15:55:49 +08:00
ibuler
c804c053d2 perf: revert api 2025-04-17 15:53:52 +08:00
ibuler
bef2282604 perf: asset list compute account amount 2025-04-17 15:15:36 +08:00
feng
cabc069045 perf: Translate 2025-04-17 15:12:16 +08:00
feng
99c9a021b7 fix: update applet host (platform failed) 2025-04-17 14:55:36 +08:00
ibuler
6cb3cc1f29 perf: 修改 DS 的一些翻译 2025-04-17 14:47:11 +08:00
feng
67422ef4ba fix: automation no account 2025-04-17 14:00:24 +08:00
gerry
3d6d2af268 perf:Stored command records in ES support accurate searching. 2025-04-17 11:43:16 +08:00
wangruidong
ee97e45cc3 fix: Allow superusers delete adhoc and playbook 2025-04-17 10:54:47 +08:00
feng
0131eaa6db perf: es search 2025-04-16 18:15:58 +08:00
feng
eaa390fd6f perf: update asset directory_services allow_empty true 2025-04-16 17:39:55 +08:00
ibuler
e2b8fd0d40 perf: change account filter by asset 2025-04-16 17:37:36 +08:00
feng
2aace05099 perf: as account username 2025-04-16 17:29:23 +08:00
ibuler
1ee70af93d perf: applet account select 2025-04-16 16:43:34 +08:00
feng
fa70fb2921 perf: Translate 2025-04-16 15:37:11 +08:00
ibuler
01a6019022 perf: swagger api 2025-04-16 14:53:51 +08:00
wangruidong
5c61a11d82 fix: add periodic_display to read_only_fields in Job serializer 2025-04-16 14:18:11 +08:00
fit2bot
67f3341310 perf: change db prefetch (#15215) 2025-04-16 13:48:12 +08:00
feng
cb49e26387 perf: refresh asset type tree 2025-04-16 11:44:07 +08:00
feng
314da330c0 perf: Asset account filter 2025-04-16 11:36:58 +08:00
halo
f1c98fda34 perf: client version 2025-04-16 10:39:10 +08:00
ibuler
1fdd1036d3 perf: directory service db 2025-04-15 20:24:10 +08:00
feng
e286997090 perf: koko translate 2025-04-15 17:26:06 +08:00
wangruidong
ce3daf5496 fix: update translation strings and improve error handling in inventory and job modules 2025-04-15 16:49:35 +08:00
feng
631570b819 perf: Asset filter 2025-04-15 16:45:50 +08:00
feng
9b1bff0847 perf: client version 2025-04-15 12:28:59 +08:00
feng
ee8a2afe16 fix: ES no data found 2025-04-15 12:07:56 +08:00
CaptainB
1a01c0537c chore: Add Dependabot configuration for pip dependencies 2025-04-15 11:47:59 +08:00
jiangweidong
64393fe695 fix: Error in using set method 2025-04-15 11:47:01 +08:00
jiangweidong
11ef4fab4e perf: Es subsequent optimization 2025-04-15 11:47:01 +08:00
jiangweidong
9f8256f885 fix: Solve the problem that log details cannot be viewed in non-default organizations 2025-04-15 11:47:01 +08:00
fit2bot
5390fbacec perf: some swagger api (#15203)
* perf: some swagger api

* perf: update deps

* perf: Update Dockerfile with new base image tag

---------

Co-authored-by: ibuler <ibuler@qq.com>
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2025-04-15 11:43:36 +08:00
wangruidong
8b9fe3c72b perf: add default logo handling in IntegrationApplicationSerializer 2025-04-15 11:08:15 +08:00
wangruidong
20070e0647 fix: improve crontab validation logic 2025-04-15 11:07:23 +08:00
ibuler
47b72cb35e perf: add leak password db config 2025-04-14 17:27:27 +08:00
feng
2ca0e9a5a2 perf: automation .account -> .all_accounts 2025-04-14 14:18:56 +08:00
feng
3b2ac101c8 perf: windows ad asset info 2025-04-14 10:14:38 +08:00
feng
6795f036dd perf: ad asset automation gather_facts_enabled true 2025-04-11 17:53:43 +08:00
feng
aaa1f48258 perf: koko translate 2025-04-11 17:45:58 +08:00
feng
53c5bab203 perf: user login acl remove warning notify_and_warn action 2025-04-10 15:56:31 +08:00
feng
1254d28463 perf: windows ad gather account 2025-04-10 15:33:42 +08:00
wangruidong
d6b1a577fc fix: resolve ForeignKeyViolation in AccountRisk 2025-04-09 19:30:05 +08:00
wangruidong
5ab85d3561 perf: File directory adjustment 2025-04-09 19:29:52 +08:00
wangruidong
467f4c5d4f perf: Translate 2025-04-09 19:29:52 +08:00
wangruidong
f2404319af fix: account_prefer 2025-04-09 19:29:52 +08:00
wangruidong
bbeadf7dbe perf: optimize adhoc asset selection experience 2025-04-09 19:29:52 +08:00
feng
941bd9b3f4 perf: Translate 2025-04-09 19:22:32 +08:00
feng
37a307a9d0 perf: Windows AD 2025-04-09 18:22:00 +08:00
ibuler
528f9045d0 perf: update connection token 2025-04-09 11:11:39 +08:00
ibuler
a317549a01 perf: migrations merge 2025-04-08 19:21:37 +08:00
ibuler
0f5681de7d chore: remove workflow 2025-04-08 19:21:37 +08:00
ibuler
a7c514f8d8 perf: rename some words 2025-04-08 19:21:37 +08:00
ibuler
75ea0079a2 perf: update ad domain 2025-04-08 19:21:37 +08:00
ibuler
4cc1687bf8 perf: update ad 2025-04-08 19:21:37 +08:00
ibuler
76e57b9a3e perf: update ad 2025-04-08 19:21:37 +08:00
ibuler
ba3bce1e2e perf: perm account valid 2025-04-08 19:21:37 +08:00
ibuler
45f0343cfa perf: update ds 2025-04-08 19:21:37 +08:00
ibuler
acaa4cf2d5 perf: rename ad to ds 2025-04-08 19:21:37 +08:00
ibuler
3f452daee8 perf: ad as asset 2025-04-08 19:21:37 +08:00
feng
5e25361ee8 perf: Operate default log 2025-04-08 18:37:25 +08:00
jiangweidong
7b7604e14d Added cloud sync global released asset tab (v4.9) 2025-04-08 18:31:53 +08:00
Eric
f9037878c3 perf: add remoteapp bitmapcache settings 2025-04-08 17:36:48 +08:00
fit2bot
29ddfcac17 fix: Optimize UserConfirmDialog to send code via email (#15164)
* fix: Optimize UserConfirmDialog to send code via email

* fix: Optimize verification failure without error reporting

---------

Co-authored-by: halo <wuyihuangw@gmail.com>
Co-authored-by: Bryan <jiangjie.bai@fit2cloud.com>
2025-04-08 15:50:15 +08:00
wangruidong
519ec65ad4 perf: LDAP strict sync 2025-04-08 15:43:15 +08:00
jiangweidong
1f60e328b6 perf: Export resources to add operation logs 2025-04-08 15:37:29 +08:00
Bai
e8e0ea920b perf: change jumpserver.org to jumpserver.com 2025-04-08 14:23:28 +08:00
Aaron3S
4fd8efd043 feat: remove oracle dyn port 2025-04-08 13:50:30 +08:00
wangruidong
623c800d31 fix: failure when deleting remote account in SQL Server 2025-04-08 13:48:48 +08:00
wangruidong
d2c6e3c7a6 fix: Job audit: Search job list, filter failures based on command 2025-04-08 13:41:59 +08:00
github-actions[bot]
dc5883576d Auto-translate README 2025-04-08 13:34:21 +08:00
Bai
0a9c9fb227 perf: change readme 2025-04-08 13:26:48 +08:00
Bai
15a1a58eca perf: change support email 2025-04-08 13:20:22 +08:00
halo
782401ef86 fix: Implement function _check_code 2025-04-07 16:20:43 +08:00
maninhill
8abcd201bc chore: Update README.md 2025-04-03 15:11:38 +08:00
Bryan
cdbc10ac72 Update README.md 2025-03-31 17:37:01 +08:00
Bryan
ceeef890e6 Update README.md 2025-03-31 17:37:01 +08:00
Bryan
dc8a172884 Update README.md 2025-03-28 17:53:22 +08:00
Bryan
62115e43bb Update README.md 2025-03-28 17:53:22 +08:00
Bryan
5eced85e69 Update README.md 2025-03-28 17:53:22 +08:00
Bryan
ec99b17b76 Update README.md 2025-03-28 17:53:22 +08:00
Bryan
84569720c3 Update README.md 2025-03-28 17:53:22 +08:00
feng
65984d38f1 perf: Account filter 2025-03-28 16:34:53 +08:00
feng
f6913ac63c perf: Privacy mode 2025-03-27 18:34:11 +08:00
Halo
514b2cdfc5 feat: Email as a method for multi-factor authentication (#15134)
* feat: Email as a method for multi-factor authentication

* perf: Optimize the MFA email sending logic

* perf: Optimize some parameters

* perf: Translate
2025-03-27 17:26:38 +08:00
ibuler
b55000663e perf: 提升服务注册安全性 2025-03-27 16:25:28 +08:00
ibuler
9ed822bb3e perf: 优化获取 labels 2025-03-27 16:23:14 +08:00
feng
ea599d7695 perf: Perm the template push account 2025-03-27 14:13:29 +08:00
feng
01c5d68b35 perf: change secret change_secret_result 2025-03-27 14:08:19 +08:00
feng
2e2c331941 perf: translate 2025-03-25 18:03:10 +08:00
feng
266ea9b858 perf: Change secret 2025-03-25 16:05:47 +08:00
feng
5f2e838342 perf: koko sftp translate 2025-03-25 14:33:50 +08:00
ZhaoJiSen
544ad5532b Merge pull request #15117 from jumpserver/pr@dev@koko_translate
perf: koko translate
2025-03-25 11:02:08 +08:00
feng
d22d715ee7 perf: koko translate 2025-03-25 10:59:45 +08:00
halo
dd2366532c perf: Use a domain account to avoid automatically creating a local account 2025-03-25 10:22:16 +08:00
ibuler
9667a3d340 perf: add crontab check min 60m 2025-03-25 09:56:37 +08:00
feng
c8e6e5d38c perf: Login language 2025-03-24 18:55:40 +08:00
feng
9d1047fae2 perf: Translate 2025-03-24 16:51:49 +08:00
feng
28f97d746d perf: Translate 2025-03-24 15:39:37 +08:00
ibuler
be72344c63 perf: update tk create 2025-03-24 10:54:17 +08:00
ibuler
d3176b68a8 perf: 优化 admin token 判断 2025-03-24 10:06:22 +08:00
ibuler
5411f65546 perf: update get permed account 2025-03-24 09:51:46 +08:00
ibuler
e3ba468004 fix: 修复 token 直连的问题 2025-03-24 09:51:46 +08:00
feng
a03a11efa4 perf: Translate 2025-03-21 15:49:42 +08:00
feng
d344495417 perf: Translate 2025-03-21 14:24:37 +08:00
github-actions[bot]
9412bd0331 Auto-translate README 2025-03-21 13:29:01 +08:00
Bai
8d73ddb1cd perf: update readme languages 2025-03-21 13:17:56 +08:00
Bai
7fe56a5e1a perf: README 2025-03-21 10:35:27 +08:00
321 changed files with 29535 additions and 20227 deletions

View File

@@ -8,4 +8,6 @@ celerybeat.pid
.vagrant/
apps/xpack/.git
.history/
.idea
.idea
.venv/
.env

4
.gitattributes vendored
View File

@@ -1,4 +0,0 @@
*.mmdb filter=lfs diff=lfs merge=lfs -text
*.mo filter=lfs diff=lfs merge=lfs -text
*.ipdb filter=lfs diff=lfs merge=lfs -text
leak_passwords.db filter=lfs diff=lfs merge=lfs -text

14
.github/dependabot.yml.bak vendored Normal file
View File

@@ -0,0 +1,14 @@
version: 2
updates:
- package-ecosystem: "uv"
directory: "/"
schedule:
interval: "weekly"
day: "monday"
time: "09:30"
timezone: "Asia/Shanghai"
target-branch: dev
groups:
python-dependencies:
patterns:
- "*"

View File

@@ -2,10 +2,14 @@ name: Translate README
on:
workflow_dispatch:
inputs:
source_readme:
description: "Source README"
required: false
default: "./readmes/README.en.md"
target_langs:
description: "Target Languages"
required: false
default: "zh-hans,zh-hant,ja,pt-br"
default: "zh-hans,zh-hant,ja,pt-br,es,ru"
gen_dir_path:
description: "Generate Dir Name"
required: false
@@ -34,6 +38,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.PRIVATE_TOKEN }}
OPENAI_API_KEY: ${{ secrets.GPT_API_TOKEN }}
GPT_MODE: ${{ github.event.inputs.gpt_mode }}
SOURCE_README: ${{ github.event.inputs.source_readme }}
TARGET_LANGUAGES: ${{ github.event.inputs.target_langs }}
PUSH_BRANCH: ${{ github.event.inputs.push_branch }}
GEN_DIR_PATH: ${{ github.event.inputs.gen_dir_path }}

3
.gitignore vendored
View File

@@ -46,3 +46,6 @@ test.py
.test/
*.mo
apps.iml
*.db
*.mmdb
*.ipdb

View File

@@ -1,4 +1,4 @@
FROM jumpserver/core-base:20250224_065619 AS stage-build
FROM jumpserver/core-base:20250509_094529 AS stage-build
ARG VERSION

View File

@@ -1,6 +1,6 @@
FROM python:3.11-slim-bullseye
ARG TARGETARCH
COPY --from=ghcr.io/astral-sh/uv:0.6.14 /uv /uvx /usr/local/bin/
# Install APT dependencies
ARG DEPENDENCIES=" \
ca-certificates \
@@ -43,18 +43,19 @@ WORKDIR /opt/jumpserver
ARG PIP_MIRROR=https://pypi.org/simple
ENV POETRY_PYPI_MIRROR_URL=${PIP_MIRROR}
ENV ANSIBLE_COLLECTIONS_PATHS=/opt/py3/lib/python3.11/site-packages/ansible_collections
ENV LANG=en_US.UTF-8 \
PATH=/opt/py3/bin:$PATH
ENV UV_LINK_MODE=copy
RUN --mount=type=cache,target=/root/.cache \
--mount=type=bind,source=poetry.lock,target=poetry.lock \
--mount=type=bind,source=pyproject.toml,target=pyproject.toml \
--mount=type=bind,source=utils/clean_site_packages.sh,target=clean_site_packages.sh \
--mount=type=bind,source=requirements/clean_site_packages.sh,target=clean_site_packages.sh \
--mount=type=bind,source=requirements/collections.yml,target=collections.yml \
--mount=type=bind,source=requirements/static_files.sh,target=utils/static_files.sh \
set -ex \
&& python3 -m venv /opt/py3 \
&& pip install poetry poetry-plugin-pypi-mirror -i ${PIP_MIRROR} \
&& . /opt/py3/bin/activate \
&& poetry config virtualenvs.create false \
&& poetry install --no-cache --only main \
&& ansible-galaxy collection install -r collections.yml --force --ignore-certs \
&& bash clean_site_packages.sh \
&& poetry cache clear pypi --all
&& uv venv \
&& uv pip install -i${PIP_MIRROR} -r pyproject.toml \
&& ln -sf $(pwd)/.venv /opt/py3 \
&& bash utils/static_files.sh \
&& bash clean_site_packages.sh

View File

@@ -24,11 +24,7 @@ RUN set -ex \
WORKDIR /opt/jumpserver
ARG PIP_MIRROR=https://pypi.org/simple
ENV POETRY_PYPI_MIRROR_URL=${PIP_MIRROR}
COPY poetry.lock pyproject.toml ./
RUN set -ex \
&& . /opt/py3/bin/activate \
&& pip install poetry poetry-plugin-pypi-mirror -i ${PIP_MIRROR} \
&& poetry install --only xpack \
&& poetry cache clear pypi --all
RUN set -ex \
&& uv pip install -i${PIP_MIRROR} --group xpack

View File

@@ -1,16 +1,18 @@
<div align="center">
<a name="readme-top"></a>
<a href="https://jumpserver.org/index-en.html"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a>
<a href="https://jumpserver.com" target="_blank"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a>
## An open-source PAM tool (Bastion Host)
[![][license-shield]][license-link]
[![][docs-shield]][docs-link]
[![][deepwiki-shield]][deepwiki-link]
[![][discord-shield]][discord-link]
[![][docker-shield]][docker-link]
[![][github-release-shield]][github-release-link]
[![][github-stars-shield]][github-stars-link]
[English](/README.md) · [中文(简体)](/readmes/README.zh-hans.md) · [中文(繁體)](/readmes/README.zh-hant.md) · [日本語](/readmes/README.ja.md) · [Português (Brasil)](/readmes/README.pt-br.md)
[English](/README.md) · [中文(简体)](/readmes/README.zh-hans.md) · [中文(繁體)](/readmes/README.zh-hant.md) · [日本語](/readmes/README.ja.md) · [Português (Brasil)](/readmes/README.pt-br.md) · [Español](/readmes/README.es.md) · [Русский](/readmes/README.ru.md)
</div>
<br/>
@@ -19,7 +21,13 @@
JumpServer is an open-source Privileged Access Management (PAM) tool that provides DevOps and IT teams with on-demand and secure access to SSH, RDP, Kubernetes, Database and RemoteApp endpoints through a web browser.
![JumpServer Overview](https://github.com/jumpserver/jumpserver/assets/32935519/35a371cb-8590-40ed-88ec-f351f8cf9045)
<picture>
<source media="(prefers-color-scheme: light)" srcset="https://github.com/user-attachments/assets/dd612f3d-c958-4f84-b164-f31b75454d7f">
<source media="(prefers-color-scheme: dark)" srcset="https://github.com/user-attachments/assets/28676212-2bc4-4a9f-ae10-3be9320647e3">
<img src="https://github.com/user-attachments/assets/dd612f3d-c958-4f84-b164-f31b75454d7f" alt="Theme-based Image">
</picture>
## Quickstart
@@ -36,18 +44,19 @@ Access JumpServer in your browser at `http://your-jumpserver-ip/`
[![JumpServer Quickstart](https://github.com/user-attachments/assets/0f32f52b-9935-485e-8534-336c63389612)](https://www.youtube.com/watch?v=UlGYRbKrpgY "JumpServer Quickstart")
## Screenshots
<table style="border-collapse: collapse; border: 1px solid black;">
<tr>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/99fabe5b-0475-4a53-9116-4c370a1426c4" alt="JumpServer Console" /></td>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/a424d731-1c70-4108-a7d8-5bbf387dda9a" alt="JumpServer Audits" /></td>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/user-attachments/assets/7c1f81af-37e8-4f07-8ac9-182895e1062e" alt="JumpServer PAM" /></td>    
</tr>
<tr>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/a424d731-1c70-4108-a7d8-5bbf387dda9a" alt="JumpServer Audits" /></td>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/393d2c27-a2d0-4dea-882d-00ed509e00c9" alt="JumpServer Workbench" /></td>
</tr>
<tr>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/user-attachments/assets/eaa41f66-8cc8-4f01-a001-0d258501f1c9" alt="JumpServer RBAC" /></td>     
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/3a2611cd-8902-49b8-b82b-2a6dac851f3e" alt="JumpServer Settings" /></td>
</tr>
<tr>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/1e236093-31f7-4563-8eb1-e36d865f1568" alt="JumpServer SSH" /></td>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/69373a82-f7ab-41e8-b763-bbad2ba52167" alt="JumpServer RDP" /></td>
@@ -69,9 +78,9 @@ JumpServer consists of multiple key components, which collectively form the func
| [KoKo](https://github.com/jumpserver/koko) | <a href="https://github.com/jumpserver/koko/releases"><img alt="Koko release" src="https://img.shields.io/github/release/jumpserver/koko.svg" /></a> | JumpServer Character Protocol Connector |
| [Lion](https://github.com/jumpserver/lion) | <a href="https://github.com/jumpserver/lion/releases"><img alt="Lion release" src="https://img.shields.io/github/release/jumpserver/lion.svg" /></a> | JumpServer Graphical Protocol Connector |
| [Chen](https://github.com/jumpserver/chen) | <a href="https://github.com/jumpserver/chen/releases"><img alt="Chen release" src="https://img.shields.io/github/release/jumpserver/chen.svg" /> | JumpServer Web DB |
| [Razor](https://github.com/jumpserver/razor) | <img alt="Chen" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE RDP Proxy Connector |
| [Tinker](https://github.com/jumpserver/tinker) | <img alt="Tinker" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Remote Application Connector (Windows) |
| [Tinker](https://github.com/jumpserver/tinker) | <img alt="Tinker" src="https://img.shields.io/badge/release-private-red" /> | JumpServer Remote Application Connector (Windows) |
| [Panda](https://github.com/jumpserver/Panda) | <img alt="Panda" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Remote Application Connector (Linux) |
| [Razor](https://github.com/jumpserver/razor) | <img alt="Chen" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE RDP Proxy Connector |
| [Magnus](https://github.com/jumpserver/magnus) | <img alt="Magnus" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Database Proxy Connector |
| [Nec](https://github.com/jumpserver/nec) | <img alt="Nec" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE VNC Proxy Connector |
| [Facelive](https://github.com/jumpserver/facelive) | <img alt="Facelive" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Facial Recognition |
@@ -81,12 +90,6 @@ JumpServer consists of multiple key components, which collectively form the func
Welcome to submit PR to contribute. Please refer to [CONTRIBUTING.md][contributing-link] for guidelines.
## Security
JumpServer is a mission critical product. Please refer to the Basic Security Recommendations for installation and deployment. If you encounter any security-related issues, please contact us directly:
- Email: support@fit2cloud.com
## License
Copyright (c) 2014-2025 FIT2CLOUD, All rights reserved.
@@ -100,6 +103,7 @@ Unless required by applicable law or agreed to in writing, software distributed
<!-- JumpServer official link -->
[docs-link]: https://jumpserver.com/docs
[discord-link]: https://discord.com/invite/W6vYXmAQG2
[deepwiki-link]: https://deepwiki.com/jumpserver/jumpserver/
[contributing-link]: https://github.com/jumpserver/jumpserver/blob/dev/CONTRIBUTING.md
<!-- JumpServer Other link-->
@@ -110,10 +114,10 @@ Unless required by applicable law or agreed to in writing, software distributed
[github-issues-link]: https://github.com/jumpserver/jumpserver/issues
<!-- Shield link-->
[docs-shield]: https://img.shields.io/badge/documentation-148F76
[github-release-shield]: https://img.shields.io/github/v/release/jumpserver/jumpserver
[github-stars-shield]: https://img.shields.io/github/stars/jumpserver/jumpserver?color=%231890FF&style=flat-square
[github-stars-shield]: https://img.shields.io/github/stars/jumpserver/jumpserver?color=%231890FF&style=flat-square   
[docker-shield]: https://img.shields.io/docker/pulls/jumpserver/jms_all.svg
[license-shield]: https://img.shields.io/github/license/jumpserver/jumpserver
[deepwiki-shield]: https://img.shields.io/badge/deepwiki-devin?color=blue
[discord-shield]: https://img.shields.io/discord/1194233267294052363?style=flat&logo=discord&logoColor=%23f5f5f5&labelColor=%235462eb&color=%235462eb
<!-- Image link -->

View File

@@ -5,8 +5,7 @@ JumpServer 是一款正在成长的安全产品, 请参考 [基本安全建议
如果你发现安全问题,请直接联系我们,我们携手让世界更好:
- ibuler@fit2cloud.com
- support@fit2cloud.com
- 400-052-0755
- support@lxware.hk
# Security Policy
@@ -16,6 +15,5 @@ JumpServer is a security product, The installation and development should follow
All security bugs should be reported to the contact as below:
- ibuler@fit2cloud.com
- support@fit2cloud.com
- 400-052-0755
- support@lxware.hk

View File

@@ -46,6 +46,16 @@ class AccountViewSet(OrgBulkModelViewSet):
}
export_as_zip = True
def get_queryset(self):
queryset = super().get_queryset()
asset_id = self.request.query_params.get('asset') or self.request.query_params.get('asset_id')
if not asset_id:
return queryset
asset = get_object_or_404(Asset, pk=asset_id)
queryset = asset.all_accounts.all()
return queryset
@action(methods=['get'], detail=False, url_path='su-from-accounts')
def su_from_accounts(self, request, *args, **kwargs):
account_id = request.query_params.get('account')
@@ -117,7 +127,7 @@ class AccountViewSet(OrgBulkModelViewSet):
self.model.objects.create(**account_data)
success_count += 1
except Exception as e:
logger.debug(f'{ "Move" if move else "Copy" } to assets error: {e}')
logger.debug(f'{"Move" if move else "Copy"} to assets error: {e}')
creation_results[asset] = {'error': _('Account already exists'), 'state': 'error'}
results = [{'asset': str(asset), **res} for asset, res in creation_results.items()]

View File

@@ -62,8 +62,7 @@ class IntegrationApplicationViewSet(OrgBulkModelViewSet):
)
def get_once_secret(self, request, *args, **kwargs):
instance = self.get_object()
secret = instance.get_secret()
return Response(data={'id': instance.id, 'secret': secret})
return Response(data={'id': instance.id, 'secret': instance.secret})
@action(['GET'], detail=False, url_path='account-secret',
permission_classes=[RBACPermission])

View File

@@ -17,7 +17,7 @@ from orgs.mixins import generics
__all__ = [
'AutomationAssetsListApi', 'AutomationRemoveAssetApi',
'AutomationAddAssetApi', 'AutomationNodeAddRemoveApi',
'AutomationExecutionViewSet', 'RecordListMixin'
'AutomationExecutionViewSet'
]
@@ -39,9 +39,10 @@ class AutomationAssetsListApi(generics.ListAPIView):
return assets
class AutomationRemoveAssetApi(generics.RetrieveUpdateAPIView):
class AutomationRemoveAssetApi(generics.UpdateAPIView):
model = BaseAutomation
serializer_class = serializers.UpdateAssetSerializer
http_method_names = ['patch']
def update(self, request, *args, **kwargs):
instance = self.get_object()
@@ -56,9 +57,10 @@ class AutomationRemoveAssetApi(generics.RetrieveUpdateAPIView):
return Response({'msg': 'ok'})
class AutomationAddAssetApi(generics.RetrieveUpdateAPIView):
class AutomationAddAssetApi(generics.UpdateAPIView):
model = BaseAutomation
serializer_class = serializers.UpdateAssetSerializer
http_method_names = ['patch']
def update(self, request, *args, **kwargs):
instance = self.get_object()
@@ -72,9 +74,10 @@ class AutomationAddAssetApi(generics.RetrieveUpdateAPIView):
return Response({"error": serializer.errors})
class AutomationNodeAddRemoveApi(generics.RetrieveUpdateAPIView):
class AutomationNodeAddRemoveApi(generics.UpdateAPIView):
model = BaseAutomation
serializer_class = serializers.UpdateNodeSerializer
http_method_names = ['patch']
def update(self, request, *args, **kwargs):
action_params = ['add', 'remove']
@@ -124,12 +127,3 @@ class AutomationExecutionViewSet(
execution = self.get_object()
report = execution.manager.gen_report()
return HttpResponse(report)
class RecordListMixin:
    """Mixin for record list endpoints: wraps ``list`` so that failures
    surface to the client as a 400 response instead of a server error."""

    def list(self, request, *args, **kwargs):
        try:
            response = super().list(request, *args, **kwargs)
        except Exception as e:
            # NOTE(review): catching bare Exception turns programming errors
            # into client errors — consider narrowing the exception types.
            response = Response({'detail': str(e)}, status=status.HTTP_400_BAD_REQUEST)
        return response

View File

@@ -16,7 +16,7 @@ from orgs.mixins.api import OrgBulkModelViewSet, OrgGenericViewSet
from rbac.permissions import RBACPermission
from .base import (
AutomationAssetsListApi, AutomationRemoveAssetApi, AutomationAddAssetApi,
AutomationNodeAddRemoveApi, AutomationExecutionViewSet, RecordListMixin
AutomationNodeAddRemoveApi, AutomationExecutionViewSet
)
__all__ = [
@@ -35,7 +35,7 @@ class ChangeSecretAutomationViewSet(OrgBulkModelViewSet):
serializer_class = serializers.ChangeSecretAutomationSerializer
class ChangeSecretRecordViewSet(RecordListMixin, mixins.ListModelMixin, OrgGenericViewSet):
class ChangeSecretRecordViewSet(mixins.ListModelMixin, OrgGenericViewSet):
filterset_class = ChangeSecretRecordFilterSet
permission_classes = [RBACPermission, IsValidLicense]
search_fields = ('asset__address', 'account__username')

View File

@@ -147,6 +147,7 @@ class CheckAccountEngineViewSet(JMSModelViewSet):
serializer_class = serializers.CheckAccountEngineSerializer
permission_classes = [RBACPermission, IsValidLicense]
perm_model = CheckAccountEngine
http_method_names = ['get', 'options']
def get_queryset(self):
return CheckAccountEngine.get_default_engines()

View File

@@ -9,7 +9,7 @@ from accounts.models import PushAccountAutomation, PushSecretRecord
from orgs.mixins.api import OrgBulkModelViewSet, OrgGenericViewSet
from .base import (
AutomationAssetsListApi, AutomationRemoveAssetApi, AutomationAddAssetApi,
AutomationNodeAddRemoveApi, AutomationExecutionViewSet, RecordListMixin
AutomationNodeAddRemoveApi, AutomationExecutionViewSet
)
__all__ = [
@@ -42,7 +42,7 @@ class PushAccountExecutionViewSet(AutomationExecutionViewSet):
return queryset
class PushAccountRecordViewSet(RecordListMixin, mixins.ListModelMixin, OrgGenericViewSet):
class PushAccountRecordViewSet(mixins.ListModelMixin, OrgGenericViewSet):
filterset_class = PushAccountRecordFilterSet
search_fields = ('asset__address', 'account__username')
ordering_fields = ('date_finished',)

View File

@@ -10,7 +10,7 @@ from accounts.models import BaseAccountQuerySet
from accounts.utils import SecretGenerator
from assets.automations.base.manager import BasePlaybookManager
from assets.const import HostTypes
from common.db.utils import safe_db_connection
from common.db.utils import safe_atomic_db_connection
from common.utils import get_logger
logger = get_logger(__name__)
@@ -69,7 +69,7 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
return
asset = privilege_account.asset
accounts = asset.accounts.all()
accounts = asset.all_accounts.all()
accounts = accounts.filter(id__in=self.account_ids, secret_reset=True)
if self.secret_type:
@@ -94,6 +94,7 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
h['account'] = {
'name': account.name,
'username': account.username,
'full_username': account.full_username,
'secret_type': secret_type,
'secret': account.escape_jinja2_syntax(new_secret),
'private_key_path': private_key_path,
@@ -169,7 +170,7 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
)
super().on_host_success(host, result)
with safe_db_connection():
with safe_atomic_db_connection():
account.save(update_fields=['secret', 'date_updated', 'date_change_secret', 'change_secret_status'])
self.save_record(recorder)
@@ -197,6 +198,6 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
)
super().on_host_error(host, error, result)
with safe_db_connection():
with safe_atomic_db_connection():
account.save(update_fields=['change_secret_status', 'date_change_secret', 'date_updated'])
self.save_record(recorder)

View File

@@ -41,6 +41,7 @@
password: "{{ account.secret | password_hash('des') }}"
update_password: always
ignore_errors: true
register: change_secret_result
when: account.secret_type == "password"
- name: "Get home directory for {{ account.username }}"
@@ -83,6 +84,7 @@
user: "{{ account.username }}"
key: "{{ account.secret }}"
exclusive: "{{ ssh_params.exclusive }}"
register: change_secret_result
when: account.secret_type == "ssh_key"
- name: Refresh connection
@@ -101,7 +103,9 @@
become_password: "{{ account.become.ansible_password | default('') }}"
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: account.secret_type == "password" and check_conn_after_change
when:
- account.secret_type == "password"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost
- name: "Verify {{ account.username }} SSH KEY (paramiko)"
@@ -112,5 +116,7 @@
login_private_key_path: "{{ account.private_key_path }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: account.secret_type == "ssh_key" and check_conn_after_change
when:
- account.secret_type == "ssh_key"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost

View File

@@ -41,6 +41,7 @@
password: "{{ account.secret | password_hash('sha512') }}"
update_password: always
ignore_errors: true
register: change_secret_result
when: account.secret_type == "password"
- name: "Get home directory for {{ account.username }}"
@@ -83,6 +84,7 @@
user: "{{ account.username }}"
key: "{{ account.secret }}"
exclusive: "{{ ssh_params.exclusive }}"
register: change_secret_result
when: account.secret_type == "ssh_key"
- name: Refresh connection
@@ -101,7 +103,9 @@
become_password: "{{ account.become.ansible_password | default('') }}"
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: account.secret_type == "password" and check_conn_after_change
when:
- account.secret_type == "password"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost
- name: "Verify {{ account.username }} SSH KEY (paramiko)"
@@ -112,5 +116,7 @@
login_private_key_path: "{{ account.private_key_path }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: account.secret_type == "ssh_key" and check_conn_after_change
when:
- account.secret_type == "ssh_key"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost

View File

@@ -0,0 +1,27 @@
# Change an Active Directory account's password with win_domain_user.
# `account`, `params` and `check_conn_after_change` are template variables,
# presumably injected by the automation engine — confirm against the runner.
- hosts: demo
  gather_facts: no
  tasks:
    # Fail fast if the privileged login account cannot reach the host.
    - name: Test privileged account
      ansible.windows.win_ping:

    - name: Change password
      community.windows.win_domain_user:
        name: "{{ account.username }}"
        password: "{{ account.secret }}"
        update_password: always
        password_never_expires: yes
        state: present
        groups: "{{ params.groups }}"
        groups_action: add  # add to the listed groups; never replace membership
      ignore_errors: true
      when: account.secret_type == "password"

    # Drop the cached WinRM connection so verification logs in fresh.
    - name: Refresh connection
      ansible.builtin.meta: reset_connection

    # Re-authenticate with the new secret (only when verification is enabled).
    - name: Verify password
      ansible.windows.win_ping:
      vars:
        ansible_user: "{{ account.full_username }}"
        ansible_password: "{{ account.secret }}"
      when: account.secret_type == "password" and check_conn_after_change

View File

@@ -0,0 +1,27 @@
id: change_secret_ad_windows
name: "{{ 'Windows account change secret' | trans }}"
version: 1
method: change_secret
category:
- ds
type:
- windows_ad
params:
- name: groups
type: str
label: '用户组'
default: 'Users,Remote Desktop Users'
help_text: "{{ 'Params groups help text' | trans }}"
i18n:
Windows account change secret:
zh: '使用 Ansible 模块 win_domain_user 执行 Windows 账号改密'
ja: 'Ansible win_domain_user モジュールを使用して Windows アカウントのパスワード変更'
en: 'Using Ansible module win_domain_user to change Windows account secret'
Params groups help text:
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

View File

@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a2805a0264fc07ae597704841ab060edef8bf74654f525bc778cb9195d8cad0e
size 2547712

View File

@@ -12,6 +12,7 @@ from accounts.models import Account, AccountRisk, RiskChoice
from assets.automations.base.manager import BaseManager
from common.const import ConfirmOrIgnore
from common.decorators import bulk_create_decorator, bulk_update_decorator
from settings.models import LeakPasswords
@bulk_create_decorator(AccountRisk)
@@ -157,10 +158,8 @@ class CheckLeakHandler(BaseCheckHandler):
if not account.secret:
return False
sql = 'SELECT 1 FROM passwords WHERE password = ? LIMIT 1'
self.cursor.execute(sql, (account.secret,))
leak = self.cursor.fetchone() is not None
return leak
is_exist = LeakPasswords.objects.using('sqlite').filter(password=account.secret).exists()
return is_exist
def clean(self):
self.cursor.close()

View File

@@ -13,6 +13,7 @@ def parse_date(date_str, default=None):
formats = [
'%Y/%m/%d %H:%M:%S',
'%Y-%m-%dT%H:%M:%S',
'%Y-%m-%d %H:%M:%S',
'%d-%m-%Y %H:%M:%S',
'%Y/%m/%d',
'%d-%m-%Y',
@@ -26,7 +27,6 @@ def parse_date(date_str, default=None):
return default
# TODO 后期会挪到 playbook 中
class GatherAccountsFilter:
def __init__(self, tp):
    """Store the asset type key (``tp``) used to select the per-type filter method."""
    self.tp = tp
@@ -208,14 +208,35 @@ class GatherAccountsFilter:
key, value = parts
user_info[key.strip()] = value.strip()
detail = {'groups': user_info.get('Global Group memberships', ''), }
user = {
'username': user_info.get('User name', ''),
'date_password_change': parse_date(user_info.get('Password last set', '')),
'date_password_expired': parse_date(user_info.get('Password expires', '')),
'date_last_login': parse_date(user_info.get('Last logon', '')),
username = user_info.get('User name')
if not username:
continue
result[username] = {
'username': username,
'date_password_change': parse_date(user_info.get('Password last set')),
'date_password_expired': parse_date(user_info.get('Password expires')),
'date_last_login': parse_date(user_info.get('Last logon')),
'groups': detail,
}
return result
@staticmethod
def windows_ad_filter(info):
    """Normalize Get-ADUser gather output into a username-keyed dict.

    Each entry of ``info['user_details']`` is one AD user record; rows
    without a ``SamAccountName`` are skipped. Date fields are parsed via
    the module-level ``parse_date`` helper (None-tolerant).

    Returns a dict mapping username -> account info dict.
    """
    result = {}
    for user_info in info['user_details']:
        username = user_info.get('SamAccountName')
        if not username:
            continue
        detail = {'groups': user_info.get('GlobalGroupMemberships', '')}
        result[username] = {
            'username': username,
            'date_password_change': parse_date(user_info.get('PasswordLastSet')),
            'date_password_expired': parse_date(user_info.get('PasswordExpires')),
            'date_last_login': parse_date(user_info.get('LastLogonDate')),
            'groups': detail,
        }
    # Fix: removed leftover `result[user['username']] = user` — `user` was
    # never defined in this method (refactor residue) and raised NameError.
    return result
@staticmethod

View File

@@ -4,6 +4,7 @@
- name: Run net user command to get all users
win_shell: net user
register: user_list_output
failed_when: false
- name: Parse all users from net user command
set_fact:

View File

@@ -2,10 +2,13 @@ id: gather_accounts_windows
name: "{{ 'Windows account gather' | trans }}"
version: 1
method: gather_accounts
category: host
category:
- host
type:
- windows
i18n:
Windows account gather:
zh: 使用命令 net user 收集 Windows 账号

View File

@@ -0,0 +1,74 @@
- hosts: demo
gather_facts: no
tasks:
- name: Import ActiveDirectory module
win_shell: Import-Module ActiveDirectory
args:
warn: false
- name: Get the SamAccountName list of all AD users
win_shell: |
Import-Module ActiveDirectory
Get-ADUser -Filter * | Select-Object -ExpandProperty SamAccountName
register: ad_user_list
- name: Set the all_users variable
set_fact:
all_users: "{{ ad_user_list.stdout_lines }}"
- name: Get detailed information for each user
win_shell: |
Import-Module ActiveDirectory
$user = Get-ADUser -Identity {{ item }} -Properties Name, SamAccountName, Enabled, LastLogonDate, PasswordLastSet, msDS-UserPasswordExpiryTimeComputed, MemberOf
$globalGroups = @()
if ($user.MemberOf) {
$globalGroups = $user.MemberOf | ForEach-Object {
try {
$group = Get-ADGroup $_ -ErrorAction Stop
if ($group.GroupScope -eq 'Global') { $group.Name }
} catch {
}
}
}
$passwordExpiry = $null
$expiryRaw = $user.'msDS-UserPasswordExpiryTimeComputed'
if ($expiryRaw) {
try {
$passwordExpiry = [datetime]::FromFileTime($expiryRaw)
} catch {
$passwordExpiry = $null
}
}
$output = [PSCustomObject]@{
Name = $user.Name
SamAccountName = $user.SamAccountName
Enabled = $user.Enabled
LastLogonDate = if ($user.LastLogonDate) { $user.LastLogonDate.ToString("yyyy-MM-dd HH:mm:ss") } else { $null }
PasswordLastSet = if ($user.PasswordLastSet) { $user.PasswordLastSet.ToString("yyyy-MM-dd HH:mm:ss") } else { $null }
PasswordExpires = if ($passwordExpiry) { $passwordExpiry.ToString("yyyy-MM-dd HH:mm:ss") } else { $null }
GlobalGroupMemberships = $globalGroups
}
$output | ConvertTo-Json -Depth 3
loop: "{{ all_users }}"
register: ad_user_details
ignore_errors: yes
- set_fact:
info:
user_details: >-
{{
ad_user_details.results
| selectattr('rc', 'equalto', 0)
| map(attribute='stdout')
| select('truthy')
| map('from_json')
}}
- debug:
var: info

View File

@@ -0,0 +1,15 @@
id: gather_accounts_windows_ad
name: "{{ 'Windows account gather' | trans }}"
version: 1
method: gather_accounts
category:
- ds
type:
- windows_ad
i18n:
Windows account gather:
zh: 使用命令 Get-ADUser 收集 Windows 账号
ja: コマンド Get-ADUser を使用して Windows アカウントを収集する
en: Using command Get-ADUser to gather accounts

View File

@@ -1,6 +1,6 @@
import time
from collections import defaultdict
import time
from django.utils import timezone
from accounts.const import AutomationTypes
@@ -222,6 +222,7 @@ class GatherAccountsManager(AccountBasePlaybookManager):
def _collect_asset_account_info(self, asset, info):
result = self._filter_success_result(asset.type, info)
accounts = []
for username, info in result.items():
self.asset_usernames_mapper[str(asset.id)].add(username)
@@ -373,6 +374,7 @@ class GatherAccountsManager(AccountBasePlaybookManager):
for asset, accounts_data in self.asset_account_info.items():
ori_users = self.ori_asset_usernames[str(asset.id)]
need_analyser_gather_account = []
with tmp_to_org(asset.org_id):
for d in accounts_data:
username = d["username"]
@@ -385,10 +387,11 @@ class GatherAccountsManager(AccountBasePlaybookManager):
ga = ori_account
self.update_gathered_account(ori_account, d)
ori_found = username in ori_users
risk_analyser.analyse_risk(asset, ga, d, ori_found)
need_analyser_gather_account.append((asset, ga, d, ori_found))
self.create_gathered_account.finish()
self.update_gathered_account.finish()
for analysis_data in need_analyser_gather_account:
risk_analyser.analyse_risk(*analysis_data)
self.update_gather_accounts_status(asset)
if not self.is_sync_account:
continue

View File

@@ -41,6 +41,7 @@
password: "{{ account.secret | password_hash('des') }}"
update_password: always
ignore_errors: true
register: change_secret_result
when: account.secret_type == "password"
- name: "Get home directory for {{ account.username }}"
@@ -83,6 +84,7 @@
user: "{{ account.username }}"
key: "{{ account.secret }}"
exclusive: "{{ ssh_params.exclusive }}"
register: change_secret_result
when: account.secret_type == "ssh_key"
- name: Refresh connection
@@ -101,7 +103,9 @@
become_password: "{{ account.become.ansible_password | default('') }}"
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: account.secret_type == "password" and check_conn_after_change
when:
- account.secret_type == "password"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost
- name: "Verify {{ account.username }} SSH KEY (paramiko)"
@@ -112,6 +116,8 @@
login_private_key_path: "{{ account.private_key_path }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: account.secret_type == "ssh_key" and check_conn_after_change
when:
- account.secret_type == "ssh_key"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost

View File

@@ -41,6 +41,7 @@
password: "{{ account.secret | password_hash('sha512') }}"
update_password: always
ignore_errors: true
register: change_secret_result
when: account.secret_type == "password"
- name: "Get home directory for {{ account.username }}"
@@ -83,6 +84,7 @@
user: "{{ account.username }}"
key: "{{ account.secret }}"
exclusive: "{{ ssh_params.exclusive }}"
register: change_secret_result
when: account.secret_type == "ssh_key"
- name: Refresh connection
@@ -101,7 +103,9 @@
become_password: "{{ account.become.ansible_password | default('') }}"
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: account.secret_type == "password" and check_conn_after_change
when:
- account.secret_type == "password"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost
- name: "Verify {{ account.username }} SSH KEY (paramiko)"
@@ -112,6 +116,8 @@
login_private_key_path: "{{ account.private_key_path }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: account.secret_type == "ssh_key" and check_conn_after_change
when:
- account.secret_type == "ssh_key"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost

View File

@@ -0,0 +1,27 @@
- hosts: demo
gather_facts: no
tasks:
- name: Test privileged account
ansible.windows.win_ping:
- name: Push user password
community.windows.win_domain_user:
name: "{{ account.username }}"
password: "{{ account.secret }}"
update_password: always
password_never_expires: yes
state: present
groups: "{{ params.groups }}"
groups_action: add
ignore_errors: true
when: account.secret_type == "password"
- name: Refresh connection
ansible.builtin.meta: reset_connection
- name: Verify password
ansible.windows.win_ping:
vars:
ansible_user: "{{ account.full_username }}"
ansible_password: "{{ account.secret }}"
when: account.secret_type == "password" and check_conn_after_change

View File

@@ -0,0 +1,25 @@
id: push_account_ad_windows
name: "{{ 'Windows account push' | trans }}"
version: 1
method: push_account
category:
- ds
type:
- windows_ad
params:
- name: groups
type: str
label: '用户组'
default: 'Users,Remote Desktop Users'
help_text: "{{ 'Params groups help text' | trans }}"
i18n:
Windows account push:
zh: '使用 Ansible 模块 win_domain_user 执行 Windows 账号推送'
ja: 'Ansible win_domain_user モジュールを使用して Windows アカウントをプッシュする'
en: 'Using Ansible module win_domain_user to push account'
Params groups help text:
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

View File

@@ -11,4 +11,5 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: "{{ jms_asset.spec_info.db_name }}"
script: "DROP USER {{ account.username }}"
script: "DROP LOGIN {{ account.username }}; select @@version"

View File

@@ -0,0 +1,9 @@
- hosts: windows
gather_facts: no
tasks:
- name: "Remove account"
ansible.windows.win_domain_user:
name: "{{ account.username }}"
state: absent

View File

@@ -0,0 +1,14 @@
id: remove_account_ad_windows
name: "{{ 'Windows account remove' | trans }}"
version: 1
method: remove_account
category:
- ds
type:
- windows_ad
i18n:
Windows account remove:
zh: 使用 Ansible 模块 win_domain_user 删除账号
ja: Ansible モジュール win_domain_user を使用してアカウントを削除する
en: Use the Ansible module win_domain_user to delete an account

View File

@@ -10,6 +10,6 @@
rdp_ping:
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_user: "{{ account.username }}"
login_user: "{{ account.full_username }}"
login_password: "{{ account.secret }}"
login_secret_type: "{{ account.secret_type }}"

View File

@@ -2,8 +2,10 @@ id: verify_account_by_rdp
name: "{{ 'Windows rdp account verify' | trans }}"
category:
- host
- ds
type:
- windows
- windows_ad
method: verify_account
protocol: rdp
priority: 1

View File

@@ -7,5 +7,5 @@
- name: Verify account
ansible.windows.win_ping:
vars:
ansible_user: "{{ account.username }}"
ansible_user: "{{ account.full_username }}"
ansible_password: "{{ account.secret }}"

View File

@@ -2,9 +2,12 @@ id: verify_account_windows
name: "{{ 'Windows account verify' | trans }}"
version: 1
method: verify_account
category: host
category:
- host
- ds
type:
- windows
- windows_ad
i18n:
Windows account verify:

View File

@@ -42,7 +42,7 @@ class VerifyAccountManager(AccountBasePlaybookManager):
if host.get('error'):
return host
accounts = asset.accounts.all()
accounts = asset.all_accounts.all()
accounts = self.get_accounts(account, accounts)
inventory_hosts = []
@@ -64,6 +64,7 @@ class VerifyAccountManager(AccountBasePlaybookManager):
h['account'] = {
'name': account.name,
'username': account.username,
'full_username': account.full_username,
'secret_type': account.secret_type,
'secret': account.escape_jinja2_syntax(secret),
'private_key_path': private_key_path,
@@ -84,6 +85,7 @@ class VerifyAccountManager(AccountBasePlaybookManager):
def on_host_error(self, host, error, result):
account = self.host_account_mapper.get(host)
try:
account.set_connectivity(Connectivity.ERR)
error_tp = account.get_err_connectivity(error)
account.set_connectivity(error_tp)
except Exception as e:
print(f'\033[31m Update account {account.name} connectivity failed: {e} \033[0m\n')

View File

@@ -5,7 +5,6 @@ import uuid
import django_filters
from django.db.models import Q
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from django_filters import rest_framework as drf_filters
from rest_framework import filters
from rest_framework.compat import coreapi
@@ -13,11 +12,26 @@ from rest_framework.compat import coreapi
from assets.models import Node
from assets.utils import get_node_from_request
from common.drf.filters import BaseFilterSet
from common.utils import get_logger
from common.utils.timezone import local_zero_hour, local_now
from .const.automation import ChangeSecretRecordStatusChoice
from .models import Account, GatheredAccount, ChangeSecretRecord, PushSecretRecord, IntegrationApplication, \
AutomationExecution
logger = get_logger(__file__)
class UUIDFilterMixin:
    """Filter-set mixin that validates UUID filter values.

    ``filter_uuid`` returns an empty queryset (after logging a warning)
    for malformed UUID strings instead of letting the database layer
    raise on a bad literal; well-formed values are filtered normally.
    """

    @staticmethod
    def filter_uuid(queryset, name, value):
        is_valid = True
        try:
            uuid.UUID(value)
        except ValueError:
            is_valid = False
        if is_valid:
            return queryset.filter(**{name: value})
        logger.warning(f"Invalid UUID: {value}")
        return queryset.none()
class NodeFilterBackend(filters.BaseFilterBackend):
fields = ['node_id']
@@ -43,14 +57,15 @@ class NodeFilterBackend(filters.BaseFilterBackend):
return queryset
class AccountFilterSet(BaseFilterSet):
class AccountFilterSet(UUIDFilterMixin, BaseFilterSet):
ip = drf_filters.CharFilter(field_name="address", lookup_expr="exact")
name = drf_filters.CharFilter(field_name="name", lookup_expr="exact")
hostname = drf_filters.CharFilter(field_name="name", lookup_expr="exact")
username = drf_filters.CharFilter(field_name="username", lookup_expr="exact")
address = drf_filters.CharFilter(field_name="asset__address", lookup_expr="exact")
asset_id = drf_filters.CharFilter(field_name="asset", lookup_expr="exact")
asset = drf_filters.CharFilter(field_name="asset", lookup_expr="exact")
assets = drf_filters.CharFilter(field_name="asset_id", lookup_expr="exact")
asset_name = drf_filters.CharFilter(field_name="asset__name", lookup_expr="exact")
asset_id = drf_filters.CharFilter(field_name="asset", method="filter_uuid")
assets = drf_filters.CharFilter(field_name="asset_id", method="filter_uuid")
has_secret = drf_filters.BooleanFilter(method="filter_has_secret")
platform = drf_filters.CharFilter(
field_name="asset__platform_id", lookup_expr="exact"
@@ -135,8 +150,9 @@ class AccountFilterSet(BaseFilterSet):
kwargs.update({"date_change_secret__gt": date})
if name == "latest_secret_change_failed":
queryset = queryset.filter(date_change_secret__gt=date).exclude(
change_secret_status=ChangeSecretRecordStatusChoice.success
queryset = (
queryset.filter(date_change_secret__gt=date)
.exclude(change_secret_status=ChangeSecretRecordStatusChoice.success)
)
if kwargs:
@@ -146,8 +162,8 @@ class AccountFilterSet(BaseFilterSet):
class Meta:
model = Account
fields = [
"id", "asset", "source_id", "secret_type", "category",
"type", "privileged", "secret_reset", "connectivity", 'is_active'
"id", "source_id", "secret_type", "category", "type",
"privileged", "secret_reset", "connectivity", "is_active"
]
@@ -185,16 +201,6 @@ class SecretRecordMixin(drf_filters.FilterSet):
return queryset.filter(date_finished__gte=dt)
class UUIDExecutionFilterMixin:
@staticmethod
def filter_execution(queryset, name, value):
try:
uuid.UUID(value)
except ValueError:
raise ValueError(_('Enter a valid UUID.'))
return queryset.filter(**{name: value})
class DaysExecutionFilterMixin:
days = drf_filters.NumberFilter(method="filter_days")
field: str
@@ -209,10 +215,10 @@ class DaysExecutionFilterMixin:
class ChangeSecretRecordFilterSet(
SecretRecordMixin, UUIDExecutionFilterMixin,
SecretRecordMixin, UUIDFilterMixin,
DaysExecutionFilterMixin, BaseFilterSet
):
execution_id = django_filters.CharFilter(method="filter_execution")
execution_id = django_filters.CharFilter(method="filter_uuid")
days = drf_filters.NumberFilter(method="filter_days")
field = 'date_finished'
@@ -230,8 +236,8 @@ class AutomationExecutionFilterSet(DaysExecutionFilterMixin, BaseFilterSet):
fields = ["days", 'trigger', 'automation_id', 'automation__name']
class PushAccountRecordFilterSet(SecretRecordMixin, UUIDExecutionFilterMixin, BaseFilterSet):
execution_id = django_filters.CharFilter(method="filter_execution")
class PushAccountRecordFilterSet(SecretRecordMixin, UUIDFilterMixin, BaseFilterSet):
execution_id = django_filters.CharFilter(method="filter_uuid")
class Meta:
model = PushSecretRecord

View File

@@ -629,10 +629,15 @@ class Migration(migrations.Migration):
name="connectivity",
field=models.CharField(
choices=[
("-", "Unknown"),
("na", "N/A"),
("ok", "OK"),
("err", "Error"),
('-', 'Unknown'),
('na', 'N/A'),
('ok', 'OK'),
('err', 'Error'),
('auth_err', 'Authentication error'),
('password_err', 'Invalid password error'),
('openssh_key_err', 'OpenSSH key error'),
('ntlm_err', 'NTLM credentials rejected error'),
('create_temp_err', 'Create temporary error')
],
default="-",
max_length=16,

View File

@@ -0,0 +1,29 @@
# Generated by Django 4.1.13 on 2025-05-06 10:23
from django.db import migrations, models
class Migration(migrations.Migration):
    """Widen ``Account.connectivity`` choices with finer-grained error
    states (RDP / auth / password / OpenSSH key / NTLM / temp-account)."""

    dependencies = [
        ('accounts', '0006_alter_accountrisk_username_and_more'),
    ]

    operations = [
        migrations.AlterField(
            model_name='account',
            name='connectivity',
            field=models.CharField(choices=[
                ('-', 'Unknown'),
                ('na', 'N/A'),
                ('ok', 'OK'),
                ('err', 'Error'),
                ('rdp_err', 'RDP error'),
                ('auth_err', 'Authentication error'),
                ('password_err', 'Invalid password error'),
                ('openssh_key_err', 'OpenSSH key error'),
                ('ntlm_err', 'NTLM credentials rejected error'),
                ('create_temp_err', 'Create temporary error')
            ],
                default='-', max_length=16, verbose_name='Connectivity'),
        ),
    ]

View File

@@ -1,65 +1,15 @@
from rest_framework.response import Response
from rest_framework import status
from django.db.models import Model
from django.utils import translation
from django.utils.translation import gettext_noop
from audits.const import ActionChoices
from common.views.mixins import RecordViewLogMixin
from common.utils import i18n_fmt
from audits.handler import create_or_update_operate_log
class AccountRecordViewLogMixin(RecordViewLogMixin):
class AccountRecordViewLogMixin(object):
get_object: callable
get_queryset: callable
@staticmethod
def _filter_params(params):
new_params = {}
need_pop_params = ('format', 'order')
for key, value in params.items():
if key in need_pop_params:
continue
if isinstance(value, list):
value = list(filter(None, value))
if value:
new_params[key] = value
return new_params
def get_resource_display(self, request):
query_params = dict(request.query_params)
params = self._filter_params(query_params)
spm_filter = params.pop("spm", None)
if not params and not spm_filter:
display_message = gettext_noop("Export all")
elif spm_filter:
display_message = gettext_noop("Export only selected items")
else:
query = ",".join(
["%s=%s" % (key, value) for key, value in params.items()]
)
display_message = i18n_fmt(gettext_noop("Export filtered: %s"), query)
return display_message
@property
def detail_msg(self):
return i18n_fmt(
gettext_noop('User %s view/export secret'), self.request.user
)
def list(self, request, *args, **kwargs):
list_func = getattr(super(), 'list')
if not callable(list_func):
return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
response = list_func(request, *args, **kwargs)
with translation.override('en'):
resource_display = self.get_resource_display(request)
ids = [q.id for q in self.get_queryset()]
self.record_logs(
ids, ActionChoices.view, self.detail_msg, resource_display=resource_display
)
return response
model: Model
def retrieve(self, request, *args, **kwargs):
retrieve_func = getattr(super(), 'retrieve')
@@ -67,9 +17,9 @@ class AccountRecordViewLogMixin(RecordViewLogMixin):
return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
response = retrieve_func(request, *args, **kwargs)
with translation.override('en'):
resource = self.get_object()
self.record_logs(
[resource.id], ActionChoices.view, self.detail_msg, resource=resource
create_or_update_operate_log(
ActionChoices.view, self.model._meta.verbose_name,
force=True, resource=self.get_object(),
)
return response

View File

@@ -131,9 +131,49 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
@lazyproperty
def alias(self):
"""
别称,因为有虚拟账号,@INPUT @MANUAL @USER, 否则为 id
"""
if self.username.startswith('@'):
return self.username
return self.name
return str(self.id)
def is_virtual(self):
    """Return True when this is a virtual account (alias starts with '@').

    Do not test ``username`` for this: the object may be a constructed
    account carrying the username of a real, same-named account —
    ``alias`` is the reliable discriminator.
    """
    return self.alias.startswith('@')
def is_ds_account(self):
    """Return whether this account belongs to a directory-service asset.

    Virtual accounts are never DS accounts. Fix: the previous code
    returned '' (an empty string, apparently copy-pasted from
    ``ds_domain``) for virtual accounts; callers expect a boolean, so
    this now returns False — still falsy, so behavior is compatible.
    """
    if self.is_virtual():
        return False
    if not self.asset.is_directory_service:
        return False
    return True
@lazyproperty
def ds(self):
    """The directory service of this account's asset, or None for
    virtual / non-DS accounts."""
    if not self.is_ds_account():
        return None
    return self.asset.ds
@lazyproperty
def ds_domain(self):
    """Domain name of the account's directory service ('' when absent).

    Do not remove: perm_account dynamically overrides this value to
    change ``full_username``.
    """
    if self.is_virtual():
        return ''
    if self.ds and self.ds.domain_name:
        return self.ds.domain_name
    return ''
def username_has_domain(self):
    """Return True when the username already embeds a domain marker
    ('user@domain' or 'DOMAIN\\user')."""
    markers = ('@', '\\')
    return any(m in self.username for m in markers)
@property
def full_username(self):
    """Username qualified with the DS domain ('user@domain'), unless the
    username already carries its own domain marker."""
    if not self.username_has_domain() and self.ds_domain:
        return '{}@{}'.format(self.username, self.ds_domain)
    return self.username
@lazyproperty
def has_secret(self):

View File

@@ -92,8 +92,9 @@ class VirtualAccount(JMSOrgBaseModel):
from .account import Account
username = user.username
alias = AliasAccount.USER.value
with tmp_to_org(asset.org):
same_account = cls.objects.filter(alias='@USER').first()
same_account = cls.objects.filter(alias=alias).first()
secret = ''
if same_account and same_account.secret_from_login:
@@ -101,4 +102,6 @@ class VirtualAccount(JMSOrgBaseModel):
if not secret and not from_permed:
secret = input_secret
return Account(name=AliasAccount.USER.label, username=username, secret=secret)
account = Account(name=AliasAccount.USER.label, username=username, secret=secret)
account.alias = alias
return account

View File

@@ -233,6 +233,7 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
required=False, queryset=Account.objects, allow_null=True, allow_empty=True,
label=_('Su from'), attrs=('id', 'name', 'username')
)
ds = ObjectRelatedField(read_only=True, label=_('Directory service'), attrs=('id', 'name', 'domain_name'))
class Meta(BaseAccountSerializer.Meta):
model = Account
@@ -241,7 +242,7 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
'date_change_secret', 'change_secret_status'
]
fields = BaseAccountSerializer.Meta.fields + [
'su_from', 'asset', 'version',
'su_from', 'asset', 'version', 'ds',
'source', 'source_id', 'secret_reset',
] + AccountCreateUpdateSerializerMixin.Meta.fields + automation_fields
read_only_fields = BaseAccountSerializer.Meta.read_only_fields + automation_fields
@@ -258,7 +259,7 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
queryset = queryset.prefetch_related(
'asset', 'asset__platform',
'asset__platform__automation'
).prefetch_related('labels', 'labels__label')
)
return queryset

View File

@@ -1,9 +1,11 @@
from django.templatetags.static import static
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
from accounts.models import IntegrationApplication
from acls.serializers.rules import ip_group_child_validator, ip_group_help_text
from common.serializers.fields import JSONManyToManyField
from common.utils import random_string
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
@@ -27,13 +29,18 @@ class IntegrationApplicationSerializer(BulkOrgResourceModelSerializer):
'name': {'label': _('Name')},
'accounts_amount': {'label': _('Accounts amount')},
'is_active': {'default': True},
'logo': {'required': False},
}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
request_method = self.context.get('request').method
if request_method == 'PUT':
self.fields['logo'].required = False
def to_representation(self, instance):
data = super().to_representation(instance)
if not data.get('logo'):
data['logo'] = static('img/logo.png')
return data
def validate(self, attrs):
attrs['secret'] = random_string(36)
return attrs
class IntegrationAccountSecretSerializer(serializers.Serializer):

View File

@@ -107,16 +107,18 @@ def execute_automation_record_task(record_ids, tp):
)
@register_as_period_task(crontab=CRONTAB_AT_AM_THREE)
def clean_change_secret_and_push_record_period():
from accounts.models import ChangeSecretRecord
from accounts.models import ChangeSecretRecord, PushSecretRecord
print('Start clean change secret and push record period')
with tmp_to_root_org():
now = timezone.now()
days = get_log_keep_day('ACCOUNT_CHANGE_SECRET_RECORD_KEEP_DAYS')
expired_day = now - datetime.timedelta(days=days)
records = ChangeSecretRecord.objects.filter(
date_updated__lt=expired_day
).filter(
Q(execution__isnull=True) | Q(asset__isnull=True) | Q(account__isnull=True)
)
expired_time = now - datetime.timedelta(days=days)
records.delete()
null_related_q = Q(execution__isnull=True) | Q(asset__isnull=True) | Q(account__isnull=True)
expired_q = Q(date_updated__lt=expired_time)
ChangeSecretRecord.objects.filter(null_related_q).delete()
ChangeSecretRecord.objects.filter(expired_q).delete()
PushSecretRecord.objects.filter(null_related_q).delete()
PushSecretRecord.objects.filter(expired_q).delete()

View File

@@ -129,7 +129,7 @@
</tbody>
</table>
{% else %}
<p class="no-data">{% trans 'No new accounts found' %}</p>
<p class="no-data">{% trans 'No lost accounts found' %}</p>
{% endif %}
</div>
</section>

View File

@@ -8,6 +8,6 @@ class ActionChoices(models.TextChoices):
review = 'review', _('Review')
warning = 'warning', _('Warn')
notice = 'notice', _('Notify')
notify_and_warn = 'notify_and_warn', _('Notify and warn')
notify_and_warn = 'notify_and_warn', _('Prompt and warn')
face_verify = 'face_verify', _('Face Verify')
face_online = 'face_online', _('Face Online')

View File

@@ -32,9 +32,9 @@ class CommandFilterACLSerializer(BaseSerializer, BulkOrgResourceModelSerializer)
class Meta(BaseSerializer.Meta):
model = CommandFilterACL
fields = BaseSerializer.Meta.fields + ['command_groups']
action_choices_exclude = [ActionChoices.notice,
ActionChoices.face_verify,
ActionChoices.face_online]
action_choices_exclude = [
ActionChoices.notice, ActionChoices.face_verify, ActionChoices.face_online
]
class CommandReviewSerializer(serializers.Serializer):

View File

@@ -14,5 +14,6 @@ class ConnectMethodACLSerializer(BaseSerializer, BulkOrgResourceModelSerializer)
if i not in ['assets', 'accounts']
]
action_choices_exclude = BaseSerializer.Meta.action_choices_exclude + [
ActionChoices.review, ActionChoices.accept, ActionChoices.notice
ActionChoices.review, ActionChoices.accept, ActionChoices.notice,
ActionChoices.face_verify, ActionChoices.face_online
]

View File

@@ -18,7 +18,12 @@ class LoginACLSerializer(BaseUserACLSerializer, BulkOrgResourceModelSerializer):
class Meta(BaseUserACLSerializer.Meta):
model = LoginACL
fields = BaseUserACLSerializer.Meta.fields + ['rules', ]
action_choices_exclude = [ActionChoices.face_online, ActionChoices.face_verify]
action_choices_exclude = [
ActionChoices.warning,
ActionChoices.notify_and_warn,
ActionChoices.face_online,
ActionChoices.face_verify
]
def get_rules_serializer(self):
return RuleSerializer()

View File

@@ -1,10 +1,10 @@
from .asset import *
from .category import *
from .domain import *
from .favorite_asset import *
from .mixin import *
from .my_asset import *
from .node import *
from .platform import *
from .protocol import *
from .tree import *
from .my_asset import *
from .zone import *

View File

@@ -3,6 +3,7 @@ from .cloud import *
from .custom import *
from .database import *
from .device import *
from .ds import *
from .gpt import *
from .host import *
from .permission import *

View File

@@ -11,6 +11,7 @@ from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.status import HTTP_200_OK
from accounts.serializers import AccountSerializer
from accounts.tasks import push_accounts_to_assets_task, verify_accounts_connectivity_task
from assets import serializers
from assets.exceptions import NotSupportedTemporarilyError
@@ -36,12 +37,12 @@ class AssetFilterSet(BaseFilterSet):
platform = drf_filters.CharFilter(method='filter_platform')
is_gateway = drf_filters.BooleanFilter(method='filter_is_gateway')
exclude_platform = drf_filters.CharFilter(field_name="platform__name", lookup_expr='exact', exclude=True)
domain = drf_filters.CharFilter(method='filter_domain')
zone = drf_filters.CharFilter(method='filter_zone')
type = drf_filters.CharFilter(field_name="platform__type", lookup_expr="exact")
category = drf_filters.CharFilter(field_name="platform__category", lookup_expr="exact")
protocols = drf_filters.CharFilter(method='filter_protocols')
domain_enabled = drf_filters.BooleanFilter(
field_name="platform__domain_enabled", lookup_expr="exact"
gateway_enabled = drf_filters.BooleanFilter(
field_name="platform__gateway_enabled", lookup_expr="exact"
)
ping_enabled = drf_filters.BooleanFilter(
field_name="platform__automation__ping_enabled", lookup_expr="exact"
@@ -84,11 +85,11 @@ class AssetFilterSet(BaseFilterSet):
return queryset
@staticmethod
def filter_domain(queryset, name, value):
def filter_zone(queryset, name, value):
if is_uuid(value):
return queryset.filter(domain_id=value)
return queryset.filter(zone_id=value)
else:
return queryset.filter(domain__name__contains=value)
return queryset.filter(zone__name__contains=value)
@staticmethod
def filter_protocols(queryset, name, value):
@@ -96,10 +97,10 @@ class AssetFilterSet(BaseFilterSet):
return queryset.filter(protocols__name__in=value).distinct()
class AssetViewSet(SuggestionMixin, OrgBulkModelViewSet):
"""
API endpoint that allows Asset to be viewed or edited.
class BaseAssetViewSet(OrgBulkModelViewSet):
"""
API endpoint that allows Asset to be viewed or edited.
"""
model = Asset
filterset_class = AssetFilterSet
search_fields = ("name", "address", "comment")
@@ -109,18 +110,19 @@ class AssetViewSet(SuggestionMixin, OrgBulkModelViewSet):
("platform", serializers.PlatformSerializer),
("suggestion", serializers.MiniAssetSerializer),
("gateways", serializers.GatewaySerializer),
("accounts", AccountSerializer),
)
rbac_perms = (
("match", "assets.match_asset"),
("platform", "assets.view_platform"),
("gateways", "assets.view_gateway"),
("accounts", "assets.view_account"),
("spec_info", "assets.view_asset"),
("gathered_info", "assets.view_asset"),
("sync_platform_protocols", "assets.change_asset"),
)
extra_filter_backends = [
IpInFilterBackend,
NodeFilterBackend, AttrRulesFilterBackend
IpInFilterBackend, NodeFilterBackend, AttrRulesFilterBackend
]
def perform_destroy(self, instance):
@@ -141,6 +143,25 @@ class AssetViewSet(SuggestionMixin, OrgBulkModelViewSet):
return retrieve_cls
return cls
def paginate_queryset(self, queryset):
page = super().paginate_queryset(queryset)
if page:
page = Asset.compute_all_accounts_amount(page)
return page
def create(self, request, *args, **kwargs):
if request.path.find('/api/v1/assets/assets/') > -1:
error = _('Cannot create asset directly, you should create a host or other')
return Response({'error': error}, status=400)
if not settings.XPACK_LICENSE_IS_VALID and self.model.objects.order_by().count() >= 5000:
error = _('The number of assets exceeds the limit of 5000')
return Response({'error': error}, status=400)
return super().create(request, *args, **kwargs)
class AssetViewSet(SuggestionMixin, BaseAssetViewSet):
@action(methods=["GET"], detail=True, url_path="platform")
def platform(self, *args, **kwargs):
asset = super().get_object()
@@ -150,10 +171,10 @@ class AssetViewSet(SuggestionMixin, OrgBulkModelViewSet):
@action(methods=["GET"], detail=True, url_path="gateways")
def gateways(self, *args, **kwargs):
asset = self.get_object()
if not asset.domain:
if not asset.zone:
gateways = Gateway.objects.none()
else:
gateways = asset.domain.gateways
gateways = asset.zone.gateways
return self.get_paginated_response_from_queryset(gateways)
@action(methods=['post'], detail=False, url_path='sync-platform-protocols')
@@ -189,17 +210,6 @@ class AssetViewSet(SuggestionMixin, OrgBulkModelViewSet):
Protocol.objects.bulk_create(objs)
return Response(status=status.HTTP_200_OK)
def create(self, request, *args, **kwargs):
if request.path.find('/api/v1/assets/assets/') > -1:
error = _('Cannot create asset directly, you should create a host or other')
return Response({'error': error}, status=400)
if not settings.XPACK_LICENSE_IS_VALID and self.model.objects.order_by().count() >= 5000:
error = _('The number of assets exceeds the limit of 5000')
return Response({'error': error}, status=400)
return super().create(request, *args, **kwargs)
def filter_bulk_update_data(self):
bulk_data = []
skip_assets = []

View File

@@ -1,12 +1,12 @@
from assets.models import Cloud, Asset
from assets.serializers import CloudSerializer
from .asset import AssetViewSet
from .asset import BaseAssetViewSet
__all__ = ['CloudViewSet']
class CloudViewSet(AssetViewSet):
class CloudViewSet(BaseAssetViewSet):
model = Cloud
perm_model = Asset

View File

@@ -1,12 +1,12 @@
from assets.models import Custom, Asset
from assets.serializers import CustomSerializer
from .asset import AssetViewSet
from .asset import BaseAssetViewSet
__all__ = ['CustomViewSet']
class CustomViewSet(AssetViewSet):
class CustomViewSet(BaseAssetViewSet):
model = Custom
perm_model = Asset

View File

@@ -1,12 +1,12 @@
from assets.models import Database, Asset
from assets.serializers import DatabaseSerializer
from .asset import AssetViewSet
from .asset import BaseAssetViewSet
__all__ = ['DatabaseViewSet']
class DatabaseViewSet(AssetViewSet):
class DatabaseViewSet(BaseAssetViewSet):
model = Database
perm_model = Asset

View File

@@ -1,11 +1,11 @@
from assets.serializers import DeviceSerializer
from assets.models import Device, Asset
from .asset import AssetViewSet
from assets.serializers import DeviceSerializer
from .asset import BaseAssetViewSet
__all__ = ['DeviceViewSet']
class DeviceViewSet(AssetViewSet):
class DeviceViewSet(BaseAssetViewSet):
model = Device
perm_model = Asset

View File

@@ -0,0 +1,16 @@
from assets.models import DirectoryService, Asset
from assets.serializers import DSSerializer
from .asset import BaseAssetViewSet
__all__ = ['DSViewSet']
class DSViewSet(BaseAssetViewSet):
model = DirectoryService
perm_model = Asset
def get_serializer_classes(self):
serializer_classes = super().get_serializer_classes()
serializer_classes['default'] = DSSerializer
return serializer_classes

View File

@@ -1,12 +1,12 @@
from assets.models import GPT, Asset
from assets.serializers import GPTSerializer
from .asset import AssetViewSet
from .asset import BaseAssetViewSet
__all__ = ['GPTViewSet']
class GPTViewSet(AssetViewSet):
class GPTViewSet(BaseAssetViewSet):
model = GPT
perm_model = Asset

View File

@@ -1,11 +1,11 @@
from assets.models import Host, Asset
from assets.serializers import HostSerializer
from .asset import AssetViewSet
from .asset import BaseAssetViewSet
__all__ = ['HostViewSet']
class HostViewSet(AssetViewSet):
class HostViewSet(BaseAssetViewSet):
model = Host
perm_model = Asset

View File

@@ -1,12 +1,12 @@
from assets.models import Web, Asset
from assets.serializers import WebSerializer
from .asset import AssetViewSet
from .asset import BaseAssetViewSet
__all__ = ['WebViewSet']
class WebViewSet(AssetViewSet):
class WebViewSet(BaseAssetViewSet):
model = Web
perm_model = Asset

View File

@@ -52,7 +52,7 @@ class AssetPlatformViewSet(JMSModelViewSet):
queryset = (
super().get_queryset()
.annotate(assets_amount=Coalesce(Subquery(asset_count_subquery), Value(0)))
.prefetch_related('protocols', 'automation', 'labels', 'labels__label')
.prefetch_related('protocols', 'automation')
)
queryset = queryset.filter(type__in=AllTypes.get_types_values())
return queryset

View File

@@ -9,24 +9,24 @@ from common.utils import get_logger
from orgs.mixins.api import OrgBulkModelViewSet
from .asset import HostViewSet
from .. import serializers
from ..models import Domain, Gateway
from ..models import Zone, Gateway
logger = get_logger(__file__)
__all__ = ['DomainViewSet', 'GatewayViewSet', "GatewayTestConnectionApi"]
__all__ = ['ZoneViewSet', 'GatewayViewSet', "GatewayTestConnectionApi"]
class DomainViewSet(OrgBulkModelViewSet):
model = Domain
class ZoneViewSet(OrgBulkModelViewSet):
model = Zone
filterset_fields = ("name",)
search_fields = filterset_fields
serializer_classes = {
'default': serializers.DomainSerializer,
'list': serializers.DomainListSerializer,
'default': serializers.ZoneSerializer,
'list': serializers.ZoneListSerializer,
}
def get_serializer_class(self):
if self.request.query_params.get('gateway'):
return serializers.DomainWithGatewaySerializer
return serializers.ZoneWithGatewaySerializer
return super().get_serializer_class()
def partial_update(self, request, *args, **kwargs):
@@ -36,8 +36,8 @@ class DomainViewSet(OrgBulkModelViewSet):
class GatewayViewSet(HostViewSet):
perm_model = Gateway
filterset_fields = ("domain__name", "name", "domain")
search_fields = ("domain__name",)
filterset_fields = ("zone__name", "name", "zone")
search_fields = ("zone__name",)
def get_serializer_classes(self):
serializer_classes = super().get_serializer_classes()
@@ -45,7 +45,7 @@ class GatewayViewSet(HostViewSet):
return serializer_classes
def get_queryset(self):
queryset = Domain.get_gateway_queryset()
queryset = Zone.get_gateway_queryset()
return queryset
@@ -55,7 +55,7 @@ class GatewayTestConnectionApi(SingleObjectMixin, APIView):
}
def get_queryset(self):
queryset = Domain.get_gateway_queryset()
queryset = Zone.get_gateway_queryset()
return queryset
def post(self, request, *args, **kwargs):

View File

@@ -3,10 +3,10 @@ import json
import logging
import os
import shutil
import time
from collections import defaultdict
from socket import gethostname
import time
import yaml
from django.conf import settings
from django.template.loader import render_to_string
@@ -17,7 +17,7 @@ from sshtunnel import SSHTunnelForwarder
from assets.automations.methods import platform_automation_methods
from common.const import Status
from common.db.utils import safe_db_connection
from common.db.utils import safe_atomic_db_connection
from common.tasks import send_mail_async
from common.utils import get_logger, lazyproperty, is_openssh_format_key, ssh_pubkey_gen
from ops.ansible import JMSInventory, DefaultCallback, SuperPlaybookRunner
@@ -123,7 +123,7 @@ class BaseManager:
self.execution.result = self.result
self.execution.status = self.status
with safe_db_connection():
with safe_atomic_db_connection():
self.execution.save()
def print_summary(self):
@@ -334,7 +334,8 @@ class PlaybookPrepareMixin:
return sub_playbook_path
def check_automation_enabled(self, platform, assets):
if not platform.automation or not platform.automation.ansible_enabled:
automation = getattr(platform, 'automation', None)
if not (automation and getattr(automation, 'ansible_enabled', False)):
print(_(" - Platform {} ansible disabled").format(platform.name))
self.on_assets_not_ansible_enabled(assets)
return False

View File

@@ -1,3 +1,5 @@
from collections import Counter
__all__ = ['FormatAssetInfo']
@@ -7,13 +9,37 @@ class FormatAssetInfo:
self.tp = tp
@staticmethod
def posix_format(info):
for cpu_model in info.get('cpu_model', []):
if cpu_model.endswith('GHz') or cpu_model.startswith("Intel"):
break
else:
cpu_model = ''
info['cpu_model'] = cpu_model[:48]
def get_cpu_model_count(cpus):
try:
models = [cpus[i + 1] + " " + cpus[i + 2] for i in range(0, len(cpus), 3)]
model_counts = Counter(models)
result = ', '.join([f"{model} x{count}" for model, count in model_counts.items()])
except Exception as e:
print(f"Error processing CPU model list: {e}")
result = ''
return result
@staticmethod
def get_gpu_model_count(gpus):
try:
model_counts = Counter(gpus)
result = ', '.join([f"{model} x{count}" for model, count in model_counts.items()])
except Exception as e:
print(f"Error processing GPU model list: {e}")
result = ''
return result
def posix_format(self, info):
cpus = self.get_cpu_model_count(info.get('cpu_model', []))
gpus = self.get_gpu_model_count(info.get('gpu_model', []))
info['gpu_model'] = gpus
info['cpu_model'] = cpus
info['cpu_count'] = info.get('cpu_count', 0)
return info

View File

@@ -23,5 +23,16 @@
arch: "{{ ansible_architecture }}"
kernel: "{{ ansible_kernel }}"
- name: Get GPU info with nvidia-smi
shell: |
nvidia-smi --query-gpu=name,memory.total,driver_version --format=csv,noheader,nounits
register: gpu_info
ignore_errors: yes
- name: Merge GPU info into final info
set_fact:
info: "{{ info | combine({'gpu_model': gpu_info.stdout_lines | default([])}) }}"
- debug:
var: info

View File

@@ -2,9 +2,12 @@ id: gather_facts_windows
name: "{{ 'Gather facts windows' | trans }}"
version: 1
method: gather_facts
category: host
category:
- host
- ds
type:
- windows
- windows_ad
i18n:
Gather facts windows:
zh: '使用 Ansible 指令 gather_facts 从 Windows 获取设备信息'

View File

@@ -3,8 +3,10 @@ name: "{{ 'Ping by pyfreerdp' | trans }}"
category:
- device
- host
- ds
type:
- windows
- windows_ad
method: ping
protocol: rdp
priority: 1

View File

@@ -3,6 +3,7 @@ name: "{{ 'Ping by paramiko' | trans }}"
category:
- device
- host
- ds
type:
- all
method: ping

View File

@@ -3,6 +3,7 @@ name: "{{ 'Ping by telnet' | trans }}"
category:
- device
- host
- ds
type:
- all
method: ping

View File

@@ -2,9 +2,12 @@ id: win_ping
name: "{{ 'Windows ping' | trans }}"
version: 1
method: ping
category: host
category:
- host
- ds
type:
- windows
- windows_ad
i18n:
Windows ping:
zh: 使用 Ansible 模块 内置模块 win_ping 来测试可连接性

View File

@@ -37,10 +37,11 @@ class PingManager(BasePlaybookManager):
def on_host_error(self, host, error, result):
asset, account = self.host_asset_and_account_mapper.get(host)
try:
asset.set_connectivity(Connectivity.ERR)
error_tp = asset.get_err_connectivity(error)
asset.set_connectivity(error_tp)
if not account:
return
account.set_connectivity(Connectivity.ERR)
account.set_connectivity(error_tp)
except Exception as e:
print(f'\033[31m Update account {account.name} or '
f'update asset {asset.name} connectivity failed: {e} \033[0m\n')

View File

@@ -7,6 +7,12 @@ class Connectivity(TextChoices):
NA = 'na', _('N/A')
OK = 'ok', _('OK')
ERR = 'err', _('Error')
RDP_ERR = 'rdp_err', _('RDP error')
AUTH_ERR = 'auth_err', _('Authentication error')
PASSWORD_ERR = 'password_err', _('Invalid password error')
OPENSSH_KEY_ERR = 'openssh_key_err', _('OpenSSH key error')
NTLM_ERR = 'ntlm_err', _('NTLM credentials rejected error')
CREATE_TEMPORARY_ERR = 'create_temp_err', _('Create temporary error')
class AutomationTypes(TextChoices):

View File

@@ -37,7 +37,7 @@ class FillType(models.TextChoices):
class BaseType(TextChoices):
"""
约束应该考虑代是对平台对限制,避免多余对选项,如: mysql 开启 ssh,
或者开启了也没有作用, 比如 k8s 开启了 domain目前还不支持
或者开启了也没有作用, 比如 k8s 开启了 gateway 目前还不支持
"""
@classmethod
@@ -112,8 +112,7 @@ class BaseType(TextChoices):
@classmethod
def get_choices(cls):
if not settings.XPACK_LICENSE_IS_VALID:
choices = cls.choices
if not settings.XPACK_LICENSE_IS_VALID and hasattr(cls, 'get_community_types'):
choices = [(tp.value, tp.label) for tp in cls.get_community_types()]
else:
choices = cls.choices
return choices

View File

@@ -12,6 +12,7 @@ class Category(ChoicesMixin, models.TextChoices):
DATABASE = 'database', _("Database")
CLOUD = 'cloud', _("Cloud service")
WEB = 'web', _("Web")
DS = 'ds', _("Directory service")
CUSTOM = 'custom', _("Custom type")
@classmethod

View File

@@ -13,11 +13,11 @@ class CloudTypes(BaseType):
return {
'*': {
'charset_enabled': False,
'domain_enabled': False,
'gateway_enabled': False,
'su_enabled': False,
},
cls.K8S: {
'domain_enabled': True,
'gateway_enabled': True,
}
}

View File

@@ -20,7 +20,7 @@ class CustomTypes(BaseType):
return {
'*': {
'charset_enabled': False,
'domain_enabled': False,
'gateway_enabled': False,
'su_enabled': False,
},
}

View File

@@ -20,7 +20,7 @@ class DatabaseTypes(BaseType):
return {
'*': {
'charset_enabled': False,
'domain_enabled': True,
'gateway_enabled': True,
'su_enabled': False,
}
}

View File

@@ -19,7 +19,8 @@ class DeviceTypes(BaseType):
return {
'*': {
'charset_enabled': False,
'domain_enabled': True,
'gateway_enabled': True,
'ds_enabled': True,
'su_enabled': True,
'su_methods': ['enable', 'super', 'super_level']
}

70
apps/assets/const/ds.py Normal file
View File

@@ -0,0 +1,70 @@
from django.utils.translation import gettext_lazy as _
from .base import BaseType
class DirectoryTypes(BaseType):
GENERAL = 'general', _('General')
# LDAP = 'ldap', _('LDAP')
# AD = 'ad', _('Active Directory')
WINDOWS_AD = 'windows_ad', _('Windows Active Directory')
# AZURE_AD = 'azure_ad', _('Azure Active Directory')
@classmethod
def _get_base_constrains(cls) -> dict:
return {
'*': {
'charset_enabled': True,
'gateway_enabled': True,
'ds_enabled': False,
'su_enabled': True,
},
cls.WINDOWS_AD: {
'su_enabled': False,
}
}
@classmethod
def _get_automation_constrains(cls) -> dict:
constrains = {
'*': {
'ansible_enabled': False,
},
cls.WINDOWS_AD: {
'ansible_enabled': True,
'ping_enabled': True,
'gather_facts_enabled': True,
'verify_account_enabled': True,
'change_secret_enabled': True,
'push_account_enabled': True,
'gather_accounts_enabled': True,
'remove_account_enabled': True,
}
}
return constrains
@classmethod
def _get_protocol_constrains(cls) -> dict:
return {
cls.GENERAL: {
'choices': ['ssh']
},
cls.WINDOWS_AD: {
'choices': ['rdp', 'ssh', 'vnc', 'winrm']
},
}
@classmethod
def internal_platforms(cls):
return {
cls.WINDOWS_AD: [
{'name': 'Windows Active Directory'}
],
}
@classmethod
def get_community_types(cls):
return [
cls.GENERAL,
]

View File

@@ -11,7 +11,7 @@ class GPTTypes(BaseType):
return {
'*': {
'charset_enabled': False,
'domain_enabled': False,
'gateway_enabled': False,
'su_enabled': False,
}
}

View File

@@ -18,8 +18,9 @@ class HostTypes(BaseType):
'*': {
'charset_enabled': True,
'charset': 'utf-8', # default
'domain_enabled': True,
'gateway_enabled': True,
'su_enabled': True,
'ds_enabled': True,
'su_methods': ['sudo', 'su', 'only_sudo', 'only_su'],
},
cls.WINDOWS: {
@@ -56,7 +57,6 @@ class HostTypes(BaseType):
'change_secret_enabled': True,
'push_account_enabled': True,
'remove_account_enabled': True,
},
cls.WINDOWS: {
'ansible_config': {
@@ -69,7 +69,6 @@ class HostTypes(BaseType):
'ping_enabled': False,
'gather_facts_enabled': False,
'gather_accounts_enabled': False,
'verify_account_enabled': False,
'change_secret_enabled': False,
'push_account_enabled': False
},
@@ -82,7 +81,7 @@ class HostTypes(BaseType):
{'name': 'Linux'},
{
'name': GATEWAY_NAME,
'domain_enabled': True,
'gateway_enabled': True,
}
],
cls.UNIX: [
@@ -126,5 +125,5 @@ class HostTypes(BaseType):
@classmethod
def get_community_types(cls) -> list:
return [
cls.LINUX, cls.UNIX, cls.WINDOWS, cls.OTHER_HOST
cls.LINUX, cls.WINDOWS, cls.UNIX, cls.OTHER_HOST
]

View File

@@ -344,6 +344,20 @@ class Protocol(ChoicesMixin, models.TextChoices):
if not xpack_enabled and config.get('xpack', False):
continue
protocols.append(protocol)
from assets.models.platform import PlatformProtocol
custom_protocols = (
PlatformProtocol.objects
.filter(platform__category='custom')
.values_list('name', flat=True)
.distinct()
)
for protocol in custom_protocols:
if protocol not in protocols:
if not protocol:
continue
label = protocol[0].upper() + protocol[1:]
protocols.append({'label': label, 'value': protocol})
return protocols
@classmethod

View File

@@ -13,6 +13,7 @@ from .cloud import CloudTypes
from .custom import CustomTypes
from .database import DatabaseTypes
from .device import DeviceTypes
from .ds import DirectoryTypes
from .gpt import GPTTypes
from .host import HostTypes
from .web import WebTypes
@@ -22,7 +23,8 @@ class AllTypes(ChoicesMixin):
choices: list
includes = [
HostTypes, DeviceTypes, DatabaseTypes,
CloudTypes, WebTypes, CustomTypes, GPTTypes
CloudTypes, WebTypes, CustomTypes,
DirectoryTypes, GPTTypes
]
_category_constrains = {}
_automation_methods = None
@@ -173,6 +175,7 @@ class AllTypes(ChoicesMixin):
(Category.DATABASE, DatabaseTypes),
(Category.WEB, WebTypes),
(Category.CLOUD, CloudTypes),
(Category.DS, DirectoryTypes),
(Category.CUSTOM, CustomTypes)
]
return types
@@ -309,7 +312,7 @@ class AllTypes(ChoicesMixin):
'category': category,
'type': tp, 'internal': True,
'charset': constraints.get('charset', 'utf-8'),
'domain_enabled': constraints.get('domain_enabled', False),
'gateway_enabled': constraints.get('gateway_enabled', False),
'su_enabled': constraints.get('su_enabled', False),
}
if data['su_enabled'] and data.get('su_methods'):

View File

@@ -11,7 +11,7 @@ class WebTypes(BaseType):
return {
'*': {
'charset_enabled': False,
'domain_enabled': False,
'gateway_enabled': False,
'su_enabled': False,
}
}

View File

@@ -29,8 +29,19 @@ class Migration(migrations.Migration):
('org_id',
models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
('connectivity',
models.CharField(choices=[('-', 'Unknown'), ('na', 'N/A'), ('ok', 'OK'), ('err', 'Error')],
default='-', max_length=16, verbose_name='Connectivity')),
models.CharField(
choices=[
('-', 'Unknown'),
('na', 'N/A'),
('ok', 'OK'),
('err', 'Error'),
('auth_err', 'Authentication error'),
('password_err', 'Invalid password error'),
('openssh_key_err', 'OpenSSH key error'),
('ntlm_err', 'NTLM credentials rejected error'),
('create_temp_err', 'Create temporary error')
],
default='-', max_length=16, verbose_name='Connectivity')),
('date_verified', models.DateTimeField(null=True, verbose_name='Date verified')),
('name', models.CharField(max_length=128, verbose_name='Name')),
('address', models.CharField(db_index=True, max_length=767, verbose_name='Address')),
@@ -46,7 +57,8 @@ class Migration(migrations.Migration):
('match_asset', 'Can match asset'), ('change_assetnodes', 'Can change asset nodes')],
},
bases=(
assets.models.asset.common.NodesRelationMixin, assets.models.asset.common.JSONFilterMixin, models.Model),
assets.models.asset.common.NodesRelationMixin, assets.models.asset.common.JSONFilterMixin,
models.Model),
),
migrations.CreateModel(
name='AutomationExecution',

View File

@@ -1,11 +1,11 @@
# Generated by Django 4.1.13 on 2024-05-09 03:16
import json
import assets.models.asset.common
from django.db.models import F, Q
import django.db.models.deletion
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
from django.db.models import F
import assets.models.asset.common
class Migration(migrations.Migration):
@@ -39,22 +39,26 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='automationexecution',
name='automation',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='executions', to='assets.baseautomation', verbose_name='Automation task'),
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='executions',
to='assets.baseautomation', verbose_name='Automation task'),
),
migrations.AddField(
model_name='asset',
name='domain',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='assets', to='assets.domain', verbose_name='Zone'),
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL,
related_name='assets', to='assets.domain', verbose_name='Zone'),
),
migrations.AddField(
model_name='asset',
name='nodes',
field=models.ManyToManyField(default=assets.models.asset.common.default_node, related_name='assets', to='assets.node', verbose_name='Nodes'),
field=models.ManyToManyField(default=assets.models.asset.common.default_node, related_name='assets',
to='assets.node', verbose_name='Nodes'),
),
migrations.AddField(
model_name='asset',
name='platform',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='assets', to='assets.platform', verbose_name='Platform'),
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='assets',
to='assets.platform', verbose_name='Platform'),
),
migrations.CreateModel(
name='AssetBaseAutomation',
@@ -71,7 +75,9 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='GatherFactsAutomation',
fields=[
('baseautomation_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='assets.baseautomation')),
('baseautomation_ptr',
models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True,
primary_key=True, serialize=False, to='assets.baseautomation')),
],
options={
'verbose_name': 'Gather asset facts',
@@ -81,7 +87,9 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='PingAutomation',
fields=[
('baseautomation_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='assets.baseautomation')),
('baseautomation_ptr',
models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True,
primary_key=True, serialize=False, to='assets.baseautomation')),
],
options={
'verbose_name': 'Ping asset',

View File

@@ -0,0 +1,57 @@
# Generated by Django 4.1.13 on 2025-04-03 09:51
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
    """Introduce the DirectoryService asset subtype and wire it up.

    Adds:
      * ``DirectoryService`` — a multi-table child of ``Asset``
        (``bases=('assets.asset',)``) carrying a ``domain_name``.
      * ``Platform.ds_enabled`` — per-platform switch for directory-service
        support.
      * ``Asset.directory_services`` — M2M so an asset can join any number
        of directory services.
    """

    dependencies = [
        ('assets', '0015_automationexecution_type'),
    ]

    operations = [
        migrations.CreateModel(
            name='DirectoryService',
            fields=[
                # Parent link for multi-table inheritance from Asset.
                ('asset_ptr', models.OneToOneField(
                    auto_created=True,
                    on_delete=django.db.models.deletion.CASCADE,
                    parent_link=True,
                    primary_key=True,
                    serialize=False,
                    to='assets.asset',
                )),
                ('domain_name', models.CharField(
                    blank=True, default='', max_length=128,
                    verbose_name='Domain name',
                )),
            ],
            options={
                'verbose_name': 'Directory service',
                'default_related_name': 'ds',
            },
            bases=('assets.asset',),
        ),
        migrations.AddField(
            model_name='platform',
            name='ds_enabled',
            field=models.BooleanField(default=False, verbose_name='DS enabled'),
        ),
        migrations.AddField(
            model_name='asset',
            name='directory_services',
            field=models.ManyToManyField(
                related_name='assets',
                to='assets.directoryservice',
                verbose_name='Directory service',
            ),
        ),
    ]

View File

@@ -0,0 +1,165 @@
# Generated by Django 4.1.13 on 2025-04-07 03:24
import json
from django.db import migrations
from assets.const import AllTypes
def add_ds_platforms(apps, schema_editor):
    """Seed the two built-in directory-service ('ds') platforms.

    Parses the embedded JSON below and creates/updates the internal
    ``WindowsActiveDirectory`` and ``General`` platforms, including their
    protocol and automation configuration. Intended to run as the forward
    function of a ``migrations.RunPython`` operation.

    NOTE(review): this delegates to ``AllTypes.create_or_update_by_platform_data``
    imported from live application code rather than using only historical
    models, so its behavior can drift as the app evolves — a known trade-off
    for data migrations that reuse application logic; confirm this is intended.
    """
    # Platform definitions as a JSON literal; parsed below with json.loads,
    # so the whitespace inside the string is irrelevant to behavior.
    data = """
    [
      {
        "created_by": "system",
        "updated_by": "system",
        "comment": "",
        "name": "WindowsActiveDirectory",
        "category": "ds",
        "type": "windows_ad",
        "meta": {},
        "internal": true,
        "domain_enabled": true,
        "su_enabled": false,
        "su_method": null,
        "custom_fields": [],
        "automation": {
            "ansible_enabled": true,
            "ansible_config": {
                "ansible_shell_type": "cmd",
                "ansible_connection": "ssh"
            },
            "ping_enabled": true,
            "ping_method": "ping_by_rdp",
            "ping_params": {},
            "gather_facts_enabled": true,
            "gather_facts_method": "gather_facts_windows",
            "gather_facts_params": {},
            "change_secret_enabled": true,
            "change_secret_method": "change_secret_ad_windows",
            "change_secret_params": {
            },
            "push_account_enabled": true,
            "push_account_method": "push_account_ad_windows",
            "push_account_params": {},
            "verify_account_enabled": true,
            "verify_account_method": "verify_account_by_rdp",
            "verify_account_params": {
            },
            "gather_accounts_enabled": true,
            "gather_accounts_method": "gather_accounts_windows_ad",
            "gather_accounts_params": {
            },
            "remove_account_enabled": true,
            "remove_account_method": "remove_account_ad_windows",
            "remove_account_params": {
            }
        },
        "protocols": [
            {
                "name": "rdp",
                "port": 3389,
                "primary": true,
                "required": false,
                "default": false,
                "public": true,
                "setting": {
                    "console": false,
                    "security": "any"
                }
            },
            {
                "name": "ssh",
                "port": 22,
                "primary": false,
                "required": false,
                "default": false,
                "public": true,
                "setting": {
                    "sftp_enabled": true,
                    "sftp_home": "/tmp"
                }
            },
            {
                "name": "vnc",
                "port": 5900,
                "primary": false,
                "required": false,
                "default": false,
                "public": true,
                "setting": {
                }
            },
            {
                "name": "winrm",
                "port": 5985,
                "primary": false,
                "required": false,
                "default": false,
                "public": false,
                "setting": {
                    "use_ssl": false
                }
            }
        ]
      },
      {
        "created_by": "system",
        "updated_by": "system",
        "comment": "",
        "name": "General",
        "category": "ds",
        "type": "general",
        "meta": {
        },
        "internal": true,
        "domain_enabled": false,
        "su_enabled": false,
        "su_method": null,
        "custom_fields": [
        ],
        "automation": {
            "ansible_enabled": false,
            "ansible_config": {
            }
        },
        "protocols": [
            {
                "name": "ssh",
                "port": 22,
                "primary": true,
                "required": false,
                "default": false,
                "public": true,
                "setting": {
                    "sftp_enabled": true,
                    "sftp_home": "/tmp"
                }
            }
        ]
      }
    ]
    """
    # Historical model classes for this migration state.
    platform_model = apps.get_model('assets', 'Platform')
    automation_cls = apps.get_model('assets', 'PlatformAutomation')
    platform_datas = json.loads(data)
    for platform_data in platform_datas:
        # Creates/updates the platform plus its protocols and automation rows.
        AllTypes.create_or_update_by_platform_data(
            platform_data, platform_cls=platform_model,
            automation_cls=automation_cls
        )
class Migration(migrations.Migration):
    """Data migration: seed the built-in 'ds' category platforms."""

    dependencies = [
        ("assets", "0016_directory_service"),
    ]

    operations = [
        # Fix: supply reverse_code so the migration is reversible. Without
        # it, `migrate assets 0016` would raise IrreversibleError. noop is
        # appropriate here: the seeded rows are internal platforms and the
        # forward function presumably upserts by name (create-or-update
        # helper), so leaving them in place on unapply is safe — confirm.
        migrations.RunPython(add_ds_platforms, reverse_code=migrations.RunPython.noop)
    ]

View File

@@ -0,0 +1,26 @@
# Generated by Django 4.1.13 on 2025-04-18 08:05
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the Domain concept to Zone across the assets app."""

    dependencies = [('assets', '0017_auto_20250407_1124')]

    operations = [
        # Platform switch: 'domain_enabled' becomes 'gateway_enabled'.
        migrations.RenameField(model_name='platform', old_name='domain_enabled', new_name='gateway_enabled'),
        # The Domain model itself is now called Zone.
        migrations.RenameModel(old_name='Domain', new_name='Zone'),
        # Asset's FK follows the model rename: asset.domain -> asset.zone.
        migrations.RenameField(model_name='asset', old_name='domain', new_name='zone'),
    ]

View File

@@ -0,0 +1,29 @@
# Generated by Django 4.1.13 on 2025-05-06 10:23
from django.db import migrations, models
class Migration(migrations.Migration):
    """Widen Asset.connectivity with finer-grained error states."""

    dependencies = [('assets', '0018_rename_domain_zone')]

    operations = [
        migrations.AlterField(
            model_name='asset',
            name='connectivity',
            field=models.CharField(
                max_length=16,
                default='-',
                verbose_name='Connectivity',
                # (stored value, human-readable label) pairs.
                choices=[
                    ('-', 'Unknown'),
                    ('na', 'N/A'),
                    ('ok', 'OK'),
                    ('err', 'Error'),
                    ('rdp_err', 'RDP error'),
                    ('auth_err', 'Authentication error'),
                    ('password_err', 'Invalid password error'),
                    ('openssh_key_err', 'OpenSSH key error'),
                    ('ntlm_err', 'NTLM credentials rejected error'),
                    ('create_temp_err', 'Create temporary error'),
                ],
            ),
        ),
    ]

View File

@@ -1,9 +1,10 @@
# noqa
from .base import *
from .platform import *
from .asset import *
from .label import Label
from .gateway import *
from .domain import *
from .zone import * # noqa
from .node import *
from .favorite_asset import *
from .automations import *

View File

@@ -3,6 +3,7 @@ from .common import *
from .custom import *
from .database import *
from .device import *
from .ds import *
from .gpt import *
from .host import *
from .web import *

View File

@@ -6,7 +6,7 @@ import logging
from collections import defaultdict
from django.db import models
from django.db.models import Q
from django.db.models import Q, Count
from django.forms import model_to_dict
from django.utils.translation import gettext_lazy as _
@@ -168,13 +168,17 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
platform = models.ForeignKey(
Platform, on_delete=models.PROTECT, verbose_name=_("Platform"), related_name='assets'
)
domain = models.ForeignKey(
"assets.Domain", null=True, blank=True, related_name='assets',
zone = models.ForeignKey(
"assets.Zone", null=True, blank=True, related_name='assets',
verbose_name=_("Zone"), on_delete=models.SET_NULL
)
nodes = models.ManyToManyField(
'assets.Node', default=default_node, related_name='assets', verbose_name=_("Nodes")
)
directory_services = models.ManyToManyField(
'assets.DirectoryService', related_name='assets',
verbose_name=_("Directory service")
)
is_active = models.BooleanField(default=True, verbose_name=_('Active'))
gathered_info = models.JSONField(verbose_name=_('Gathered info'), default=dict, blank=True) # 资产的一些信息,如 硬件信息
custom_info = models.JSONField(verbose_name=_('Custom info'), default=dict)
@@ -201,6 +205,10 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
info[i.name] = v
return info
@lazyproperty
def is_directory_service(self):
return self.category == const.Category.DS and hasattr(self, 'ds')
@lazyproperty
def spec_info(self):
instance = getattr(self, self.category, None)
@@ -236,7 +244,7 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
platform = self.platform
auto_config = {
'su_enabled': platform.su_enabled,
'domain_enabled': platform.domain_enabled,
'gateway_enabled': platform.gateway_enabled,
'ansible_enabled': False
}
automation = getattr(self.platform, 'automation', None)
@@ -245,9 +253,28 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
auto_config.update(model_to_dict(automation))
return auto_config
@property
def all_accounts(self):
if not self.joined_dir_svcs:
queryset = self.accounts.all()
else:
queryset = self.accounts.model.objects.filter(asset__in=[self.id, *self.joined_dir_svcs])
return queryset
@property
def dc_accounts(self):
queryset = self.accounts.model.objects.filter(asset__in=[*self.joined_dir_svcs])
return queryset
@lazyproperty
def all_valid_accounts(self):
queryset = (self.all_accounts.filter(is_active=True)
.prefetch_related('asset', 'asset__platform'))
return queryset
@lazyproperty
def accounts_amount(self):
return self.accounts.count()
return self.all_accounts.count()
def get_target_ip(self):
return self.address
@@ -259,6 +286,41 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
protocol = self.protocols.all().filter(name=protocol).first()
return protocol.port if protocol else 0
def is_dir_svc(self):
return self.category == const.Category.DS
@property
def joined_dir_svcs(self):
return self.directory_services.all()
@classmethod
def compute_all_accounts_amount(cls, assets):
from .ds import DirectoryService
asset_ids = [asset.id for asset in assets]
asset_id_dc_ids_mapper = defaultdict(list)
dc_ids = set()
asset_dc_relations = (
Asset.directory_services.through.objects
.filter(asset_id__in=asset_ids)
.values_list('asset_id', 'directoryservice_id')
)
for asset_id, ds_id in asset_dc_relations:
dc_ids.add(ds_id)
asset_id_dc_ids_mapper[asset_id].append(ds_id)
directory_services = (
DirectoryService.objects.filter(id__in=dc_ids)
.annotate(accounts_amount=Count('accounts'))
)
ds_accounts_amount_mapper = {ds.id: ds.accounts_amount for ds in directory_services}
for asset in assets:
asset_dc_ids = asset_id_dc_ids_mapper.get(asset.id, [])
for dc_id in asset_dc_ids:
ds_accounts = ds_accounts_amount_mapper.get(dc_id, 0)
asset.accounts_amount += ds_accounts
return assets
@property
def is_valid(self):
warning = ''
@@ -300,11 +362,11 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
@lazyproperty
def gateway(self):
if not self.domain_id:
if not self.zone_id:
return
if not self.platform.domain_enabled:
if not self.platform.gateway_enabled:
return
return self.domain.select_gateway()
return self.zone.select_gateway()
def as_node(self):
from assets.models import Node

Some files were not shown because too many files have changed in this diff Show More