Compare commits


311 Commits
v4.6.0 ... dev

Author SHA1 Message Date
feng
a9d455e867 perf: ntlm_err 2025-04-27 18:53:43 +08:00
feng
d06d26ac54 perf: Display asset/account connectivity error message 2025-04-27 18:50:00 +08:00
fit2bot
e992c44e11
perf: change lfs files download (#15293)
* perf: change lfs files download

* perf: clean unused ansible module

* perf: update lfs download

* perf: Update Dockerfile with new base image tag

* perf: change download path

* perf: Update Dockerfile with new base image tag

---------

Co-authored-by: ibuler <ibuler@qq.com>
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2025-04-27 14:35:10 +08:00
feng
24fe058fd9 perf: lina translate 2025-04-25 18:07:09 +08:00
feng
a3fef9cc54 perf: Update the key when the integration-application is updated or created 2025-04-25 17:29:44 +08:00
ibuler
471053e62a perf: change mcp integrate 2025-04-25 17:28:03 +08:00
jiangweidong
dc6308b030 perf: if the apply-asset-ticket name is 128 characters long, it will raise 500 2025-04-25 17:27:13 +08:00
feng
f016ae6161 perf: add sftplog command models field index 2025-04-25 15:21:55 +08:00
feng
14a8d877e0 perf: ko translate 2025-04-25 15:04:03 +08:00
feng
ddf20570a1 perf: device support ad 2025-04-23 19:38:01 +08:00
feng
1ad9616b7f perf: gather facts gpu info 2025-04-22 17:48:21 +08:00
刘瑞斌
d7bc6bb201 chore: use uv as package-ecosystem 2025-04-21 13:36:24 +08:00
feng
f855043468 perf: luna ru translate 2025-04-21 11:34:51 +08:00
fit2bot
3159a4e794
perf: change domain to zone (#15255)
* perf: change domain to zone

* perf: change domain to zone

* perf: change some word

* perf: update gateway enabled i18n

* perf: change migrations

---------

Co-authored-by: ibuler <ibuler@qq.com>
2025-04-21 10:30:18 +08:00
feng
57fcebfdd3 fix: No data found for the carrying organization 2025-04-18 16:50:07 +08:00
feng626
c500bb4e4c Revert "Revert "perf:Stored command records in ES support accurate searching.""
This reverts commit 6bc1c5bd50.
2025-04-18 16:50:07 +08:00
feng
fd062b0da6 perf: ru translate 2025-04-18 14:52:44 +08:00
ibuler
bcb112d5c6 perf: user profile api 2025-04-18 14:11:56 +08:00
fit2bot
533dbf316c
perf: add ali rds dependencies (#15247)
* perf: add ali rds dependencies

* perf: Update Dockerfile with new base image tag

---------

Co-authored-by: Eric <xplzv@126.com>
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2025-04-18 12:11:51 +08:00
github-actions[bot]
9cce94b709 perf: Update Dockerfile with new base image tag 2025-04-18 11:53:40 +08:00
Eric
8b815d812b perf: modify Dockerfile-base 2025-04-18 11:53:40 +08:00
github-actions[bot]
a168fc8a62 perf: Update Dockerfile with new base image tag 2025-04-18 11:29:07 +08:00
Eric
faae1a09d1 perf: lint dependencies 2025-04-18 11:29:07 +08:00
github-actions[bot]
26e819e120 perf: Update Dockerfile with new base image tag 2025-04-18 11:29:07 +08:00
Eric
79579654a1 perf: use uv tool
perf: add Homepage
perf: add env
2025-04-18 11:29:07 +08:00
feng626
6bc1c5bd50 Revert "perf:Stored command records in ES support accurate searching."
This reverts commit 3d6d2af268.
2025-04-17 20:16:06 +08:00
ibuler
36f312b943 perf: page queryset mixin 2025-04-17 19:47:51 +08:00
ibuler
11811c453b perf: page queryset mixin 2025-04-17 19:47:51 +08:00
ibuler
12fadeec58 perf: revert terminal api 2025-04-17 19:34:23 +08:00
ibuler
b49fd21e08 perf: virtual account api 2025-04-17 17:50:42 +08:00
feng
9b982eb592 perf: change secret api perm 2025-04-17 17:15:06 +08:00
wangruidong
31652ef5b1 fix: include openid in source validation logic 2025-04-17 16:24:00 +08:00
feng
8fef18b991 perf: gather account windows playbook failed_when: false 2025-04-17 15:55:49 +08:00
ibuler
c804c053d2 perf: revert api 2025-04-17 15:53:52 +08:00
ibuler
bef2282604 perf: asset list compute account amount 2025-04-17 15:15:36 +08:00
feng
cabc069045 perf: Translate 2025-04-17 15:12:16 +08:00
feng
99c9a021b7 fix: update applet host (platform failed) 2025-04-17 14:55:36 +08:00
ibuler
6cb3cc1f29 perf: update some DS translations 2025-04-17 14:47:11 +08:00
feng
67422ef4ba fix: automation no account 2025-04-17 14:00:24 +08:00
gerry
3d6d2af268 perf:Stored command records in ES support accurate searching. 2025-04-17 11:43:16 +08:00
wangruidong
ee97e45cc3 fix: Allow superusers delete adhoc and playbook 2025-04-17 10:54:47 +08:00
feng
0131eaa6db perf: es search 2025-04-16 18:15:58 +08:00
feng
eaa390fd6f perf: update asset directory_services allow_empty true 2025-04-16 17:39:55 +08:00
ibuler
e2b8fd0d40 perf: change account filter by asset 2025-04-16 17:37:36 +08:00
feng
2aace05099 perf: as account username 2025-04-16 17:29:23 +08:00
ibuler
1ee70af93d perf: applet account select 2025-04-16 16:43:34 +08:00
feng
fa70fb2921 perf: Translate 2025-04-16 15:37:11 +08:00
ibuler
01a6019022 perf: swagger api 2025-04-16 14:53:51 +08:00
wangruidong
5c61a11d82 fix: add periodic_display to read_only_fields in Job serializer 2025-04-16 14:18:11 +08:00
fit2bot
67f3341310
perf: change db prefetch (#15215) 2025-04-16 13:48:12 +08:00
feng
cb49e26387 perf: refresh asset type tree 2025-04-16 11:44:07 +08:00
feng
314da330c0 perf: Asset account filter 2025-04-16 11:36:58 +08:00
halo
f1c98fda34 perf: client version 2025-04-16 10:39:10 +08:00
ibuler
1fdd1036d3 perf: directory service db 2025-04-15 20:24:10 +08:00
feng
e286997090 perf: koko translate 2025-04-15 17:26:06 +08:00
wangruidong
ce3daf5496 fix: update translation strings and improve error handling in inventory and job modules 2025-04-15 16:49:35 +08:00
feng
631570b819 perf: Asset filter 2025-04-15 16:45:50 +08:00
feng
9b1bff0847 perf: client version 2025-04-15 12:28:59 +08:00
feng
ee8a2afe16 fix: ES no data found 2025-04-15 12:07:56 +08:00
CaptainB
1a01c0537c chore: Add Dependabot configuration for pip dependencies 2025-04-15 11:47:59 +08:00
jiangweidong
64393fe695 fix: Error in using set method 2025-04-15 11:47:01 +08:00
jiangweidong
11ef4fab4e perf: Es subsequent optimization 2025-04-15 11:47:01 +08:00
jiangweidong
9f8256f885 fix: Solve the problem that log details cannot be viewed in non-default organizations 2025-04-15 11:47:01 +08:00
fit2bot
5390fbacec
perf: some swagger api (#15203)
* perf: some swagger api

* perf: update deps

* perf: Update Dockerfile with new base image tag

---------

Co-authored-by: ibuler <ibuler@qq.com>
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
2025-04-15 11:43:36 +08:00
wangruidong
8b9fe3c72b perf: add default logo handling in IntegrationApplicationSerializer 2025-04-15 11:08:15 +08:00
wangruidong
20070e0647 fix: improve crontab validation logic 2025-04-15 11:07:23 +08:00
ibuler
47b72cb35e perf: add leak password db config 2025-04-14 17:27:27 +08:00
feng
2ca0e9a5a2 perf: automation .account -> .all_accounts 2025-04-14 14:18:56 +08:00
feng
3b2ac101c8 perf: windows ad asset info 2025-04-14 10:14:38 +08:00
feng
6795f036dd perf: ad asset automation gather_facts_enabled true 2025-04-11 17:53:43 +08:00
feng
aaa1f48258 perf: koko translate 2025-04-11 17:45:58 +08:00
feng
53c5bab203 perf: user login acl remove warning notify_and_warn action 2025-04-10 15:56:31 +08:00
feng
1254d28463 perf: windows ad gather account 2025-04-10 15:33:42 +08:00
wangruidong
d6b1a577fc fix: resolve ForeignKeyViolation in AccountRisk 2025-04-09 19:30:05 +08:00
wangruidong
5ab85d3561 perf: File directory adjustment 2025-04-09 19:29:52 +08:00
wangruidong
467f4c5d4f perf: Translate 2025-04-09 19:29:52 +08:00
wangruidong
f2404319af fix: account_prefer 2025-04-09 19:29:52 +08:00
wangruidong
bbeadf7dbe perf: optimize adhoc asset selection experience 2025-04-09 19:29:52 +08:00
feng
941bd9b3f4 perf: Translate 2025-04-09 19:22:32 +08:00
feng
37a307a9d0 perf: Windows AD 2025-04-09 18:22:00 +08:00
ibuler
528f9045d0 perf: update connection token 2025-04-09 11:11:39 +08:00
ibuler
a317549a01 perf: migrations merge 2025-04-08 19:21:37 +08:00
ibuler
0f5681de7d chore: remove workflow 2025-04-08 19:21:37 +08:00
ibuler
a7c514f8d8 perf: rename some words 2025-04-08 19:21:37 +08:00
ibuler
75ea0079a2 perf: update ad domain 2025-04-08 19:21:37 +08:00
ibuler
4cc1687bf8 perf: update ad 2025-04-08 19:21:37 +08:00
ibuler
76e57b9a3e perf: update ad 2025-04-08 19:21:37 +08:00
ibuler
ba3bce1e2e perf: perm account valid 2025-04-08 19:21:37 +08:00
ibuler
45f0343cfa perf: update ds 2025-04-08 19:21:37 +08:00
ibuler
acaa4cf2d5 perf: rename ad to ds 2025-04-08 19:21:37 +08:00
ibuler
3f452daee8 perf: ad as asset 2025-04-08 19:21:37 +08:00
feng
5e25361ee8 perf: Operate default log 2025-04-08 18:37:25 +08:00
jiangweidong
7b7604e14d Added cloud sync global released asset tab (v4.9) 2025-04-08 18:31:53 +08:00
Eric
f9037878c3 perf: add remoteapp bitmapcache settings 2025-04-08 17:36:48 +08:00
fit2bot
29ddfcac17
fix: Optimize UserConfirmDialog to send code via email (#15164)
* fix: Optimize UserConfirmDialog to send code via email

* fix: Optimize verification failure without error reporting

---------

Co-authored-by: halo <wuyihuangw@gmail.com>
Co-authored-by: Bryan <jiangjie.bai@fit2cloud.com>
2025-04-08 15:50:15 +08:00
wangruidong
519ec65ad4 perf: LDAP strict sync 2025-04-08 15:43:15 +08:00
jiangweidong
1f60e328b6 perf: Export resources to add operation logs 2025-04-08 15:37:29 +08:00
Bai
e8e0ea920b perf: change jumpserver.org to jumpserver.com 2025-04-08 14:23:28 +08:00
Aaron3S
4fd8efd043 feat: remove oracle dyn port 2025-04-08 13:50:30 +08:00
wangruidong
623c800d31 fix: failure when deleting remote account in SQL Server 2025-04-08 13:48:48 +08:00
wangruidong
d2c6e3c7a6 fix: Job audit: Search job list, filter failures based on command 2025-04-08 13:41:59 +08:00
github-actions[bot]
dc5883576d Auto-translate README 2025-04-08 13:34:21 +08:00
Bai
0a9c9fb227 perf: change readme 2025-04-08 13:26:48 +08:00
Bai
15a1a58eca perf: change support email 2025-04-08 13:20:22 +08:00
halo
782401ef86 fix: Implement function _check_code 2025-04-07 16:20:43 +08:00
maninhill
8abcd201bc chore: Update README.md 2025-04-03 15:11:38 +08:00
Bryan
cdbc10ac72 Update README.md 2025-03-31 17:37:01 +08:00
Bryan
ceeef890e6 Update README.md 2025-03-31 17:37:01 +08:00
Bryan
dc8a172884 Update README.md 2025-03-28 17:53:22 +08:00
Bryan
62115e43bb Update README.md 2025-03-28 17:53:22 +08:00
Bryan
5eced85e69 Update README.md 2025-03-28 17:53:22 +08:00
Bryan
ec99b17b76 Update README.md 2025-03-28 17:53:22 +08:00
Bryan
84569720c3 Update README.md 2025-03-28 17:53:22 +08:00
feng
65984d38f1 perf: Account filter 2025-03-28 16:34:53 +08:00
feng
f6913ac63c perf: Privacy mode 2025-03-27 18:34:11 +08:00
Halo
514b2cdfc5
feat: Email as a method for multi-factor authentication (#15134)
* feat: Email as a method for multi-factor authentication

* perf: Optimize the MFA email sending logic

* perf: Optimize some parameters

* perf: Translate
2025-03-27 17:26:38 +08:00
ibuler
b55000663e perf: improve service registration security 2025-03-27 16:25:28 +08:00
ibuler
9ed822bb3e perf: optimize fetching labels 2025-03-27 16:23:14 +08:00
feng
ea599d7695 perf: Perm the template push account 2025-03-27 14:13:29 +08:00
feng
01c5d68b35 perf: change secret change_secret_result 2025-03-27 14:08:19 +08:00
feng
2e2c331941 perf: translate 2025-03-25 18:03:10 +08:00
feng
266ea9b858 perf: Change secret 2025-03-25 16:05:47 +08:00
feng
5f2e838342 perf: koko sftp translate 2025-03-25 14:33:50 +08:00
ZhaoJiSen
544ad5532b
Merge pull request #15117 from jumpserver/pr@dev@koko_translate
perf: koko translate
2025-03-25 11:02:08 +08:00
feng
d22d715ee7 perf: koko translate 2025-03-25 10:59:45 +08:00
halo
dd2366532c perf: Use a domain account to avoid automatically creating a local account 2025-03-25 10:22:16 +08:00
ibuler
9667a3d340 perf: add crontab check min 60m 2025-03-25 09:56:37 +08:00
feng
c8e6e5d38c perf: Login language 2025-03-24 18:55:40 +08:00
feng
9d1047fae2 perf: Translate 2025-03-24 16:51:49 +08:00
feng
28f97d746d perf: Translate 2025-03-24 15:39:37 +08:00
ibuler
be72344c63 perf: update tk create 2025-03-24 10:54:17 +08:00
ibuler
d3176b68a8 perf: optimize admin token check 2025-03-24 10:06:22 +08:00
ibuler
5411f65546 perf: update get permed account 2025-03-24 09:51:46 +08:00
ibuler
e3ba468004 fix: fix token direct-connection issue 2025-03-24 09:51:46 +08:00
feng
a03a11efa4 perf: Translate 2025-03-21 15:49:42 +08:00
feng
d344495417 perf: Translate 2025-03-21 14:24:37 +08:00
github-actions[bot]
9412bd0331 Auto-translate README 2025-03-21 13:29:01 +08:00
Bai
8d73ddb1cd perf: update readme languages 2025-03-21 13:17:56 +08:00
Bai
7fe56a5e1a perf: README 2025-03-21 10:35:27 +08:00
wangruidong
db259d4e8b fix: Failed to delete account 2025-03-20 17:01:43 +08:00
feng
afc31ee5ce perf: translate 2025-03-20 15:33:53 +08:00
feng
69cc47e0cb perf: account backup 2025-03-20 13:25:40 +08:00
wangruidong
2455afc2d2 fix: Use ConfirmOrIgnore for confirmed status in account updates 2025-03-20 11:14:16 +08:00
Chenyang Shen
33cb793c19
Merge pull request #15078 from jumpserver/pr@dev@fix_face_vector_diabled
fix: fix face live can not enable
2025-03-19 20:47:39 +08:00
Aaron3S
5522ba0241 fix: fix face live can not enable 2025-03-19 20:45:35 +08:00
wangruidong
0fed338277 fix: Change secret failed, no module named libs 2025-03-19 20:23:02 +08:00
feng
8f78919b3e perf: Change secret filter days 2025-03-19 18:56:23 +08:00
wangruidong
f6fc9b1f5c fix: Update account status to confirmed 2025-03-19 18:30:09 +08:00
jiangweidong
365dbf6e14 fix: CTYun cannot sync 2025-03-19 17:59:43 +08:00
wangruidong
e5a5d5f727 fix: Account filter error 2025-03-19 16:34:42 +08:00
wangruidong
e0d6b843ee fix: Add account_deleted_accounts to RiskSummary 2025-03-19 15:25:47 +08:00
wangruidong
2a31a7d444 fix: Validate input data for account status updates 2025-03-19 14:45:17 +08:00
wangruidong
e68d5564c6 fix: Ansible could not initialize the preferred locale 2025-03-19 11:21:15 +08:00
feng
4fdb049c9d perf: translate 2025-03-19 11:07:42 +08:00
wangruidong
e5f66c4be2 fix: password_expired account, not listed as risk account 2025-03-18 19:19:51 +08:00
wangruidong
325edfe704 fix: long_time_no_login account, not listed as risk account 2025-03-18 18:59:23 +08:00
feng626
3a93aeb155
Merge pull request #15064 from jumpserver/pr@dev@license
perf: License
2025-03-18 18:56:50 +08:00
feng626
28d6f2f9ef
Merge branch 'dev' into pr@dev@license 2025-03-18 18:56:41 +08:00
feng
2bfcebd064 perf: License 2025-03-18 18:54:12 +08:00
老广
6f2cb9b3c6
Merge pull request #15063 from jumpserver/pr@dev@add_delete_account_action
perf: update some i18n
2025-03-18 17:52:05 +08:00
fit2bot
cdebfd8121
perf: add delete account action (#15059) 2025-03-18 16:57:27 +08:00
ibuler
57d05e6ff0 perf: update some i18n 2025-03-18 16:56:46 +08:00
ibuler
49378d1f13 perf: update ansible env set 2025-03-18 16:32:59 +08:00
wangruidong
e802e145af fix: Create ssh_key failed 2025-03-18 16:23:01 +08:00
ibuler
2faad88b78 perf: remove ga also 2025-03-18 16:07:49 +08:00
ibuler
33a47139b3 perf: ansible env to jms 2025-03-18 16:02:33 +08:00
wangruidong
50faa3242a perf: Translate 2025-03-18 14:51:04 +08:00
ibuler
d89164db63 perf: add delete account action 2025-03-18 14:38:54 +08:00
ibuler
f72fc19ba6 perf: change Command 2025-03-18 13:48:14 +08:00
feng
7bd03c7863 perf: change secret filter 2025-03-18 13:29:36 +08:00
wangruidong
ed95a89a77 fix: Create ssh_key set is_active error 2025-03-18 11:29:40 +08:00
Chenyang Shen
428a4470c9
Merge pull request #15054 from jumpserver/pr@dev@feat_add_translate
feat: add translate
2025-03-18 10:44:00 +08:00
Aaron3S
b206e751da feat: add translate 2025-03-18 10:40:06 +08:00
ibuler
6913518046 perf: update automation report template 2025-03-17 19:21:52 +08:00
wangruidong
f5a2f5e538 fix: Check authorized_keys change failed 2025-03-17 19:18:34 +08:00
feng
f4fa153ffa perf: push account secret 2025-03-17 18:45:25 +08:00
feng626
c70d7f709f
Merge pull request #15047 from jumpserver/pr@dev@excution
perf: Automation execution
2025-03-17 17:28:37 +08:00
feng626
ec95144907
Merge branch 'dev' into pr@dev@excution 2025-03-17 17:28:03 +08:00
feng
424ef4d9a5 perf: Automation execution 2025-03-17 17:26:09 +08:00
wangruidong
40bbc4a02c perf: Translate ExecuteAfterSaving 2025-03-17 16:32:39 +08:00
wangruidong
a784ca29c3 fix: Update counter naming for group and sudo changes in RiskSummary 2025-03-17 15:58:37 +08:00
fit2bot
b7820c6a5b
perf: update automation report 2025-03-17 10:31:49 +08:00
feng
7bac1b42e4 perf: risk add account 2025-03-14 17:41:13 +08:00
feng
0a6757946a perf: translate 2025-03-14 17:02:39 +08:00
wangruidong
e4d169cabe perf: migrations type length 2025-03-14 15:09:06 +08:00
wangruidong
c80ca5236d perf: migrations bulk_size 2025-03-14 14:57:16 +08:00
wangruidong
f513eb62a6 perf: migrations automationexecution type 2025-03-14 14:46:20 +08:00
wangruidong
94e8c62953 fix: Allow empty files in FileSerializer 2025-03-13 18:54:20 +08:00
feng
bd783e6a8d perf: asset perm expire 2025-03-13 18:25:28 +08:00
wangruidong
35bc3a0e2b fix: Account risk search failed 2025-03-13 18:21:51 +08:00
ibuler
996bee3afd perf: update gathered account sync 2025-03-13 18:03:16 +08:00
feng
7c4931b6af perf: execution type 2025-03-13 17:54:02 +08:00
wangruidong
9992fb35be perf: Translate IpGroup 2025-03-13 17:10:09 +08:00
wangruidong
cc63c956cb fix: Delete gather account failed 2025-03-13 16:25:27 +08:00
feng
15919085bc perf: change secret push record list 2025-03-13 14:58:11 +08:00
wangruidong
9e4b82bf45 perf: Translate risk handle 2025-03-13 10:00:31 +08:00
wangruidong
28f85a0186 perf: BaseManager email subject translate 2025-03-12 19:05:23 +08:00
feng
30e64ecfc1 perf: change secret 2025-03-12 18:57:03 +08:00
feng
042c3d1ba8 perf: gather account node_id filter 2025-03-12 18:05:42 +08:00
feng
94a8122eac perf: report 2025-03-12 17:57:15 +08:00
feng
88450d796f perf: translate 2025-03-12 16:45:19 +08:00
wangruidong
3d28e255c0 fix: Account risk page change secret failed 2025-03-12 16:01:42 +08:00
wangruidong
153be1508f perf: Remove account_deleted, password_error, no_admin_account others risk 2025-03-12 15:40:54 +08:00
ibuler
6e83420e67 perf: update i18n 2025-03-12 15:19:34 +08:00
wangruidong
e03d983020 fix: Gather account new-found risk failed 2025-03-12 14:54:15 +08:00
feng
0afc1e6f5b perf: translate 2025-03-12 11:02:40 +08:00
wangruidong
a9ea801862 fix: Update username and address_last_login field length in account models 2025-03-12 10:36:35 +08:00
Bai
23f9b79142 feat: support Spanish and Russian 2025-03-12 10:24:34 +08:00
ibuler
4db15d9af7 perf: update action choice field default 2025-03-12 10:23:01 +08:00
ZhaoJiSen
bc9782bd55
Merge pull request #15001 from jumpserver/pr@dev@account_risk_filter
perf: account filter node
2025-03-11 19:20:14 +08:00
feng
635e9f5079 perf: account filter node 2025-03-11 19:19:14 +08:00
Eric_Lee
71259886fe
Merge pull request #14834 from jumpserver/pr@dev@perf_replay_file
perf: use a custom local path to save session replay file
2025-03-11 19:12:42 +08:00
feng626
a26cc7ce1f
Merge pull request #15000 from jumpserver/pr@dev@connectiontoken
perf: connection token
2025-03-11 18:34:35 +08:00
feng
a2aa5e9bf9 perf: connection token 2025-03-11 18:32:38 +08:00
ZhaoJiSen
82de373f8e
Merge pull request #14998 from jumpserver/pr@dev@chage_secret
perf: change secret drop bulk record
2025-03-11 17:39:45 +08:00
feng
59e7778e4a perf: change secret drop bulk record 2025-03-11 17:37:04 +08:00
feng626
60eaec68c6
Merge pull request #14996 from jumpserver/pr@dev@fix_long_time_no_login_risk
fix: long_time_no_login account, not listed as risk account
2025-03-11 16:40:31 +08:00
wangruidong
43973122bf fix: Remove the empty contents of the log 2025-03-11 16:39:08 +08:00
wangruidong
8668955d4a fix: long_time_no_login account, not listed as risk account 2025-03-11 16:06:47 +08:00
ZhaoJiSen
5571fb6f42
Merge pull request #14993 from jumpserver/pr@dev@report
perf: report css
2025-03-11 15:10:55 +08:00
feng
62b9b6883e perf: report css 2025-03-11 15:04:40 +08:00
老广
7af1c6a2bb
Merge pull request #14991 from jumpserver/pam
fix: OIDC PKCE S256
2025-03-11 11:02:09 +08:00
Bai
3e96c2fe79 fix: OIDC PKCE S256 2025-03-11 09:14:23 +08:00
feng626
3a1d3c1f5c
Merge pull request #14990 from jumpserver/pam
Pam
2025-03-10 19:14:00 +08:00
zhaojisen
9466c7105c Perf: Perf All Report 2025-03-10 17:39:36 +08:00
feng
a3467bdabc perf: change secret 2025-03-10 17:34:20 +08:00
feng
4c547215aa perf: account integrationapplication filter 2025-03-10 16:37:53 +08:00
ibuler
2580e7a712 perf: update accounts migrations 2025-03-10 16:20:42 +08:00
ibuler
038d93e318 Merge branch 'pam' of github.com:jumpserver/jumpserver into pam 2025-03-10 15:25:46 +08:00
ibuler
f46cc0f040 perf: update user serializer 2025-03-10 15:24:41 +08:00
feng
423d6db2ac perf: change record dashboard 2025-03-10 14:48:18 +08:00
wangruidong
243083e876 perf: Translate application detail 2025-03-10 14:26:21 +08:00
wangruidong
c84bc52c70 perf: Translate check account 2025-03-10 10:43:12 +08:00
wangruidong
090ad0ba83 fix: Add drf filter set to SecretRecordMixin 2025-03-07 18:17:13 +08:00
feng
033750f108 perf: execution automation ObjectRelatedField 2025-03-07 17:05:12 +08:00
feng
405344de74 perf: Automation 2025-03-07 16:54:20 +08:00
fit2bot
763e67bd1d
perf: Integrate authentication to update user attribute logic (#14979) 2025-03-07 16:25:28 +08:00
wangruidong
e8c581b08a fix: Integrations Application: Failed to filter accounts by selecting multiple attributes when creating or editing 2025-03-07 14:45:49 +08:00
feng
47029be3da perf: Change secret 2025-03-06 19:15:16 +08:00
wangruidong
bc70c480f7 fix: Integration Application: No Java example in the help docs 2025-03-06 17:45:48 +08:00
jiangweidong
d2d9d3d841 fix: Solve the problem that third-party auth cannot update the user name 2025-03-06 17:02:56 +08:00
ibuler
56d0a6d0b0 Merge branch 'pam' of github.com:jumpserver/jumpserver into pam 2025-03-06 10:23:26 +08:00
ibuler
060e8ace70 perf: update i18n 2025-03-06 10:22:32 +08:00
feng
821622638a perf: translate 2025-03-05 17:30:26 +08:00
老广
51c67f0e9d
Merge pull request #14968 from jumpserver/pr@dev@pam
perf: update i18n
2025-03-04 19:19:38 +08:00
ibuler
70c8db839e perf: update i18n 2025-03-04 19:13:20 +08:00
ibuler
c9208d2cf7 Merge branch 'pam' of github.com:jumpserver/jumpserver into pam 2025-03-04 15:46:52 +08:00
ibuler
58177fdfb0 perf: update some i18n 2025-03-04 15:46:45 +08:00
wangruidong
76dd2f8153 fix: Creating an integration application with the same name results in a server error. 2025-03-03 18:28:09 +08:00
wangruidong
edd998da20 fix: Premailer failed to parse the report HTML. 2025-03-03 16:46:43 +08:00
feng
8b2276ce08 perf: koko sftp superuser perm 2025-03-03 16:17:16 +08:00
wangruidong
f42f46ffe4 fix: User list: some quick filters are ineffective. 2025-03-03 14:56:32 +08:00
ibuler
b2f8f9d248 perf: update pam i18n 2025-03-03 14:46:54 +08:00
ibuler
8105681304 perf: update i18n 2025-03-03 14:44:52 +08:00
feng
b4d3f6099a perf: translate 2025-03-03 10:52:21 +08:00
feng
9b48da11dc perf: translate 2025-02-28 18:55:35 +08:00
feng
904154a62b perf: translate 2025-02-28 18:50:18 +08:00
feng
7181dad5ad perf: Translate 2025-02-28 18:40:47 +08:00
jiangweidong
2a250d13d8 fix: Solve the problem that some messages cannot be sent from unauthenticated email 2025-02-28 17:44:36 +08:00
feng
caa1e2e1ac perf: Translate 2025-02-28 17:05:36 +08:00
wangruidong
5b044299b9 fix: Back account translate 2025-02-28 14:55:42 +08:00
wangruidong
d48a12e266 perf: Translate 2025-02-28 11:04:29 +08:00
feng
35ec9dc2f4 perf: Translate 2025-02-27 18:50:06 +08:00
老广
33f3281a1f
Merge pull request #14943 from jumpserver/pr@dev@update_pam
merge: with dev
2025-02-27 17:14:59 +08:00
ibuler
93810fb0db merge: with dev 2025-02-27 17:05:35 +08:00
ibuler
8fc2b86189 Merge branch 'pam' of github.com:jumpserver/jumpserver into pam 2025-02-27 17:00:00 +08:00
ibuler
6217733aba perf: update some i18n 2025-02-27 16:59:52 +08:00
feng
4b4d7b6787 perf: During MFA authentication, if the current code has been used and successfully authenticated, it cannot be used again for authentication 2025-02-27 15:35:56 +08:00
ibuler
acbc3e1e44 Merge branch 'pam' of github.com:jumpserver/jumpserver into pam 2025-02-27 13:27:43 +08:00
ibuler
110b3a334d perf: update i18n 2025-02-27 13:27:35 +08:00
wangruidong
a498b22e80 perf: Update account change secret status and date on successful secret change 2025-02-26 16:20:39 +08:00
wangruidong
385bf47b11 perf: Add last login date update for account on session save 2025-02-26 16:20:39 +08:00
ibuler
16400082e7 perf: update i18n 2025-02-26 15:34:04 +08:00
wangruidong
cbc009cb3f fix: Return Response object for successful account risk handling 2025-02-26 10:31:33 +08:00
feng
919cdeae20 perf: report to iframe 2025-02-25 19:14:13 +08:00
feng
456b96a369 perf: Translate 2025-02-25 14:49:22 +08:00
wangruidong
ff9ad2680f perf: Update confirmation backend definitions to use ConfirmType 2025-02-25 10:53:19 +08:00
Bai
96a0cbc35d fix: import error 2025-02-25 10:53:19 +08:00
feng
08963ebb40 perf: Translate 2025-02-24 19:26:34 +08:00
wangruidong
908e6cb81a fix: Migrate account backup automation and execution data 2025-02-24 18:50:06 +08:00
feng
c206f5d09c perf: user filter error 2025-02-24 15:33:23 +08:00
github-actions[bot]
f0c7a7508b perf: Update Dockerfile with new base image tag 2025-02-24 15:09:38 +08:00
Bai
d386189c77 fix: pyproject.toml 2025-02-24 15:09:38 +08:00
ibuler
f0fa381b9f perf: update action run on 2025-02-24 14:51:53 +08:00
Bai
8b2af5ee0a fix: make messages errors 2025-02-24 14:28:31 +08:00
ibuler
0541c0a9d4 perf: squash migrations 2025-02-24 14:08:47 +08:00
feng
e4c7eb8035 perf: Account filter 2025-02-24 11:50:48 +08:00
Bai
e33aeaa338 fix: migrations files modify 2025-02-24 11:26:20 +08:00
feng
1282fffff7 perf: Change secret 2025-02-21 17:46:56 +08:00
fit2bot
3f4141ca0b
merge: with pam (#14911)
* perf: change i18n

* perf: pam

* perf: change translate

* perf: add check account

* perf: add date field

* perf: add account filter

* perf: remove some js

* perf: add account status action

* perf: update pam

* perf: update discover account

* perf: update filter

* perf: update gathered account

* perf: update account sync

* perf: squash migrations

* perf: update pam

* perf: change i18n

* perf: update account risk

* perf: update risk discovery

* perf: remove css

* perf: Admin connection token

* perf: Add a switch to check connectivity after changing the password, and add a custom ssh command for push tasks

* perf: Modify account migration files

* perf: update pam

* perf: remove to check account dir

* perf: Admin connection token

* perf: update check account

* perf: optimize send results

* perf: update pam

* perf: update bulk update create

* perf: prepare using thread timer for bulk_create_decorator

* perf: update bulk create decorator

* perf: optimize playbook manager

* perf: optimize gathered account report

* perf: Update poetry

* perf: Update Dockerfile with new base image tag

* fix: Account migrate 0012 file

* perf: update backup

* perf: update pam

* fix: Expand resource_type filter to include raw type

* feat: PAM Service (#14552)

* feat: PAM Service

* perf: import package name

---------

Co-authored-by: jiangweidong <1053570670@qq.com>

* perf: Change secret dashboard (#14551)

Co-authored-by: feng <1304903146@qq.com>

* perf: update migrations

* perf: update pam support

* perf: Change secret record table dashboard

* perf: update status

* fix: Automation send report

* perf: Change secret report

* feat: windows accounts gather

* perf: update change status

* perf: Account backup

* perf: Account backup report

* perf: Account migrate

* perf: update service to application

* perf: update migrations

* perf: update logo

* feat: oracle accounts gather (#14571)

* feat: oracle accounts gather

* feat: sqlserver accounts gather

* feat: postgresql accounts gather

* feat: mysql accounts gather

---------

Co-authored-by: wangruidong <940853815@qq.com>

* feat: mongodb accounts gather

* perf: Change secret

* perf: Migrate

* perf: Merge conflicting migration files

* perf: Change secret

* perf: Automation filter org

* perf: Account push

* perf: Random secret string

* perf: Enhance SQL query and update risk handling in accounts

* perf: Ticket filter assignee_id

* perf: update account remote

* perf: update some adhoc tasks

* perf: Change secret

* perf: Remove push account extra api

* perf: update status

* perf: The entire organization can view activity log

* fix: risk field check

* perf: add account details api

* perf: add demo mode

* perf: Delete gather_account

* perf: Perfect solution to account version problem

* perf: Update status action to handle multiple accounts

* perf: Add GatherAccountDetailField and update serializers

* perf: Display account history in combination with password change records

* perf: Lina translate

* fix: Update mysql_filter to handle nested user info

* perf: Admin connection token validate_permission account

* perf: copy move account

* perf: account filter risk

* perf: account risk filter

* perf: Copy move account failed message

* fix: gather account sync account to asset

* perf: Pam dashboard

* perf: Account dashboard total accounts

* perf: Pam dashboard

* perf: Change secret filter account secret_reset

* perf: update risk filter

* perf: pam translate

* feat: Check for leaked duplicate passwords. (#14711)

* feat: Check for leaked duplicate passwords.

* perf: Use SQLite instead of txt as leak password database

---------

Co-authored-by: jiangweidong <1053570670@qq.com>
Co-authored-by: 老广 <ibuler@qq.com>

* perf: merge with remote

* perf: Add risk change_password_add handle

* perf: Pam dashboard

* perf: check account manager import

* perf: refactor scanning

* perf: update db

* perf: Gather account manager

* perf: update change db lib

* perf: dashboard

* perf: Account gather

* perf: update asset get queryset

* perf: automation report

* perf: Pam account

* perf: Pam dashboard api

* perf: risk add account

* perf: update risk check

* perf: Risk account

* perf: update risk add reopen action

* perf: add pylintrc

* Revert "perf: automation report"

This reverts commit 22aee54207.

* perf: check account engine

* perf: Optimize gather report style

* Perf: Remove unused actions

* Perf: Perf push account

* perf: perf gather account

* perf: Automation report

* perf: Push account recorder

* perf: Push account record

* perf: Pam dashboard

* perf: perf

* perf: update integration

* perf: integrations application detail add account tab page

* feat: Custom change password supports configuration of interactive items

* perf: Go and Python demo code

* perf: Custom secret change

* perf: add user filter

* perf: translate

* perf: Add demo code docs

* perf: update some i18n

* perf: update some i18n

* perf: Add Java, Node, Go, and cURL demo code

* perf: Translate

* perf: Change secret translate

* perf: Translate

* perf: update some i18n

* perf: translate

* perf: Ansible playbook

* perf: update some choice

* perf: update some choice

* perf: update account serializer remote unused code

* perf: conflict

* perf: update import

---------

Co-authored-by: ibuler <ibuler@qq.com>
Co-authored-by: feng <1304903146@qq.com>
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
Co-authored-by: wangruidong <940853815@qq.com>
Co-authored-by: jiangweidong <1053570670@qq.com>
Co-authored-by: feng626 <57284900+feng626@users.noreply.github.com>
Co-authored-by: zhaojisen <1301338853@qq.com>
2025-02-21 16:39:57 +08:00
wangruidong
d516349a68 perf: Translate 2025-02-20 18:24:27 +08:00
Chenyang Shen
8730fa8dee
Merge pull request #14900 from jumpserver/pr@dev@translate
perf: translate
2025-02-19 19:26:09 +08:00
Aaron3S
9a5a775652 feat: add luna i18n 2025-02-19 19:17:21 +08:00
feng
8304ae9070 perf: translate 2025-02-19 16:27:10 +08:00
wangruidong
9533861e24 fix: VariableSerializer allow null in default_value 2025-02-18 15:26:11 +08:00
Aaron3S
abbfbcde83 feat: add i18n 2025-02-18 15:10:13 +08:00
wangruidong
046a9d41bf fix: Removing labels from asset details will cause an error. 2025-02-17 18:21:28 +08:00
fit2bot
363bb20da7
feat: Chen i18n (#14851)
Co-authored-by: jiangweidong <1053570670@qq.com>
2025-02-17 11:25:54 +08:00
wangruidong
2b7c8b9c07 fix: Upgrading v3 to v4 generates two ticket flow 2025-02-14 16:10:20 +08:00
Aaron3S
db040bbd06 feat: add translate 2025-02-14 11:30:56 +08:00
feng
a761ec9aa1 perf: Translate 2025-02-13 16:55:09 +08:00
feng
c0ffe45ce9 perf: deepseek 2025-02-13 16:40:29 +08:00
wangruidong
404d58a9c9 fix: When the organization does not exist, close ticket with an error. 2025-02-13 16:31:47 +08:00
feng
f64eab7a15 perf: Translate 2025-02-13 11:48:59 +08:00
feng
46f94fd138 perf: Chat ai help text 2025-02-13 11:30:07 +08:00
wangruidong
2f1c0090b7 fix: markdown render issue 2025-02-12 15:48:05 +08:00
feng
b0d6a09276 perf: translate 2025-02-10 19:15:39 +08:00
feng
d8db76cc7b perf: DeepSeek 2025-02-08 15:40:24 +08:00
wangruidong
b35a55ed54 fix: Cannot set original org when exception occurs 2025-02-08 11:13:14 +08:00
Bai
dc5ecfcc4b fix: setting field encrypt issue 2025-02-06 17:14:10 +08:00
Eric
594d5b8128 perf: use a custom local path to save session replay file 2025-01-23 15:55:43 +08:00
656 changed files with 68420 additions and 30992 deletions

View File

@ -8,4 +8,6 @@ celerybeat.pid
.vagrant/
apps/xpack/.git
.history/
.idea
.idea
.venv/
.env

4
.gitattributes vendored
View File

@ -1,4 +0,0 @@
*.mmdb filter=lfs diff=lfs merge=lfs -text
*.mo filter=lfs diff=lfs merge=lfs -text
*.ipdb filter=lfs diff=lfs merge=lfs -text

10
.github/dependabot.yml vendored Normal file
View File

@ -0,0 +1,10 @@
version: 2
updates:
- package-ecosystem: "uv"
directory: "/"
schedule:
interval: "weekly"
day: "monday"
time: "09:30"
timezone: "Asia/Shanghai"
target-branch: dev

View File

@ -22,7 +22,7 @@ on:
jobs:
build-and-push:
runs-on: ubuntu-latest
runs-on: ubuntu-22.04
steps:
- name: Checkout repository
uses: actions/checkout@v4
@ -31,6 +31,8 @@ jobs:
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
with:
image: tonistiigi/binfmt:qemu-v7.0.0-28
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

View File

@ -2,10 +2,14 @@ name: Translate README
on:
workflow_dispatch:
inputs:
source_readme:
description: "Source README"
required: false
default: "./readmes/README.en.md"
target_langs:
description: "Target Languages"
required: false
default: "zh-hans,zh-hant,ja,pt-br"
default: "zh-hans,zh-hant,ja,pt-br,es,ru"
gen_dir_path:
description: "Generate Dir Name"
required: false
@ -34,6 +38,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.PRIVATE_TOKEN }}
OPENAI_API_KEY: ${{ secrets.GPT_API_TOKEN }}
GPT_MODE: ${{ github.event.inputs.gpt_mode }}
SOURCE_README: ${{ github.event.inputs.source_readme }}
TARGET_LANGUAGES: ${{ github.event.inputs.target_langs }}
PUSH_BRANCH: ${{ github.event.inputs.push_branch }}
GEN_DIR_PATH: ${{ github.event.inputs.gen_dir_path }}

3
.gitignore vendored
View File

@ -46,3 +46,6 @@ test.py
.test/
*.mo
apps.iml
*.db
*.mmdb
*.ipdb

2
.pylintrc Normal file
View File

@ -0,0 +1,2 @@
[MESSAGES CONTROL]
disable=missing-module-docstring,missing-class-docstring,missing-function-docstring,too-many-ancestors

View File

@ -1,4 +1,4 @@
FROM jumpserver/core-base:20241210_070105 AS stage-build
FROM jumpserver/core-base:20250427_062456 AS stage-build
ARG VERSION

View File

@ -1,6 +1,6 @@
FROM python:3.11-slim-bullseye
ARG TARGETARCH
COPY --from=ghcr.io/astral-sh/uv:0.6.14 /uv /uvx /usr/local/bin/
# Install APT dependencies
ARG DEPENDENCIES=" \
ca-certificates \
@ -43,18 +43,19 @@ WORKDIR /opt/jumpserver
ARG PIP_MIRROR=https://pypi.org/simple
ENV POETRY_PYPI_MIRROR_URL=${PIP_MIRROR}
ENV ANSIBLE_COLLECTIONS_PATHS=/opt/py3/lib/python3.11/site-packages/ansible_collections
ENV LANG=en_US.UTF-8 \
PATH=/opt/py3/bin:$PATH
ENV UV_LINK_MODE=copy
RUN --mount=type=cache,target=/root/.cache \
--mount=type=bind,source=poetry.lock,target=poetry.lock \
--mount=type=bind,source=pyproject.toml,target=pyproject.toml \
--mount=type=bind,source=utils/clean_site_packages.sh,target=clean_site_packages.sh \
--mount=type=bind,source=requirements/clean_site_packages.sh,target=clean_site_packages.sh \
--mount=type=bind,source=requirements/collections.yml,target=collections.yml \
--mount=type=bind,source=requirements/static_files.sh,target=utils/static_files.sh \
set -ex \
&& python3 -m venv /opt/py3 \
&& pip install poetry poetry-plugin-pypi-mirror -i ${PIP_MIRROR} \
&& . /opt/py3/bin/activate \
&& poetry config virtualenvs.create false \
&& poetry install --no-cache --only main \
&& ansible-galaxy collection install -r collections.yml --force --ignore-certs \
&& bash clean_site_packages.sh \
&& poetry cache clear pypi --all
&& uv venv \
&& uv pip install -i${PIP_MIRROR} -r pyproject.toml \
&& ln -sf $(pwd)/.venv /opt/py3 \
&& bash utils/static_files.sh \
&& bash clean_site_packages.sh
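
For context, a minimal local sketch of the same uv-based install that the new Dockerfile stage performs; this assumes uv is on PATH and the commands run from the repository root, and the index URL shown is simply the default PyPI mirror used as the build argument above:

export UV_LINK_MODE=copy                                       # same link mode the Dockerfile sets
uv venv                                                        # create ./.venv
uv pip install -i https://pypi.org/simple -r pyproject.toml    # install the main dependencies declared in pyproject.toml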

View File

@ -24,11 +24,7 @@ RUN set -ex \
WORKDIR /opt/jumpserver
ARG PIP_MIRROR=https://pypi.org/simple
ENV POETRY_PYPI_MIRROR_URL=${PIP_MIRROR}
COPY poetry.lock pyproject.toml ./
RUN set -ex \
&& . /opt/py3/bin/activate \
&& pip install poetry poetry-plugin-pypi-mirror -i ${PIP_MIRROR} \
&& poetry install --only xpack \
&& poetry cache clear pypi --all
RUN set -ex \
&& uv pip install -i${PIP_MIRROR} --group xpack

View File

@ -1,6 +1,6 @@
<div align="center">
<a name="readme-top"></a>
<a href="https://jumpserver.org/index-en.html"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a>
<a href="https://jumpserver.com" target="_blank"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a>
## An open-source PAM tool (Bastion Host)
@ -10,7 +10,7 @@
[![][github-release-shield]][github-release-link]
[![][github-stars-shield]][github-stars-link]
[English](/README.md) · [中文(简体)](/readmes/README.zh-hans.md) · [中文(繁體)](/readmes/README.zh-hant.md) · [日本語](/readmes/README.ja.md) · [Português (Brasil)](/readmes/README.pt-br.md)
[English](/README.md) · [中文(简体)](/readmes/README.zh-hans.md) · [中文(繁體)](/readmes/README.zh-hant.md) · [日本語](/readmes/README.ja.md) · [Português (Brasil)](/readmes/README.pt-br.md) · [Español](/readmes/README.es.md) · [Русский](/readmes/README.ru.md)
</div>
<br/>
@ -19,7 +19,13 @@
JumpServer is an open-source Privileged Access Management (PAM) tool that provides DevOps and IT teams with on-demand and secure access to SSH, RDP, Kubernetes, Database and RemoteApp endpoints through a web browser.
![JumpServer Overview](https://github.com/jumpserver/jumpserver/assets/32935519/35a371cb-8590-40ed-88ec-f351f8cf9045)
<picture>
<source media="(prefers-color-scheme: light)" srcset="https://github.com/user-attachments/assets/dd612f3d-c958-4f84-b164-f31b75454d7f">
<source media="(prefers-color-scheme: dark)" srcset="https://github.com/user-attachments/assets/28676212-2bc4-4a9f-ae10-3be9320647e3">
<img src="https://github.com/user-attachments/assets/dd612f3d-c958-4f84-b164-f31b75454d7f" alt="Theme-based Image">
</picture>
## Quickstart
@ -36,18 +42,19 @@ Access JumpServer in your browser at `http://your-jumpserver-ip/`
[![JumpServer Quickstart](https://github.com/user-attachments/assets/0f32f52b-9935-485e-8534-336c63389612)](https://www.youtube.com/watch?v=UlGYRbKrpgY "JumpServer Quickstart")
## Screenshots
<table style="border-collapse: collapse; border: 1px solid black;">
<tr>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/99fabe5b-0475-4a53-9116-4c370a1426c4" alt="JumpServer Console" /></td>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/a424d731-1c70-4108-a7d8-5bbf387dda9a" alt="JumpServer Audits" /></td>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/user-attachments/assets/7c1f81af-37e8-4f07-8ac9-182895e1062e" alt="JumpServer PAM" /></td>    
</tr>
<tr>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/a424d731-1c70-4108-a7d8-5bbf387dda9a" alt="JumpServer Audits" /></td>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/393d2c27-a2d0-4dea-882d-00ed509e00c9" alt="JumpServer Workbench" /></td>
</tr>
<tr>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/user-attachments/assets/eaa41f66-8cc8-4f01-a001-0d258501f1c9" alt="JumpServer RBAC" /></td>     
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/3a2611cd-8902-49b8-b82b-2a6dac851f3e" alt="JumpServer Settings" /></td>
</tr>
<tr>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/1e236093-31f7-4563-8eb1-e36d865f1568" alt="JumpServer SSH" /></td>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/69373a82-f7ab-41e8-b763-bbad2ba52167" alt="JumpServer RDP" /></td>
@ -69,9 +76,9 @@ JumpServer consists of multiple key components, which collectively form the func
| [KoKo](https://github.com/jumpserver/koko) | <a href="https://github.com/jumpserver/koko/releases"><img alt="Koko release" src="https://img.shields.io/github/release/jumpserver/koko.svg" /></a> | JumpServer Character Protocol Connector |
| [Lion](https://github.com/jumpserver/lion) | <a href="https://github.com/jumpserver/lion/releases"><img alt="Lion release" src="https://img.shields.io/github/release/jumpserver/lion.svg" /></a> | JumpServer Graphical Protocol Connector |
| [Chen](https://github.com/jumpserver/chen) | <a href="https://github.com/jumpserver/chen/releases"><img alt="Chen release" src="https://img.shields.io/github/release/jumpserver/chen.svg" /> | JumpServer Web DB |
| [Razor](https://github.com/jumpserver/razor) | <img alt="Chen" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE RDP Proxy Connector |
| [Tinker](https://github.com/jumpserver/tinker) | <img alt="Tinker" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Remote Application Connector (Windows) |
| [Tinker](https://github.com/jumpserver/tinker) | <img alt="Tinker" src="https://img.shields.io/badge/release-private-red" /> | JumpServer Remote Application Connector (Windows) |
| [Panda](https://github.com/jumpserver/Panda) | <img alt="Panda" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Remote Application Connector (Linux) |
| [Razor](https://github.com/jumpserver/razor) | <img alt="Chen" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE RDP Proxy Connector |
| [Magnus](https://github.com/jumpserver/magnus) | <img alt="Magnus" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Database Proxy Connector |
| [Nec](https://github.com/jumpserver/nec) | <img alt="Nec" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE VNC Proxy Connector |
| [Facelive](https://github.com/jumpserver/facelive) | <img alt="Facelive" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Facial Recognition |
@ -81,12 +88,6 @@ JumpServer consists of multiple key components, which collectively form the func
Welcome to submit PR to contribute. Please refer to [CONTRIBUTING.md][contributing-link] for guidelines.
## Security
JumpServer is a mission critical product. Please refer to the Basic Security Recommendations for installation and deployment. If you encounter any security-related issues, please contact us directly:
- Email: support@fit2cloud.com
## License
Copyright (c) 2014-2025 FIT2CLOUD, All rights reserved.
@ -115,5 +116,3 @@ Unless required by applicable law or agreed to in writing, software distributed
[docker-shield]: https://img.shields.io/docker/pulls/jumpserver/jms_all.svg
[license-shield]: https://img.shields.io/github/license/jumpserver/jumpserver
[discord-shield]: https://img.shields.io/discord/1194233267294052363?style=flat&logo=discord&logoColor=%23f5f5f5&labelColor=%235462eb&color=%235462eb
<!-- Image link -->

View File

@ -1,121 +0,0 @@
<p align="center">
<a href="https://jumpserver.org"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a>
</p>
<h3 align="center">广受欢迎的开源堡垒机</h3>
<p align="center">
<a href="https://www.gnu.org/licenses/gpl-3.0.html"><img src="https://img.shields.io/github/license/jumpserver/jumpserver" alt="License: GPLv3"></a>
<a href="https://hub.docker.com/u/jumpserver"><img src="https://img.shields.io/docker/pulls/jumpserver/jms_all.svg" alt="Docker pulls"></a>
<a href="https://github.com/jumpserver/jumpserver/releases/latest"><img src="https://img.shields.io/github/v/release/jumpserver/jumpserver" alt="Latest release"></a>
<a href="https://github.com/jumpserver/jumpserver"><img src="https://img.shields.io/github/stars/jumpserver/jumpserver?color=%231890FF&style=flat-square" alt="Stars"></a>
</p>
<p align="center">
10 年时间,倾情投入,用心做好一款开源堡垒机。
</p>
------------------------------
## JumpServer 是什么?
JumpServer 是广受欢迎的开源堡垒机,是符合 4A 规范的专业运维安全审计系统。JumpServer 堡垒机帮助企业以更安全的方式管控和登录各种类型的资产,包括:
- **SSH**: Linux / Unix / 网络设备 等;
- **Windows**: Web 方式连接 / 原生 RDP 连接;
- **数据库**: MySQL / MariaDB / PostgreSQL / Oracle / SQLServer / ClickHouse 等;
- **NoSQL**: Redis / MongoDB 等;
- **GPT**: ChatGPT 等;
- **云服务**: Kubernetes / VMware vSphere 等;
- **Web 站点**: 各类系统的 Web 管理后台;
- **应用**: 通过 Remote App 连接各类应用。
## 产品特色
- **开源**: 零门槛,线上快速获取和安装;
- **无插件**: 仅需浏览器,极致的 Web Terminal 使用体验;
- **分布式**: 支持分布式部署和横向扩展,轻松支持大规模并发访问;
- **多云支持**: 一套系统,同时管理不同云上面的资产;
- **多租户**: 一套系统,多个子公司或部门同时使用;
- **云端存储**: 审计录像云端存储,永不丢失;
## 快速开始
- [快速入门](https://docs.jumpserver.org/zh/v3/quick_start/)
- [产品文档](https://docs.jumpserver.org)
- [在线学习](https://edu.fit2cloud.com/page/2635362)
- [知识库](https://kb.fit2cloud.com/categories/jumpserver)
## UI 展示
![UI展示](https://docs.jumpserver.org/zh/v3/img/dashboard.png)
## 在线体验
- 环境地址:<https://demo.jumpserver.org/>
| :warning: 注意 |
|:-----------------------------|
| 该环境仅作体验目的使用,我们会定时清理、重置数据! |
| 请勿修改体验环境用户的密码! |
| 请勿在环境中添加业务生产环境地址、用户名密码等敏感信息! |
## 案例研究
- [腾讯音乐娱乐集团基于JumpServer的安全运维审计解决方案](https://blog.fit2cloud.com/?p=a04cdf0d-6704-4d18-9b40-9180baecd0e2)
- [腾讯海外游戏基于JumpServer构建游戏安全运营能力](https://blog.fit2cloud.com/?p=3704)
- [万华化学通过JumpServer管理全球化分布式IT资产并且实现与云管平台的联动](https://blog.fit2cloud.com/?p=3504)
- [雪花啤酒JumpServer堡垒机使用体会](https://blog.fit2cloud.com/?p=3412)
- [顺丰科技JumpServer 堡垒机护航顺丰科技超大规模资产安全运维](https://blog.fit2cloud.com/?p=1147)
- [沐瞳游戏通过JumpServer管控多项目分布式资产](https://blog.fit2cloud.com/?p=3213)
- [携程JumpServer 堡垒机部署与运营实战](https://blog.fit2cloud.com/?p=851)
- [大智慧JumpServer 堡垒机让“大智慧”的混合 IT 运维更智慧](https://blog.fit2cloud.com/?p=882)
- [小红书JumpServer 堡垒机大规模资产跨版本迁移之路](https://blog.fit2cloud.com/?p=516)
- [中手游JumpServer堡垒机助力中手游提升多云环境下安全运维能力](https://blog.fit2cloud.com/?p=732)
- [中通快递JumpServer主机安全运维实践](https://blog.fit2cloud.com/?p=708)
- [东方明珠JumpServer高效管控异构化、分布式云端资产](https://blog.fit2cloud.com/?p=687)
- [江苏农信JumpServer堡垒机助力行业云安全运维](https://blog.fit2cloud.com/?p=666)
## 社区交流
如果您在使用过程中有任何疑问或对建议,欢迎提交 [GitHub Issue](https://github.com/jumpserver/jumpserver/issues/new/choose)。
您也可以到我们的 [社区论坛](https://bbs.fit2cloud.com/c/js/5) 当中进行交流沟通。
## 参与贡献
欢迎提交 PR 参与贡献。 参考 [CONTRIBUTING.md](https://github.com/jumpserver/jumpserver/blob/dev/CONTRIBUTING.md)
## 组件项目
| Project | Status | Description |
|--------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------|
| [Lina](https://github.com/jumpserver/lina) | <a href="https://github.com/jumpserver/lina/releases"><img alt="Lina release" src="https://img.shields.io/github/release/jumpserver/lina.svg" /></a> | JumpServer Web UI |
| [Luna](https://github.com/jumpserver/luna) | <a href="https://github.com/jumpserver/luna/releases"><img alt="Luna release" src="https://img.shields.io/github/release/jumpserver/luna.svg" /></a> | JumpServer Web Terminal |
| [KoKo](https://github.com/jumpserver/koko) | <a href="https://github.com/jumpserver/koko/releases"><img alt="Koko release" src="https://img.shields.io/github/release/jumpserver/koko.svg" /></a> | JumpServer Character Protocol Connector |
| [Lion](https://github.com/jumpserver/lion) | <a href="https://github.com/jumpserver/lion/releases"><img alt="Lion release" src="https://img.shields.io/github/release/jumpserver/lion.svg" /></a> | JumpServer Graphical Protocol Connector |
| [Chen](https://github.com/jumpserver/chen) | <a href="https://github.com/jumpserver/chen/releases"><img alt="Chen release" src="https://img.shields.io/github/release/jumpserver/chen.svg" /> | JumpServer Web DB |
| [Razor](https://github.com/jumpserver/razor) | <img alt="Chen" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE RDP Proxy Connector |
| [Tinker](https://github.com/jumpserver/tinker) | <img alt="Tinker" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Remote Application Connector (Windows) |
| [Panda](https://github.com/jumpserver/Panda) | <img alt="Panda" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Remote Application Connector (Linux) |
| [Magnus](https://github.com/jumpserver/magnus) | <img alt="Magnus" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Database Proxy Connector |
## 安全说明
JumpServer是一款安全产品请参考 [基本安全建议](https://docs.jumpserver.org/zh/master/install/install_security/)
进行安装部署。如果您发现安全相关问题,请直接联系我们:
- 邮箱support@fit2cloud.com
- 电话400-052-0755
## License & Copyright
Copyright (c) 2014-2024 飞致云, All rights reserved.
Licensed under The GNU General Public License version 3 (GPLv3) (the "License"); you may not use this file except in
compliance with the License. You may obtain a copy of the License at
https://www.gnu.org/licenses/gpl-3.0.html
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "
AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific
language governing permissions and limitations under the License.

View File

@ -5,8 +5,7 @@ JumpServer 是一款正在成长的安全产品, 请参考 [基本安全建议
如果你发现安全问题,请直接联系我们,我们携手让世界更好:
- ibuler@fit2cloud.com
- support@fit2cloud.com
- 400-052-0755
- support@lxware.hk
# Security Policy
@ -16,6 +15,5 @@ JumpServer is a security product, The installation and development should follow
All security bugs should be reported to the contact as below:
- ibuler@fit2cloud.com
- support@fit2cloud.com
- 400-052-0755
- support@lxware.hk

View File

@ -1,4 +1,6 @@
from .account import *
from .application import *
from .pam_dashboard import *
from .task import *
from .template import *
from .virtual import *

View File

@ -1,20 +1,27 @@
from django.db import transaction
from django.shortcuts import get_object_or_404
from django.utils.translation import gettext_lazy as _
from rest_framework.decorators import action
from rest_framework.generics import ListAPIView, CreateAPIView
from rest_framework.response import Response
from rest_framework.status import HTTP_200_OK
from accounts import serializers
from accounts.filters import AccountFilterSet
from accounts.const import ChangeSecretRecordStatusChoice
from accounts.filters import AccountFilterSet, NodeFilterBackend
from accounts.mixins import AccountRecordViewLogMixin
from accounts.models import Account
from accounts.models import Account, ChangeSecretRecord
from assets.models import Asset, Node
from authentication.permissions import UserConfirmation, ConfirmType
from common.api.mixin import ExtraFilterFieldsMixin
from common.drf.filters import AttrRulesFilterBackend
from common.permissions import IsValidUser
from common.utils import lazyproperty, get_logger
from orgs.mixins.api import OrgBulkModelViewSet
from rbac.permissions import RBACPermission
logger = get_logger(__file__)
__all__ = [
'AccountViewSet', 'AccountSecretsViewSet',
'AccountHistoriesSecretAPI', 'AssetAccountBulkCreateApi',
@ -24,6 +31,7 @@ __all__ = [
class AccountViewSet(OrgBulkModelViewSet):
model = Account
search_fields = ('username', 'name', 'asset__name', 'asset__address', 'comment')
extra_filter_backends = [AttrRulesFilterBackend, NodeFilterBackend]
filterset_class = AccountFilterSet
serializer_classes = {
'default': serializers.AccountSerializer,
@ -33,9 +41,21 @@ class AccountViewSet(OrgBulkModelViewSet):
'partial_update': ['accounts.change_account'],
'su_from_accounts': 'accounts.view_account',
'clear_secret': 'accounts.change_account',
'move_to_assets': 'accounts.create_account',
'copy_to_assets': 'accounts.create_account',
}
export_as_zip = True
def get_queryset(self):
queryset = super().get_queryset()
asset_id = self.request.query_params.get('asset') or self.request.query_params.get('asset_id')
if not asset_id:
return queryset
asset = get_object_or_404(Asset, pk=asset_id)
queryset = asset.all_accounts.all()
return queryset
@action(methods=['get'], detail=False, url_path='su-from-accounts')
def su_from_accounts(self, request, *args, **kwargs):
account_id = request.query_params.get('account')
@ -86,6 +106,45 @@ class AccountViewSet(OrgBulkModelViewSet):
self.model.objects.filter(id__in=account_ids).update(secret=None)
return Response(status=HTTP_200_OK)
def _copy_or_move_to_assets(self, request, move=False):
account = self.get_object()
asset_ids = request.data.get('assets', [])
assets = Asset.objects.filter(id__in=asset_ids)
field_names = [
'name', 'username', 'secret_type', 'secret',
'privileged', 'is_active', 'source', 'source_id', 'comment'
]
account_data = {field: getattr(account, field) for field in field_names}
creation_results = {}
success_count = 0
for asset in assets:
account_data['asset'] = asset
creation_results[asset] = {'state': 'created'}
try:
with transaction.atomic():
self.model.objects.create(**account_data)
success_count += 1
except Exception as e:
logger.debug(f'{"Move" if move else "Copy"} to assets error: {e}')
creation_results[asset] = {'error': _('Account already exists'), 'state': 'error'}
results = [{'asset': str(asset), **res} for asset, res in creation_results.items()]
if move and success_count > 0:
account.delete()
return Response(results, status=HTTP_200_OK)
@action(methods=['post'], detail=True, url_path='move-to-assets')
def move_to_assets(self, request, *args, **kwargs):
return self._copy_or_move_to_assets(request, move=True)
@action(methods=['post'], detail=True, url_path='copy-to-assets')
def copy_to_assets(self, request, *args, **kwargs):
return self._copy_or_move_to_assets(request, move=False)
class AccountSecretsViewSet(AccountRecordViewLogMixin, AccountViewSet):
"""
@ -125,17 +184,31 @@ class AccountHistoriesSecretAPI(ExtraFilterFieldsMixin, AccountRecordViewLogMixi
'GET': 'accounts.view_accountsecret',
}
def get_object(self):
@lazyproperty
def account(self) -> Account:
return get_object_or_404(Account, pk=self.kwargs.get('pk'))
def get_object(self):
return self.account
@lazyproperty
def latest_history(self):
return self.account.history.first()
@property
def latest_change_secret_record(self) -> ChangeSecretRecord:
return self.account.changesecretrecords.filter(
status=ChangeSecretRecordStatusChoice.pending
).order_by('-date_created').first()
@staticmethod
def filter_spm_queryset(resource_ids, queryset):
return queryset.filter(history_id__in=resource_ids)
def get_queryset(self):
account = self.get_object()
account = self.account
histories = account.history.all()
latest_history = account.history.first()
latest_history = self.latest_history
if not latest_history:
return histories
if account.secret != latest_history.secret:
@ -144,3 +217,25 @@ class AccountHistoriesSecretAPI(ExtraFilterFieldsMixin, AccountRecordViewLogMixi
return histories
histories = histories.exclude(history_id=latest_history.history_id)
return histories
def filter_queryset(self, queryset):
queryset = super().filter_queryset(queryset)
queryset = list(queryset)
latest_history = self.latest_history
if not latest_history:
return queryset
latest_change_secret_record = self.latest_change_secret_record
if not latest_change_secret_record:
return queryset
if latest_change_secret_record.date_created > latest_history.history_date:
temp_history = self.model(
secret=latest_change_secret_record.new_secret,
secret_type=self.account.secret_type,
version=latest_history.version,
history_date=latest_change_secret_record.date_created,
)
queryset = [temp_history] + queryset
return queryset
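
For illustration only, a hypothetical client call to the new copy-to-assets action added above; the /api/v1/accounts/accounts/ route prefix, host, and auth header are assumptions and do not appear in this diff:

curl -X POST \
  -H 'Authorization: Token <api-token>' \
  -H 'Content-Type: application/json' \
  -d '{"assets": ["<asset-uuid-1>", "<asset-uuid-2>"]}' \
  'https://jumpserver.example.com/api/v1/accounts/accounts/<account-id>/copy-to-assets/'

Per _copy_or_move_to_assets, the response is a per-asset list such as [{"asset": "...", "state": "created"}, {"asset": "...", "error": "Account already exists", "state": "error"}]; the move-to-assets variant additionally deletes the source account once at least one copy succeeds.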

View File

@ -0,0 +1,84 @@
import os
from django.conf import settings
from django.utils.translation import gettext_lazy as _, get_language
from rest_framework.decorators import action
from rest_framework.response import Response
from accounts import serializers
from accounts.models import IntegrationApplication
from audits.models import IntegrationApplicationLog
from authentication.permissions import UserConfirmation, ConfirmType
from common.exceptions import JMSException
from common.permissions import IsValidUser
from common.utils import get_request_ip
from orgs.mixins.api import OrgBulkModelViewSet
from rbac.permissions import RBACPermission
class IntegrationApplicationViewSet(OrgBulkModelViewSet):
model = IntegrationApplication
search_fields = ('name', 'comment')
serializer_classes = {
'default': serializers.IntegrationApplicationSerializer,
'get_account_secret': serializers.IntegrationAccountSecretSerializer
}
rbac_perms = {
'get_once_secret': 'accounts.change_integrationapplication',
'get_account_secret': 'accounts.view_integrationapplication'
}
def read_file(self, path):
if os.path.exists(path):
with open(path, 'r', encoding='utf-8') as file:
return file.read()
return ''
@action(
['GET'], detail=False, url_path='sdks',
permission_classes=[IsValidUser]
)
def get_sdks_info(self, request, *args, **kwargs):
code_suffix_mapper = {
'python': 'py',
'java': 'java',
'go': 'go',
'node': 'js',
'curl': 'sh',
}
sdk_language = request.query_params.get('language', 'python')
sdk_path = os.path.join(settings.APPS_DIR, 'accounts', 'demos', sdk_language)
readme_path = os.path.join(sdk_path, f'README.{get_language()}.md')
demo_path = os.path.join(sdk_path, f'demo.{code_suffix_mapper[sdk_language]}')
readme_content = self.read_file(readme_path)
demo_content = self.read_file(demo_path)
return Response(data={'readme': readme_content, 'code': demo_content})
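
A small usage sketch for the SDK info action above; the route prefix is an assumption, while the `language` query parameter and the `readme`/`code` response keys come from the code in this diff.

import requests

# Hypothetical endpoint path; the action is registered with url_path='sdks'.
url = 'https://jumpserver.example.com/api/v1/accounts/integration-applications/sdks/'
resp = requests.get(url, params={'language': 'python'},
                    headers={'Authorization': 'Token <api-token>'})
data = resp.json()
print(data['readme'][:200])  # localized README.<lang>.md content
print(data['code'][:200])    # demo file content for the chosen language
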
@action(
['GET'], detail=True, url_path='secret',
permission_classes=[RBACPermission, UserConfirmation.require(ConfirmType.MFA)]
)
def get_once_secret(self, request, *args, **kwargs):
instance = self.get_object()
return Response(data={'id': instance.id, 'secret': instance.secret})
@action(['GET'], detail=False, url_path='account-secret',
permission_classes=[RBACPermission])
def get_account_secret(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.query_params)
if not serializer.is_valid():
return Response({'error': serializer.errors}, status=400)
service = request.user
account = service.get_account(**serializer.data)
if not account:
msg = _('Account not found')
raise JMSException(code='Not found', detail='%s' % msg)
asset = account.asset
IntegrationApplicationLog.objects.create(
remote_addr=get_request_ip(request), service=service.name, service_id=service.id,
account=f'{account.name}({account.username})', asset=f'{asset.name}({asset.address})',
)
return Response(data={'id': request.user.id, 'secret': account.secret})

View File

@ -0,0 +1,130 @@
# -*- coding: utf-8 -*-
#
from collections import defaultdict
from django.db.models import Count, F, Q
from django.http.response import JsonResponse
from rest_framework.views import APIView
from accounts.models import (
Account, GatherAccountsAutomation,
PushAccountAutomation, BackupAccountAutomation,
AccountRisk, IntegrationApplication, ChangeSecretAutomation
)
from assets.const import AllTypes
from common.utils.timezone import local_monday
__all__ = ['PamDashboardApi']
class PamDashboardApi(APIView):
http_method_names = ['get']
rbac_perms = {
'GET': 'accounts.view_account',
}
@staticmethod
def get_type_to_accounts():
result = Account.objects.annotate(type=F('asset__platform__type')) \
.values('type').order_by('type').annotate(total=Count(1))
all_types_dict = dict(AllTypes.choices())
return [
{**i, 'label': all_types_dict.get(i['type'], i['type'])}
for i in result
]
@staticmethod
def get_account_risk_data(_all, query_params):
agg_map = {
'total_long_time_no_login_accounts': ('long_time_no_login_count', Q(risk='long_time_no_login')),
'total_new_found_accounts': ('new_found_count', Q(risk='new_found')),
'total_groups_changed_accounts': ('groups_changed_count', Q(risk='groups_changed')),
'total_sudoers_changed_accounts': ('sudoers_changed_count', Q(risk='sudoers_changed')),
'total_authorized_keys_changed_accounts': (
'authorized_keys_changed_count', Q(risk='authorized_keys_changed')),
'total_account_deleted_accounts': ('account_deleted_count', Q(risk='account_deleted')),
'total_password_expired_accounts': ('password_expired_count', Q(risk='password_expired')),
'total_long_time_password_accounts': ('long_time_password_count', Q(risk='long_time_password')),
'total_weak_password_accounts': ('weak_password_count', Q(risk='weak_password')),
'total_leaked_password_accounts': ('leaked_password_count', Q(risk='leaked_password')),
'total_repeated_password_accounts': ('repeated_password_count', Q(risk='repeated_password')),
}
aggregations = {
agg_key: Count('id', distinct=True, filter=agg_filter)
for param_key, (agg_key, agg_filter) in agg_map.items()
if _all or query_params.get(param_key)
}
data = {}
if aggregations:
account_stats = AccountRisk.objects.aggregate(**aggregations)
data = {param_key: account_stats.get(agg_key) for param_key, (agg_key, _) in agg_map.items() if
agg_key in account_stats}
return data
@staticmethod
def get_account_data(_all, query_params):
agg_map = {
'total_accounts': ('total_count', Count('id')),
'total_privileged_accounts': ('privileged_count', Count('id', filter=Q(privileged=True))),
'total_connectivity_ok_accounts': ('connectivity_ok_count', Count('id', filter=Q(connectivity='ok'))),
'total_secret_reset_accounts': ('secret_reset_count', Count('id', filter=Q(secret_reset=True))),
'total_valid_accounts': ('valid_count', Count('id', filter=Q(is_active=True))),
'total_week_add_accounts': ('week_add_count', Count('id', filter=Q(date_created__gte=local_monday()))),
}
aggregations = {
agg_key: agg_expr
for param_key, (agg_key, agg_expr) in agg_map.items()
if _all or query_params.get(param_key)
}
data = {}
account_stats = Account.objects.aggregate(**aggregations)
for param_key, (agg_key, __) in agg_map.items():
if agg_key in account_stats:
data[param_key] = account_stats[agg_key]
if _all or query_params.get('total_ordinary_accounts'):
if 'total_count' in account_stats and 'privileged_count' in account_stats:
data['total_ordinary_accounts'] = \
account_stats['total_count'] - account_stats['privileged_count']
return data
@staticmethod
def get_automation_counts(_all, query_params):
automation_counts = defaultdict(int)
automation_models = {
'total_count_change_secret_automation': ChangeSecretAutomation,
'total_count_gathered_account_automation': GatherAccountsAutomation,
'total_count_push_account_automation': PushAccountAutomation,
'total_count_backup_account_automation': BackupAccountAutomation,
'total_count_integration_application': IntegrationApplication,
}
for param_key, model in automation_models.items():
if _all or query_params.get(param_key):
automation_counts[param_key] = model.objects.count()
return automation_counts
def get(self, request, *args, **kwargs):
query_params = self.request.query_params
_all = query_params.get('all')
data = {}
data.update(self.get_account_data(_all, query_params))
data.update(self.get_account_risk_data(_all, query_params))
data.update(self.get_automation_counts(_all, query_params))
if _all or query_params.get('total_count_type_to_accounts'):
data.update({
'total_count_type_to_accounts': self.get_type_to_accounts(),
})
return JsonResponse(data, status=200)
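
A hedged sketch of querying the PAM dashboard endpoint; the URL is assumed, the query flags are the parameter keys from the aggregation maps above.

import requests

url = 'https://jumpserver.example.com/api/v1/accounts/pam-dashboard/'  # assumed path
headers = {'Authorization': 'Token <api-token>'}

# Ask for everything at once ...
everything = requests.get(url, params={'all': '1'}, headers=headers).json()

# ... or only the counters you need, one flag per key in the agg maps above.
subset = requests.get(url, params={
    'total_accounts': '1',
    'total_privileged_accounts': '1',
    'total_weak_password_accounts': '1',
}, headers=headers).json()
print(subset)  # e.g. {'total_accounts': 120, 'total_privileged_accounts': 8, ...}
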

View File

@ -1,5 +1,7 @@
from .backup import *
from .base import *
from .change_secret import *
from .gather_accounts import *
from .change_secret_dashboard import *
from .check_account import *
from .gather_account import *
from .push_account import *

View File

@ -1,41 +1,36 @@
# -*- coding: utf-8 -*-
#
from rest_framework import status, viewsets
from rest_framework.response import Response
from accounts import serializers
from accounts.const import AutomationTypes
from accounts.models import (
AccountBackupAutomation, AccountBackupExecution
BackupAccountAutomation
)
from accounts.tasks import execute_account_backup_task
from common.const.choices import Trigger
from orgs.mixins.api import OrgBulkModelViewSet
from .base import AutomationExecutionViewSet
__all__ = [
'AccountBackupPlanViewSet', 'AccountBackupPlanExecutionViewSet'
'BackupAccountViewSet', 'BackupAccountExecutionViewSet'
]
class AccountBackupPlanViewSet(OrgBulkModelViewSet):
model = AccountBackupAutomation
class BackupAccountViewSet(OrgBulkModelViewSet):
model = BackupAccountAutomation
filterset_fields = ('name',)
search_fields = filterset_fields
serializer_class = serializers.AccountBackupSerializer
serializer_class = serializers.BackupAccountSerializer
class AccountBackupPlanExecutionViewSet(viewsets.ModelViewSet):
serializer_class = serializers.AccountBackupPlanExecutionSerializer
search_fields = ('trigger', 'plan__name')
filterset_fields = ('trigger', 'plan_id', 'plan__name')
http_method_names = ['get', 'post', 'options']
class BackupAccountExecutionViewSet(AutomationExecutionViewSet):
rbac_perms = (
("list", "accounts.view_backupaccountexecution"),
("retrieve", "accounts.view_backupaccountexecution"),
("create", "accounts.add_backupaccountexecution"),
("report", "accounts.view_backupaccountexecution"),
)
tp = AutomationTypes.backup_account
def get_queryset(self):
queryset = AccountBackupExecution.objects.all()
queryset = super().get_queryset()
queryset = queryset.filter(type=self.tp)
return queryset
def create(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
pid = serializer.data.get('plan')
task = execute_account_backup_task.delay(pid=str(pid), trigger=Trigger.manual)
return Response({'task': task.id}, status=status.HTTP_201_CREATED)

View File

@ -1,8 +1,12 @@
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from django.utils.translation import gettext_lazy as _
from django.views.decorators.clickjacking import xframe_options_sameorigin
from rest_framework import status, mixins, viewsets
from rest_framework.decorators import action
from rest_framework.response import Response
from accounts.filters import AutomationExecutionFilterSet
from accounts.models import AutomationExecution
from accounts.tasks import execute_account_automation_task
from assets import serializers
@ -13,7 +17,7 @@ from orgs.mixins import generics
__all__ = [
'AutomationAssetsListApi', 'AutomationRemoveAssetApi',
'AutomationAddAssetApi', 'AutomationNodeAddRemoveApi',
'AutomationExecutionViewSet',
'AutomationExecutionViewSet'
]
@ -35,9 +39,10 @@ class AutomationAssetsListApi(generics.ListAPIView):
return assets
class AutomationRemoveAssetApi(generics.RetrieveUpdateAPIView):
class AutomationRemoveAssetApi(generics.UpdateAPIView):
model = BaseAutomation
serializer_class = serializers.UpdateAssetSerializer
http_method_names = ['patch']
def update(self, request, *args, **kwargs):
instance = self.get_object()
@ -52,9 +57,10 @@ class AutomationRemoveAssetApi(generics.RetrieveUpdateAPIView):
return Response({'msg': 'ok'})
class AutomationAddAssetApi(generics.RetrieveUpdateAPIView):
class AutomationAddAssetApi(generics.UpdateAPIView):
model = BaseAutomation
serializer_class = serializers.UpdateAssetSerializer
http_method_names = ['patch']
def update(self, request, *args, **kwargs):
instance = self.get_object()
@ -68,9 +74,10 @@ class AutomationAddAssetApi(generics.RetrieveUpdateAPIView):
return Response({"error": serializer.errors})
class AutomationNodeAddRemoveApi(generics.RetrieveUpdateAPIView):
class AutomationNodeAddRemoveApi(generics.UpdateAPIView):
model = BaseAutomation
serializer_class = serializers.UpdateNodeSerializer
http_method_names = ['patch']
def update(self, request, *args, **kwargs):
action_params = ['add', 'remove']
@ -97,8 +104,8 @@ class AutomationExecutionViewSet(
):
search_fields = ('trigger', 'automation__name')
filterset_fields = ('trigger', 'automation_id', 'automation__name')
filterset_class = AutomationExecutionFilterSet
serializer_class = serializers.AutomationExecutionSerializer
tp: str
def get_queryset(self):
@ -113,3 +120,10 @@ class AutomationExecutionViewSet(
pid=str(automation.pk), trigger=Trigger.manual, tp=self.tp
)
return Response({'task': task.id}, status=status.HTTP_201_CREATED)
@xframe_options_sameorigin
@action(methods=['get'], detail=True, url_path='report')
def report(self, request, *args, **kwargs):
execution = self.get_object()
report = execution.manager.gen_report()
return HttpResponse(report)
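
The report action returns rendered HTML rather than JSON, so a client would typically save or embed it. A sketch, with the execution id and route as placeholders:

import requests

# Hypothetical route for an execution's report (the action uses url_path='report').
url = 'https://jumpserver.example.com/api/v1/accounts/automation-executions/<execution-id>/report/'
resp = requests.get(url, headers={'Authorization': 'Token <api-token>'})

# The view is wrapped with @xframe_options_sameorigin, so the HTML can also be
# embedded in an iframe served from the same origin.
with open('execution-report.html', 'w', encoding='utf-8') as f:
    f.write(resp.text)
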

View File

@ -1,15 +1,17 @@
# -*- coding: utf-8 -*-
#
from django.db.models import Max, Q, Subquery, OuterRef
from rest_framework import status, mixins
from rest_framework.decorators import action
from rest_framework.response import Response
from accounts import serializers
from accounts.const import AutomationTypes
from accounts.const import AutomationTypes, ChangeSecretRecordStatusChoice
from accounts.filters import ChangeSecretRecordFilterSet
from accounts.models import ChangeSecretAutomation, ChangeSecretRecord
from accounts.tasks import execute_automation_record_task
from authentication.permissions import UserConfirmation, ConfirmType
from common.permissions import IsValidLicense
from orgs.mixins.api import OrgBulkModelViewSet, OrgGenericViewSet
from rbac.permissions import RBACPermission
from .base import (
@ -27,6 +29,7 @@ __all__ = [
class ChangeSecretAutomationViewSet(OrgBulkModelViewSet):
model = ChangeSecretAutomation
permission_classes = [RBACPermission, IsValidLicense]
filterset_fields = ('name', 'secret_type', 'secret_strategy')
search_fields = filterset_fields
serializer_class = serializers.ChangeSecretAutomationSerializer
@ -34,7 +37,9 @@ class ChangeSecretAutomationViewSet(OrgBulkModelViewSet):
class ChangeSecretRecordViewSet(mixins.ListModelMixin, OrgGenericViewSet):
filterset_class = ChangeSecretRecordFilterSet
search_fields = ('asset__address',)
permission_classes = [RBACPermission, IsValidLicense]
search_fields = ('asset__address', 'account__username')
ordering_fields = ('date_finished',)
tp = AutomationTypes.change_secret
serializer_classes = {
'default': serializers.ChangeSecretRecordSerializer,
@ -43,6 +48,8 @@ class ChangeSecretRecordViewSet(mixins.ListModelMixin, OrgGenericViewSet):
rbac_perms = {
'execute': 'accounts.add_changesecretexecution',
'secret': 'accounts.view_changesecretrecord',
'dashboard': 'accounts.view_changesecretrecord',
'ignore_fail': 'accounts.view_changesecretrecord',
}
def get_permissions(self):
@ -53,8 +60,35 @@ class ChangeSecretRecordViewSet(mixins.ListModelMixin, OrgGenericViewSet):
]
return super().get_permissions()
def filter_queryset(self, queryset):
queryset = super().filter_queryset(queryset)
if self.action == 'dashboard':
return self.get_dashboard_queryset(queryset)
return queryset
@staticmethod
def get_dashboard_queryset(queryset):
recent_dates = queryset.values('account').annotate(
max_date_finished=Max('date_finished')
)
recent_success_accounts = queryset.filter(
account=OuterRef('account'),
date_finished=Subquery(
recent_dates.filter(account=OuterRef('account')).values('max_date_finished')[:1]
)
).filter(Q(status=ChangeSecretRecordStatusChoice.success))
failed_records = queryset.filter(
~Q(account__in=Subquery(recent_success_accounts.values('account'))),
status=ChangeSecretRecordStatusChoice.failed,
ignore_fail=False
)
return failed_records
def get_queryset(self):
return ChangeSecretRecord.objects.all()
return ChangeSecretRecord.get_valid_records()
@action(methods=['post'], detail=False, url_path='execute')
def execute(self, request, *args, **kwargs):
@ -75,19 +109,31 @@ class ChangeSecretRecordViewSet(mixins.ListModelMixin, OrgGenericViewSet):
serializer = self.get_serializer(instance)
return Response(serializer.data)
@action(methods=['get'], detail=False, url_path='dashboard')
def dashboard(self, request, *args, **kwargs):
return super().list(request, *args, **kwargs)
@action(methods=['patch'], detail=True, url_path='ignore-fail')
def ignore_fail(self, request, *args, **kwargs):
instance = self.get_object()
instance.ignore_fail = True
instance.save(update_fields=['ignore_fail'])
return Response(status=status.HTTP_200_OK)
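
A client-side sketch of the two new record actions; the route prefix is assumed, the action paths and semantics come from the code above.

import requests

BASE = 'https://jumpserver.example.com/api/v1/accounts/change-secret-records'  # assumed prefix
headers = {'Authorization': 'Token <api-token>'}

# Dashboard listing: only records whose account's most recent run failed
# and which have not been marked ignore_fail.
failed = requests.get(f'{BASE}/dashboard/', headers=headers).json()

# Mark one failed record as ignored so it drops out of the dashboard.
requests.patch(f'{BASE}/<record-id>/ignore-fail/', headers=headers)
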
class ChangSecretExecutionViewSet(AutomationExecutionViewSet):
rbac_perms = (
("list", "accounts.view_changesecretexecution"),
("retrieve", "accounts.view_changesecretexecution"),
("create", "accounts.add_changesecretexecution"),
("report", "accounts.view_changesecretexecution"),
)
permission_classes = [RBACPermission, IsValidLicense]
tp = AutomationTypes.change_secret
def get_queryset(self):
queryset = super().get_queryset()
queryset = queryset.filter(automation__type=self.tp)
queryset = queryset.filter(type=self.tp)
return queryset

View File

@ -0,0 +1,185 @@
# -*- coding: utf-8 -*-
#
from collections import defaultdict
from django.core.cache import cache
from django.http.response import JsonResponse
from django.utils import timezone
from rest_framework.views import APIView
from accounts.const import AutomationTypes, ChangeSecretRecordStatusChoice
from accounts.models import ChangeSecretAutomation, AutomationExecution, ChangeSecretRecord
from assets.models import Node, Asset
from common.const import Status
from common.permissions import IsValidLicense
from common.utils import lazyproperty
from common.utils.timezone import local_zero_hour, local_now
from ops.celery import app
from rbac.permissions import RBACPermission
__all__ = ['ChangeSecretDashboardApi']
class ChangeSecretDashboardApi(APIView):
http_method_names = ['get']
rbac_perms = {
'GET': 'accounts.view_changesecretautomation',
}
permission_classes = [RBACPermission, IsValidLicense]
tp = AutomationTypes.change_secret
task_name = 'accounts.tasks.automation.execute_account_automation_task'
ongoing_change_secret_cache_key = "ongoing_change_secret_cache_key"
@lazyproperty
def days(self):
count = self.request.query_params.get('days', 1)
return int(count)
@property
def days_to_datetime(self):
if self.days == 1:
return local_zero_hour()
return local_now() - timezone.timedelta(days=self.days)
def get_queryset_date_filter(self, qs, query_field='date_updated'):
return qs.filter(**{f'{query_field}__gte': self.days_to_datetime})
@lazyproperty
def date_range_list(self):
return [
(local_now() - timezone.timedelta(days=i)).date()
for i in range(self.days - 1, -1, -1)
]
def filter_by_date_range(self, queryset, field_name):
date_range_bounds = self.days_to_datetime.date(), (local_now() + timezone.timedelta(days=1)).date()
return queryset.filter(**{f'{field_name}__range': date_range_bounds})
def calculate_daily_metrics(self, queryset, date_field):
filtered_queryset = self.filter_by_date_range(queryset, date_field)
results = filtered_queryset.values_list(date_field, 'status')
status_counts = defaultdict(lambda: defaultdict(int))
for date_finished, status in results:
date_str = str(date_finished.date())
if status == ChangeSecretRecordStatusChoice.failed:
status_counts[date_str]['failed'] += 1
elif status == ChangeSecretRecordStatusChoice.success:
status_counts[date_str]['success'] += 1
metrics = defaultdict(list)
for date in self.date_range_list:
date_str = str(date)
for status in ['success', 'failed']:
metrics[status].append(status_counts[date_str].get(status, 0))
return metrics
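
For orientation, a tiny illustrative example of the shape this helper returns (numbers are made up): with days=3, date_range_list holds the last three dates and each list carries one counter per day, oldest first.

# Illustrative only -- not produced by the diff above.
metrics = {
    'success': [4, 0, 7],   # successful change-secret records per day
    'failed':  [1, 2, 0],   # failed records per day, aligned with date_range_list
}
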
def get_daily_success_and_failure_metrics(self):
metrics = self.calculate_daily_metrics(self.change_secret_records_queryset, 'date_finished')
return metrics.get('success', []), metrics.get('failed', [])
@lazyproperty
def change_secrets_queryset(self):
return ChangeSecretAutomation.objects.all()
@lazyproperty
def change_secret_records_queryset(self):
return ChangeSecretRecord.get_valid_records()
def get_change_secret_asset_queryset(self):
qs = self.change_secrets_queryset
node_ids = qs.filter(nodes__isnull=False).values_list('nodes', flat=True).distinct()
nodes = Node.objects.filter(id__in=node_ids)
node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list('id', flat=True)
direct_asset_ids = qs.filter(assets__isnull=False).values_list('assets', flat=True).distinct()
asset_ids = set(list(direct_asset_ids) + list(node_asset_ids))
return Asset.objects.filter(id__in=asset_ids)
def get_filtered_counts(self, qs, field=None):
if field is None:
return qs.count()
return self.get_queryset_date_filter(qs, field).count()
def get_status_counts(self, executions):
executions = executions.filter(type=self.tp)
total, failed, success = 0, 0, 0
for status in executions.values_list('status', flat=True):
total += 1
if status in [Status.failed, Status.error]:
failed += 1
elif status == Status.success:
success += 1
return {
'total_count_change_secret_executions': total,
'total_count_success_change_secret_executions': success,
'total_count_failed_change_secret_executions': failed,
}
def get(self, request, *args, **kwargs):
query_params = self.request.query_params
data = {}
_all = query_params.get('all')
if _all or query_params.get('total_count_change_secrets'):
data['total_count_change_secrets'] = self.get_filtered_counts(
self.change_secrets_queryset
)
if _all or query_params.get('total_count_periodic_change_secrets'):
data['total_count_periodic_change_secrets'] = self.get_filtered_counts(
self.change_secrets_queryset.filter(is_periodic=True)
)
if _all or query_params.get('total_count_change_secret_assets'):
data['total_count_change_secret_assets'] = self.get_change_secret_asset_queryset().count()
if _all or query_params.get('total_count_change_secret_status'):
executions = self.get_queryset_date_filter(AutomationExecution.objects.all(), 'date_start')
data.update(self.get_status_counts(executions))
if _all or query_params.get('daily_success_and_failure_metrics'):
success, failed = self.get_daily_success_and_failure_metrics()
data.update({
'dates_metrics_date': [date.strftime('%m-%d') for date in self.date_range_list] or ['0'],
'dates_metrics_total_count_success': success,
'dates_metrics_total_count_failed': failed,
})
if _all or query_params.get('total_count_ongoing_change_secret'):
ongoing_counts = cache.get(self.ongoing_change_secret_cache_key)
if ongoing_counts is None:
execution_ids = []
inspect = app.control.inspect()
try:
active_tasks = inspect.active()
except Exception:
active_tasks = None
if active_tasks:
for tasks in active_tasks.values():
for task in tasks:
_id = task.get('id')
name = task.get('name')
tp = task.get('kwargs', {}).get('tp')
if name == self.task_name and tp == self.tp:
execution_ids.append(_id)
snapshots = AutomationExecution.objects.filter(id__in=execution_ids).values_list('snapshot', flat=True)
asset_ids = {asset for i in snapshots for asset in i.get('assets', [])}
account_ids = {account for i in snapshots for account in i.get('accounts', [])}
ongoing_counts = (len(execution_ids), len(asset_ids), len(account_ids))
data['total_count_ongoing_change_secret'] = ongoing_counts[0]
data['total_count_ongoing_change_secret_assets'] = ongoing_counts[1]
data['total_count_ongoing_change_secret_accounts'] = ongoing_counts[2]
cache.set(self.ongoing_change_secret_cache_key, ongoing_counts, 60)
else:
data['total_count_ongoing_change_secret'] = ongoing_counts[0]
data['total_count_ongoing_change_secret_assets'] = ongoing_counts[1]
data['total_count_ongoing_change_secret_accounts'] = ongoing_counts[2]
return JsonResponse(data, status=200)
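
A hedged sketch of calling the change-secret dashboard; the route is assumed, the query flags and response keys are taken from the code above.

import requests

url = 'https://jumpserver.example.com/api/v1/accounts/change-secret-dashboard/'  # assumed path
headers = {'Authorization': 'Token <api-token>'}

# Last 7 days of success/failure metrics plus the ongoing-task counters.
data = requests.get(url, params={
    'days': 7,
    'daily_success_and_failure_metrics': '1',
    'total_count_ongoing_change_secret': '1',
}, headers=headers).json()
print(data['dates_metrics_date'])                  # e.g. ['04-21', '04-22', ...]
print(data['dates_metrics_total_count_success'])   # per-day success counts
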

View File

@ -0,0 +1,162 @@
# -*- coding: utf-8 -*-
#
from django.db.models import Q, Count
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from rest_framework.decorators import action
from rest_framework.exceptions import MethodNotAllowed
from rest_framework.response import Response
from accounts import serializers
from accounts.const import AutomationTypes
from accounts.models import (
CheckAccountAutomation,
AccountRisk,
RiskChoice,
CheckAccountEngine,
AutomationExecution,
)
from assets.models import Asset
from common.api import JMSModelViewSet
from common.permissions import IsValidLicense
from common.utils import many_get
from orgs.mixins.api import OrgBulkModelViewSet
from rbac.permissions import RBACPermission
from .base import AutomationExecutionViewSet
from ...filters import NodeFilterBackend
from ...risk_handlers import RiskHandler
__all__ = [
"CheckAccountAutomationViewSet",
"CheckAccountExecutionViewSet",
"AccountRiskViewSet",
"CheckAccountEngineViewSet",
]
class CheckAccountAutomationViewSet(OrgBulkModelViewSet):
model = CheckAccountAutomation
filterset_fields = ("name",)
search_fields = filterset_fields
permission_classes = [RBACPermission, IsValidLicense]
serializer_class = serializers.CheckAccountAutomationSerializer
class CheckAccountExecutionViewSet(AutomationExecutionViewSet):
rbac_perms = (
("list", "accounts.view_checkaccountexecution"),
("retrieve", "accounts.view_checkaccountsexecution"),
("create", "accounts.add_checkaccountexecution"),
("adhoc", "accounts.add_checkaccountexecution"),
("report", "accounts.view_checkaccountsexecution"),
)
ordering = ("-date_created",)
tp = AutomationTypes.check_account
permission_classes = [RBACPermission, IsValidLicense]
def get_queryset(self):
queryset = super().get_queryset()
queryset = queryset.filter(type=self.tp)
return queryset
@action(methods=["get"], detail=False, url_path="adhoc")
def adhoc(self, request, *args, **kwargs):
asset_id = request.query_params.get("asset_id")
if not asset_id:
return Response(status=400, data={"asset_id": "This field is required."})
asset = get_object_or_404(Asset, pk=asset_id)
name = "Check asset risk: {}".format(asset.name)
execution = AutomationExecution()
execution.snapshot = {
"assets": [asset_id],
"nodes": [],
"type": AutomationTypes.check_account,
"engines": "__all__",
"name": name,
}
execution.save()
execution.start()
report = execution.manager.gen_report()
return HttpResponse(report)
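
The adhoc action runs a one-off account check for a single asset and responds with the rendered report. A sketch, with the route and asset id as placeholders:

import requests

url = 'https://jumpserver.example.com/api/v1/accounts/check-account-executions/adhoc/'  # assumed path
resp = requests.get(url, params={'asset_id': '<asset-id>'},
                    headers={'Authorization': 'Token <api-token>'})
# A missing asset_id returns 400 with {"asset_id": "This field is required."};
# otherwise the body is the HTML report from execution.manager.gen_report().
print(resp.status_code)
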
class AccountRiskViewSet(OrgBulkModelViewSet):
model = AccountRisk
search_fields = ["username", "asset__name"]
filterset_fields = ("risk", "status", "asset_id")
extra_filter_backends = [NodeFilterBackend]
permission_classes = [RBACPermission, IsValidLicense]
serializer_classes = {
"default": serializers.AccountRiskSerializer,
"assets": serializers.AssetRiskSerializer,
"handle": serializers.HandleRiskSerializer,
}
ordering_fields = ("asset", "risk", "status", "username", "date_created")
ordering = ("status", "asset", "date_created")
rbac_perms = {
"sync_accounts": "assets.add_accountrisk",
"assets": "accounts.view_accountrisk",
"handle": "accounts.change_accountrisk",
}
def update(self, request, *args, **kwargs):
raise MethodNotAllowed("PUT")
def create(self, request, *args, **kwargs):
raise MethodNotAllowed("POST")
@action(methods=["get"], detail=False, url_path="assets")
def assets(self, request, *args, **kwargs):
annotations = {
f"{risk[0]}_count": Count("id", filter=Q(risk=risk[0]))
for risk in RiskChoice.choices
}
queryset = (
AccountRisk.objects.select_related(
"asset", "asset__platform"
) # use select_related to optimize the asset and asset__platform lookups
.values(
"asset__id", "asset__name", "asset__address", "asset__platform__name"
) # select the fields needed by the response
.annotate(risk_total=Count("id")) # total number of risks per asset
.annotate(**annotations) # per-risk counts using the annotations defined above
)
return self.get_paginated_response_from_queryset(queryset)
@action(methods=["post"], detail=False, url_path="handle")
def handle(self, request, *args, **kwargs):
s = self.get_serializer(data=request.data)
s.is_valid(raise_exception=True)
asset, username, act, risk = many_get(
s.validated_data, ("asset", "username", "action", "risk")
)
handler = RiskHandler(asset=asset, username=username, request=self.request)
try:
risk = handler.handle(act, risk)
s = serializers.AccountRiskSerializer(instance=risk)
return Response(data=s.data)
except Exception as e:
return Response(status=400, data=str(e))
class CheckAccountEngineViewSet(JMSModelViewSet):
search_fields = ("name",)
serializer_class = serializers.CheckAccountEngineSerializer
permission_classes = [RBACPermission, IsValidLicense]
perm_model = CheckAccountEngine
http_method_names = ['get', 'options']
def get_queryset(self):
return CheckAccountEngine.get_default_engines()
def filter_queryset(self, queryset: list):
search = self.request.GET.get('search')
if search is not None:
queryset = [
item for item in queryset
if search in item['name']
]
return queryset

View File

@ -0,0 +1,131 @@
# -*- coding: utf-8 -*-
#
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from django.views.decorators.clickjacking import xframe_options_sameorigin
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.response import Response
from accounts import serializers
from accounts.const import AutomationTypes
from accounts.filters import GatheredAccountFilterSet, NodeFilterBackend
from accounts.models import GatherAccountsAutomation, AutomationExecution, Account
from accounts.models import GatheredAccount
from assets.models import Asset
from common.const import ConfirmOrIgnore
from common.utils.http import is_true
from orgs.mixins.api import OrgBulkModelViewSet
from .base import AutomationExecutionViewSet
__all__ = [
"DiscoverAccountsAutomationViewSet",
"DiscoverAccountsExecutionViewSet",
"GatheredAccountViewSet",
]
from ...risk_handlers import RiskHandler
class DiscoverAccountsAutomationViewSet(OrgBulkModelViewSet):
model = GatherAccountsAutomation
filterset_fields = ("name",)
search_fields = filterset_fields
serializer_class = serializers.DiscoverAccountAutomationSerializer
class DiscoverAccountsExecutionViewSet(AutomationExecutionViewSet):
rbac_perms = (
("list", "accounts.view_gatheraccountsexecution"),
("retrieve", "accounts.view_gatheraccountsexecution"),
("create", "accounts.add_gatheraccountsexecution"),
("adhoc", "accounts.add_gatheraccountsexecution"),
("report", "accounts.view_gatheraccountsexecution"),
)
tp = AutomationTypes.gather_accounts
def get_queryset(self):
queryset = super().get_queryset()
queryset = queryset.filter(type=self.tp)
return queryset
@xframe_options_sameorigin
@action(methods=["get"], detail=False, url_path="adhoc")
def adhoc(self, request, *args, **kwargs):
asset_id = request.query_params.get("asset_id")
if not asset_id:
return Response(status=400, data={"asset_id": "This field is required."})
asset = get_object_or_404(Asset, pk=asset_id)
execution = AutomationExecution()
execution.snapshot = {
"assets": [asset_id],
"nodes": [],
"type": "gather_accounts",
"is_sync_account": False,
"check_risk": True,
"name": "Adhoc gather accounts: {}".format(asset.name),
}
execution.save()
execution.start()
report = execution.manager.gen_report()
return HttpResponse(report)
class GatheredAccountViewSet(OrgBulkModelViewSet):
model = GatheredAccount
search_fields = ("username",)
filterset_class = GatheredAccountFilterSet
extra_filter_backends = [NodeFilterBackend]
ordering = ("status",)
serializer_classes = {
"default": serializers.DiscoverAccountSerializer,
"status": serializers.DiscoverAccountActionSerializer,
"details": serializers.DiscoverAccountDetailsSerializer
}
rbac_perms = {
"status": "assets.change_gatheredaccount",
"details": "assets.view_gatheredaccount"
}
@action(methods=["put"], detail=False, url_path="status")
def status(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
if not serializer.is_valid():
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
validated_data = serializer.validated_data
ids = validated_data.get('ids', [])
new_status = validated_data.get('status')
updated_instances = GatheredAccount.objects.filter(id__in=ids).select_related('asset')
if new_status == ConfirmOrIgnore.confirmed:
GatheredAccount.sync_accounts(updated_instances)
updated_instances.update(present=True)
updated_instances.update(status=new_status)
return Response(status=status.HTTP_200_OK)
def perform_destroy(self, instance):
request = self.request
params = request.query_params
is_delete_remote = params.get("is_delete_remote")
is_delete_account = params.get("is_delete_account")
asset_id = params.get("asset")
username = params.get("username")
if is_true(is_delete_remote):
self._delete_remote(asset_id, username)
if is_true(is_delete_account):
account = get_object_or_404(Account, username=username, asset_id=asset_id)
account.delete()
super().perform_destroy(instance)
def _delete_remote(self, asset_id, username):
asset = get_object_or_404(Asset, pk=asset_id)
handler = RiskHandler(asset, username, request=self.request)
handler.handle_delete_remote()
@action(methods=["get"], detail=True, url_path="details")
def details(self, request, *args, **kwargs):
pk = kwargs.get('pk')
account = get_object_or_404(GatheredAccount, pk=pk)
serializer = self.get_serializer(account.detail)
return Response(data=serializer.data)
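
A sketch of the bulk status update and of deleting a gathered account together with its remote and local counterparts; the routes and ids are placeholders, and the confirmed status value is assumed to match ConfirmOrIgnore.confirmed. The field and query parameter names come from the code above.

import requests

BASE = 'https://jumpserver.example.com/api/v1/accounts/gathered-accounts'  # assumed prefix
headers = {'Authorization': 'Token <api-token>'}

# Confirm a batch of discovered accounts; confirming also syncs them into Account.
requests.put(f'{BASE}/status/', headers=headers, json={
    'ids': ['<gathered-account-id-1>', '<gathered-account-id-2>'],
    'status': 'confirmed',  # assumed value of ConfirmOrIgnore.confirmed
})

# Delete one gathered account and, optionally, the remote account and the local Account row.
requests.delete(f'{BASE}/<gathered-account-id>/', headers=headers, params={
    'is_delete_remote': 'true',
    'is_delete_account': 'true',
    'asset': '<asset-id>',
    'username': 'root',
})
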

View File

@ -1,59 +0,0 @@
# -*- coding: utf-8 -*-
#
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.response import Response
from accounts import serializers
from accounts.const import AutomationTypes
from accounts.filters import GatheredAccountFilterSet
from accounts.models import GatherAccountsAutomation
from accounts.models import GatheredAccount
from orgs.mixins.api import OrgBulkModelViewSet
from .base import AutomationExecutionViewSet
__all__ = [
'GatherAccountsAutomationViewSet', 'GatherAccountsExecutionViewSet',
'GatheredAccountViewSet'
]
class GatherAccountsAutomationViewSet(OrgBulkModelViewSet):
model = GatherAccountsAutomation
filterset_fields = ('name',)
search_fields = filterset_fields
serializer_class = serializers.GatherAccountAutomationSerializer
class GatherAccountsExecutionViewSet(AutomationExecutionViewSet):
rbac_perms = (
("list", "accounts.view_gatheraccountsexecution"),
("retrieve", "accounts.view_gatheraccountsexecution"),
("create", "accounts.add_gatheraccountsexecution"),
)
tp = AutomationTypes.gather_accounts
def get_queryset(self):
queryset = super().get_queryset()
queryset = queryset.filter(automation__type=self.tp)
return queryset
class GatheredAccountViewSet(OrgBulkModelViewSet):
model = GatheredAccount
search_fields = ('username',)
filterset_class = GatheredAccountFilterSet
serializer_classes = {
'default': serializers.GatheredAccountSerializer,
}
rbac_perms = {
'sync_accounts': 'assets.add_gatheredaccount',
}
@action(methods=['post'], detail=False, url_path='sync-accounts')
def sync_accounts(self, request, *args, **kwargs):
gathered_account_ids = request.data.get('gathered_account_ids')
gathered_accounts = self.model.objects.filter(id__in=gathered_account_ids)
self.model.sync_accounts(gathered_accounts)
return Response(status=status.HTTP_201_CREATED)

View File

@ -1,15 +1,16 @@
# -*- coding: utf-8 -*-
#
from rest_framework import mixins
from accounts import serializers
from accounts.const import AutomationTypes
from accounts.models import PushAccountAutomation, ChangeSecretRecord
from orgs.mixins.api import OrgBulkModelViewSet
from accounts.filters import PushAccountRecordFilterSet
from accounts.models import PushAccountAutomation, PushSecretRecord
from orgs.mixins.api import OrgBulkModelViewSet, OrgGenericViewSet
from .base import (
AutomationAssetsListApi, AutomationRemoveAssetApi, AutomationAddAssetApi,
AutomationNodeAddRemoveApi, AutomationExecutionViewSet
)
from .change_secret import ChangeSecretRecordViewSet
__all__ = [
'PushAccountAutomationViewSet', 'PushAccountAssetsListApi', 'PushAccountRemoveAssetApi',
@ -30,24 +31,28 @@ class PushAccountExecutionViewSet(AutomationExecutionViewSet):
("list", "accounts.view_pushaccountexecution"),
("retrieve", "accounts.view_pushaccountexecution"),
("create", "accounts.add_pushaccountexecution"),
("report", "accounts.view_pushaccountexecution"),
)
tp = AutomationTypes.push_account
def get_queryset(self):
queryset = super().get_queryset()
queryset = queryset.filter(automation__type=self.tp)
queryset = queryset.filter(type=self.tp)
return queryset
class PushAccountRecordViewSet(ChangeSecretRecordViewSet):
serializer_class = serializers.ChangeSecretRecordSerializer
class PushAccountRecordViewSet(mixins.ListModelMixin, OrgGenericViewSet):
filterset_class = PushAccountRecordFilterSet
search_fields = ('asset__address', 'account__username')
ordering_fields = ('date_finished',)
tp = AutomationTypes.push_account
serializer_classes = {
'default': serializers.PushSecretRecordSerializer,
}
def get_queryset(self):
return ChangeSecretRecord.objects.filter(
execution__automation__type=AutomationTypes.push_account
)
return PushSecretRecord.get_valid_records()
class PushAccountAssetsListApi(AutomationAssetsListApi):

View File

@ -3,15 +3,17 @@ import time
from collections import defaultdict, OrderedDict
from django.conf import settings
from django.db.models import F
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
from xlsxwriter import Workbook
from accounts.const import AccountBackupType
from accounts.models.automations.backup_account import AccountBackupAutomation
from accounts.models import BackupAccountAutomation, Account
from accounts.notifications import AccountBackupExecutionTaskMsg, AccountBackupByObjStorageExecutionTaskMsg
from accounts.serializers import AccountSecretSerializer
from assets.const import AllTypes
from common.const import Status
from common.utils.file import encrypt_and_compress_zip_file, zip_files
from common.utils.timezone import local_now_filename, local_now_display
from terminal.models.component.storage import ReplayStorage
@ -20,6 +22,7 @@ from users.models import User
PATH = os.path.join(os.path.dirname(settings.BASE_DIR), 'tmp')
split_help_text = _('The account key will be split into two parts and sent')
class RecipientsNotFound(Exception):
pass
@ -33,17 +36,26 @@ class BaseAccountHandler:
if isinstance(v, OrderedDict):
cls.unpack_data(v, data)
else:
if isinstance(v, dict):
v = v.get('label')
elif v is None:
v = ''
data[k] = v
return data
@classmethod
def get_header_fields(cls, serializer: serializers.Serializer):
try:
backup_fields = getattr(serializer, 'Meta').fields_backup
exclude_backup_fields = getattr(serializer, 'Meta').exclude_backup_fields
except AttributeError:
backup_fields = serializer.fields.keys()
exclude_backup_fields = []
backup_fields = serializer.fields.keys()
header_fields = {}
for field in backup_fields:
if field in exclude_backup_fields:
continue
v = serializer.fields[field]
if isinstance(v, serializers.Serializer):
_fields = cls.get_header_fields(v)
@ -73,9 +85,9 @@ class BaseAccountHandler:
class AssetAccountHandler(BaseAccountHandler):
@staticmethod
def get_filename(plan_name):
def get_filename(name):
filename = os.path.join(
PATH, f'{plan_name}-{local_now_filename()}-{time.time()}.xlsx'
PATH, f'{name}-{local_now_filename()}-{time.time()}.xlsx'
)
return filename
@ -117,32 +129,41 @@ class AssetAccountHandler(BaseAccountHandler):
cls.handler_secret(data, section)
data_map.update(cls.add_rows(data, header_fields, sheet_name))
number_of_backup_accounts = _('Number of backup accounts')
print('\n\033[33m- {}: {}\033[0m'.format(number_of_backup_accounts, accounts.count()))
print('\033[33m- {}: {}\033[0m'.format(number_of_backup_accounts, accounts.count()))
return data_map
class AccountBackupHandler:
def __init__(self, execution):
def __init__(self, manager, execution):
self.manager = manager
self.execution = execution
self.plan_name = self.execution.plan.name
self.is_frozen = False # flag marking the task state as frozen
self.name = self.execution.snapshot.get('name', '-')
def get_accounts(self):
# TODO: optimize this query by caching the category on the account, to avoid cross-table joins on large datasets
types = self.execution.snapshot.get('types', [])
self.manager.summary['total_types'] = len(types)
qs = Account.objects.filter(
asset__platform__type__in=types
).annotate(type=F('asset__platform__type'))
return qs
def create_excel(self, section='complete'):
hint = _('Generating asset or application related backup information files')
hint = _('Generating asset related backup information files')
print(
'\n'
f'\033[32m>>> {hint}\033[0m'
''
)
# Print task start date
time_start = time.time()
files = []
accounts = self.execution.backup_accounts
accounts = self.get_accounts()
self.manager.summary['total_accounts'] = accounts.count()
data_map = AssetAccountHandler.create_data_map(accounts, section)
if not data_map:
return files
filename = AssetAccountHandler.get_filename(self.plan_name)
filename = AssetAccountHandler.get_filename(self.name)
wb = Workbook(filename)
for sheet, data in data_map.items():
@ -153,7 +174,7 @@ class AccountBackupHandler:
wb.close()
files.append(filename)
timedelta = round((time.time() - time_start), 2)
time_cost = _('Time cost')
time_cost = _('Duration')
file_created = _('Backup file creation completed')
print('{}: {} {}s'.format(file_created, time_cost, timedelta))
return files
@ -162,22 +183,21 @@ class AccountBackupHandler:
if not files:
return
recipients = User.objects.filter(id__in=list(recipients))
msg = _("Start sending backup emails")
print(
'\n'
f'\033[32m>>> {_("Start sending backup emails")}\033[0m'
f'\033[32m>>> {msg}\033[0m'
''
)
plan_name = self.plan_name
name = self.name
for user in recipients:
if not user.secret_key:
attachment_list = []
else:
attachment = os.path.join(PATH, f'{plan_name}-{local_now_filename()}-{time.time()}.zip')
attachment = os.path.join(PATH, f'{name}-{local_now_filename()}-{time.time()}.zip')
encrypt_and_compress_zip_file(attachment, user.secret_key, files)
attachment_list = [attachment, ]
AccountBackupExecutionTaskMsg(plan_name, user).publish(attachment_list)
email_sent_to = _('Email sent to')
print('{} {}({})'.format(email_sent_to, user, user.email))
attachment_list = [attachment]
AccountBackupExecutionTaskMsg(name, user).publish(attachment_list)
for file in files:
os.remove(file)
@ -186,63 +206,41 @@ class AccountBackupHandler:
return
recipients = ReplayStorage.objects.filter(id__in=list(recipients))
print(
'\n'
'\033[32m>>> 📃 ---> sftp \033[0m'
''
)
plan_name = self.plan_name
name = self.name
encrypt_file = _('Encrypting files using encryption password')
for rec in recipients:
attachment = os.path.join(PATH, f'{plan_name}-{local_now_filename()}-{time.time()}.zip')
attachment = os.path.join(PATH, f'{name}-{local_now_filename()}-{time.time()}.zip')
if password:
print(f'\033[32m>>> {encrypt_file}\033[0m')
encrypt_and_compress_zip_file(attachment, password, files)
else:
zip_files(attachment, files)
attachment_list = attachment
AccountBackupByObjStorageExecutionTaskMsg(plan_name, rec).publish(attachment_list)
AccountBackupByObjStorageExecutionTaskMsg(name, rec).publish(attachment_list)
file_sent_to = _('The backup file will be sent to')
print('{}: {}({})'.format(file_sent_to, rec.name, rec.id))
for file in files:
os.remove(file)
def step_perform_task_update(self, is_success, reason):
self.execution.reason = reason[:1024]
self.execution.is_success = is_success
self.execution.save()
finish = _('Finish')
print(f'\n{finish}\n')
@staticmethod
def step_finished(is_success):
if is_success:
print(_('Success'))
else:
print(_('Failed'))
def _run(self):
is_success = False
error = '-'
try:
backup_type = self.execution.snapshot.get('backup_type', AccountBackupType.email.value)
if backup_type == AccountBackupType.email.value:
backup_type = self.execution.snapshot.get('backup_type', AccountBackupType.email)
if backup_type == AccountBackupType.email:
self.backup_by_email()
elif backup_type == AccountBackupType.object_storage.value:
elif backup_type == AccountBackupType.object_storage:
self.backup_by_obj_storage()
except Exception as e:
self.is_frozen = True
print(e)
error = str(e)
else:
is_success = True
finally:
reason = error
self.step_perform_task_update(is_success, reason)
self.step_finished(is_success)
print(f'\033[31m>>> {error}\033[0m')
self.execution.status = Status.error
self.execution.summary['error'] = error
def backup_by_obj_storage(self):
object_id = self.execution.snapshot.get('id')
zip_encrypt_password = AccountBackupAutomation.objects.get(id=object_id).zip_encrypt_password
zip_encrypt_password = BackupAccountAutomation.objects.get(id=object_id).zip_encrypt_password
obj_recipients_part_one = self.execution.snapshot.get('obj_recipients_part_one', [])
obj_recipients_part_two = self.execution.snapshot.get('obj_recipients_part_two', [])
no_assigned_sftp_server = _('The backup task has no assigned sftp server')
@ -266,7 +264,6 @@ class AccountBackupHandler:
self.send_backup_obj_storage(files, recipients, zip_encrypt_password)
def backup_by_email(self):
warn_text = _('The backup task has no assigned recipient')
recipients_part_one = self.execution.snapshot.get('recipients_part_one', [])
recipients_part_two = self.execution.snapshot.get('recipients_part_two', [])
@ -276,7 +273,7 @@ class AccountBackupHandler:
f'\033[31m>>> {warn_text}\033[0m'
''
)
raise RecipientsNotFound('Not Found Recipients')
return
if recipients_part_one and recipients_part_two:
print(f'\033[32m>>> {split_help_text}\033[0m')
files = self.create_excel(section='front')
@ -290,18 +287,5 @@ class AccountBackupHandler:
self.send_backup_mail(files, recipients)
def run(self):
plan_start = _('Plan start')
plan_end = _('Plan end')
time_cost = _('Time cost')
error = _('An exception occurred during task execution')
print('{}: {}'.format(plan_start, local_now_display()))
time_start = time.time()
try:
self._run()
except Exception as e:
print(error)
print(e)
finally:
print('\n{}: {}'.format(plan_end, local_now_display()))
timedelta = round((time.time() - time_start), 2)
print('{}: {}s'.format(time_cost, timedelta))
print('{}: {}'.format(_('Plan start'), local_now_display()))
self._run()

View File

@ -1,48 +1,30 @@
# -*- coding: utf-8 -*-
#
import time
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from assets.automations.base.manager import BaseManager
from common.utils.timezone import local_now_display
from .handlers import AccountBackupHandler
class AccountBackupManager:
def __init__(self, execution):
self.execution = execution
self.date_start = timezone.now()
self.time_start = time.time()
self.date_end = None
self.time_end = None
self.timedelta = 0
class AccountBackupManager(BaseManager):
def do_run(self):
execution = self.execution
account_backup_execution_being_executed = _('The account backup plan is being executed')
print(f'\n\033[33m# {account_backup_execution_being_executed}\033[0m')
handler = AccountBackupHandler(execution)
print(f'\033[33m# {account_backup_execution_being_executed}\033[0m')
handler = AccountBackupHandler(self, execution)
handler.run()
def pre_run(self):
self.execution.date_start = self.date_start
self.execution.save()
def post_run(self):
self.time_end = time.time()
self.date_end = timezone.now()
def send_report_if_need(self):
pass
def print_summary(self):
print('\n\n' + '-' * 80)
plan_execution_end = _('Plan execution end')
print('{} {}\n'.format(plan_execution_end, local_now_display()))
self.timedelta = self.time_end - self.time_start
time_cost = _('Time cost')
print('{}: {}s'.format(time_cost, self.timedelta))
self.execution.timedelta = self.timedelta
self.execution.save()
time_cost = _('Duration')
print('{}: {}s'.format(time_cost, self.duration))
def run(self):
self.pre_run()
self.do_run()
self.post_run()
def get_report_template(self):
return "accounts/backup_account_report.html"

View File

@ -1,12 +1,203 @@
from copy import deepcopy
from django.conf import settings
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from accounts.automations.methods import platform_automation_methods
from accounts.const import SSHKeyStrategy, SecretStrategy, SecretType, ChangeSecretRecordStatusChoice
from accounts.models import BaseAccountQuerySet
from accounts.utils import SecretGenerator
from assets.automations.base.manager import BasePlaybookManager
from assets.const import HostTypes
from common.db.utils import safe_db_connection
from common.utils import get_logger
logger = get_logger(__name__)
class AccountBasePlaybookManager(BasePlaybookManager):
template_path = ''
@property
def platform_automation_methods(self):
return platform_automation_methods
class BaseChangeSecretPushManager(AccountBasePlaybookManager):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.secret_type = self.execution.snapshot.get('secret_type')
self.secret_strategy = self.execution.snapshot.get(
'secret_strategy', SecretStrategy.custom
)
self.ssh_key_change_strategy = self.execution.snapshot.get(
'ssh_key_change_strategy', SSHKeyStrategy.set_jms
)
self.account_ids = self.execution.snapshot['accounts']
self.record_map = self.execution.snapshot.get('record_map', {}) # retry map for previously failed records
self.name_recorder_mapper = {} # host-name -> record mapping for later processing
def gen_account_inventory(self, account, asset, h, path_dir):
raise NotImplementedError
def get_ssh_params(self, secret, secret_type):
kwargs = {}
if secret_type != SecretType.SSH_KEY:
return kwargs
kwargs['strategy'] = self.ssh_key_change_strategy
kwargs['exclusive'] = 'yes' if kwargs['strategy'] == SSHKeyStrategy.set else 'no'
if kwargs['strategy'] == SSHKeyStrategy.set_jms:
kwargs['regexp'] = '.*{}$'.format(secret.split()[2].strip())
return kwargs
def get_secret(self, account):
if self.secret_strategy == SecretStrategy.custom:
new_secret = self.execution.snapshot['secret']
else:
generator = SecretGenerator(
self.secret_strategy, self.secret_type,
self.execution.snapshot.get('password_rules')
)
new_secret = generator.get_secret()
return new_secret
def get_accounts(self, privilege_account) -> BaseAccountQuerySet | None:
if not privilege_account:
print('Not privilege account')
return
asset = privilege_account.asset
accounts = asset.all_accounts.all()
accounts = accounts.filter(id__in=self.account_ids, secret_reset=True)
if self.secret_type:
accounts = accounts.filter(secret_type=self.secret_type)
if settings.CHANGE_AUTH_PLAN_SECURE_MODE_ENABLED:
accounts = accounts.filter(privileged=False).exclude(
username__in=['root', 'administrator', privilege_account.username]
)
return accounts
def handle_ssh_secret(self, secret_type, new_secret, path_dir):
private_key_path = None
if secret_type == SecretType.SSH_KEY:
private_key_path = self.generate_private_key_path(new_secret, path_dir)
new_secret = self.generate_public_key(new_secret)
return new_secret, private_key_path
def gen_inventory(self, h, account, new_secret, private_key_path, asset):
secret_type = account.secret_type
h['ssh_params'].update(self.get_ssh_params(new_secret, secret_type))
h['account'] = {
'name': account.name,
'username': account.username,
'full_username': account.full_username,
'secret_type': secret_type,
'secret': account.escape_jinja2_syntax(new_secret),
'private_key_path': private_key_path,
'become': account.get_ansible_become_auth(),
}
if asset.platform.type == 'oracle':
h['account']['mode'] = 'sysdba' if account.privileged else None
return h
def host_callback(self, host, asset=None, account=None, automation=None, path_dir=None, **kwargs):
host = super().host_callback(
host, asset=asset, account=account, automation=automation,
path_dir=path_dir, **kwargs
)
if host.get('error'):
return host
host['check_conn_after_change'] = self.execution.snapshot.get('check_conn_after_change', True)
host['ssh_params'] = {}
accounts = self.get_accounts(account)
error_msg = _("No pending accounts found")
if not accounts:
print(f'{asset}: {error_msg}')
return []
if asset.type == HostTypes.WINDOWS:
accounts = accounts.filter(secret_type=SecretType.PASSWORD)
inventory_hosts = []
if asset.type == HostTypes.WINDOWS and self.secret_type == SecretType.SSH_KEY:
print(f'Windows {asset} does not support ssh key push')
return inventory_hosts
for account in accounts:
h = deepcopy(host)
h['name'] += '(' + account.username + ')' # To distinguish different accounts
try:
h = self.gen_account_inventory(account, asset, h, path_dir)
except Exception as e:
h['error'] = str(e)
inventory_hosts.append(h)
return inventory_hosts
@staticmethod
def save_record(recorder):
recorder.save(update_fields=['error', 'status', 'date_finished'])
def on_host_success(self, host, result):
recorder = self.name_recorder_mapper.get(host)
if not recorder:
return
recorder.status = ChangeSecretRecordStatusChoice.success.value
recorder.date_finished = timezone.now()
account = recorder.account
if not account:
print("Account not found, deleted ?")
return
account.secret = getattr(recorder, 'new_secret', account.secret)
account.date_updated = timezone.now()
account.date_change_secret = timezone.now()
account.change_secret_status = ChangeSecretRecordStatusChoice.success
self.summary['ok_accounts'] += 1
self.result['ok_accounts'].append(
{
"asset": str(account.asset),
"username": account.username,
}
)
super().on_host_success(host, result)
with safe_db_connection():
account.save(update_fields=['secret', 'date_updated', 'date_change_secret', 'change_secret_status'])
self.save_record(recorder)
def on_host_error(self, host, error, result):
recorder = self.name_recorder_mapper.get(host)
if not recorder:
return
recorder.status = ChangeSecretRecordStatusChoice.failed.value
recorder.date_finished = timezone.now()
recorder.error = error
account = recorder.account
if not account:
print("Account not found, deleted ?")
return
account.date_updated = timezone.now()
account.date_change_secret = timezone.now()
account.change_secret_status = ChangeSecretRecordStatusChoice.failed
self.summary['fail_accounts'] += 1
self.result['fail_accounts'].append(
{
"asset": str(recorder.asset),
"username": recorder.account.username,
}
)
super().on_host_error(host, error, result)
with safe_db_connection():
account.save(update_fields=['change_secret_status', 'date_change_secret', 'date_updated'])
self.save_record(recorder)
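
gen_account_inventory() is left abstract above; a hypothetical subclass sketch showing how a concrete manager might wire the helpers together. Record creation and the name_recorder_mapper bookkeeping used by on_host_success/on_host_error are omitted here.

class ChangeSecretManagerSketch(BaseChangeSecretPushManager):
    """Illustrative only -- not part of this diff."""

    def gen_account_inventory(self, account, asset, h, path_dir):
        # Pick the new secret (custom value from the snapshot or a generated one).
        new_secret = self.get_secret(account)
        # For SSH keys, write the private key to disk and push the public part.
        new_secret, private_key_path = self.handle_ssh_secret(
            account.secret_type, new_secret, path_dir
        )
        # Fill the per-account inventory entry consumed by the playbook.
        return self.gen_inventory(h, account, new_secret, private_key_path, asset)
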

View File

@ -20,6 +20,7 @@
become_private_key_path: "{{ jms_custom_become_private_key_path | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
recv_timeout: "{{ params.recv_timeout | default(30) }}"
register: ping_info
delegate_to: localhost
@ -39,9 +40,12 @@
name: "{{ account.username }}"
password: "{{ account.secret }}"
commands: "{{ params.commands }}"
first_conn_delay_time: "{{ first_conn_delay_time | default(0.5) }}"
answers: "{{ params.answers }}"
recv_timeout: "{{ params.recv_timeout | default(30) }}"
delay_time: "{{ params.delay_time | default(2) }}"
prompt: "{{ params.prompt | default('.*') }}"
ignore_errors: true
when: ping_info is succeeded
when: ping_info is succeeded and check_conn_after_change
register: change_info
delegate_to: localhost
@ -58,4 +62,6 @@
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
recv_timeout: "{{ params.recv_timeout | default(30) }}"
delegate_to: localhost
when: check_conn_after_change

View File

@ -10,10 +10,30 @@ protocol: ssh
priority: 50
params:
- name: commands
type: list
type: text
label: "{{ 'Params commands label' | trans }}"
default: [ '' ]
default: ''
help_text: "{{ 'Params commands help text' | trans }}"
- name: recv_timeout
type: int
label: "{{ 'Params recv_timeout label' | trans }}"
default: 30
help_text: "{{ 'Params recv_timeout help text' | trans }}"
- name: delay_time
type: int
label: "{{ 'Params delay_time label' | trans }}"
default: 2
help_text: "{{ 'Params delay_time help text' | trans }}"
- name: prompt
type: str
label: "{{ 'Params prompt label' | trans }}"
default: '.*'
help_text: "{{ 'Params prompt help text' | trans }}"
- name: answers
type: text
label: "{{ 'Params answer label' | trans }}"
default: '.*'
help_text: "{{ 'Params answer help text' | trans }}"
i18n:
SSH account change secret:
@ -22,11 +42,91 @@ i18n:
en: 'Custom password change by SSH command line'
Params commands help text:
zh: '自定义命令中如需包含账号的 账号、密码、SSH 连接的用户密码 字段,<br />请使用 &#123;username&#125;、&#123;password&#125;、&#123;login_password&#125;格式,执行任务时会进行替换 。<br />比如针对 Cisco 主机进行改密,一般需要配置五条命令:<br />1. enable<br />2. &#123;login_password&#125;<br />3. configure terminal<br />4. username &#123;username&#125; privilege 0 password &#123;password&#125; <br />5. end'
ja: 'カスタム コマンドに SSH 接続用のアカウント番号、パスワード、ユーザー パスワード フィールドを含める必要がある場合は、<br />&#123;ユーザー名&#125;、&#123;パスワード&#125;、&#123;login_password& を使用してください。 # 125; 形式。タスクの実行時に置き換えられます。 <br />たとえば、Cisco ホストのパスワードを変更するには、通常、次の 5 つのコマンドを設定する必要があります:<br />1.enable<br />2.&#123;login_password&#125;<br />3 .ターミナルの設定<br / >4. ユーザー名 &#123;ユーザー名&#125; 権限 0 パスワード &#123;パスワード&#125; <br />5. 終了'
en: 'If the custom command needs to include the account number, password, and user password field for SSH connection,<br />Please use &#123;username&#125;, &#123;password&#125;, &#123;login_password&# 125; format, which will be replaced when executing the task. <br />For example, to change the password of a Cisco host, you generally need to configure five commands:<br />1. enable<br />2. &#123;login_password&#125;<br />3. configure terminal<br / >4. username &#123;username&#125; privilege 0 password &#123;password&#125; <br />5. end'
zh: |
请将命令中的指定位置改成特殊符号 <br />
1. 改密账号 -> {username} <br />
2. 改密密码 -> {password} <br />
3. 登录用户密码 -> {login_password} <br />
<strong>多条命令使用换行分割,</strong>执行任务时系统会根据特殊符号替换真实数据。<br />
比如针对 Cisco 主机进行改密,一般需要配置五条命令:<br />
enable <br />
{login_password} <br />
configure terminal <br />
username {username} privilege 0 password {password} <br />
end <br />
ja: |
コマンド内の指定された位置を特殊記号に変更してください。<br />
新しいパスワード(アカウント変更) -> {username} <br />
新しいパスワード(パスワード変更) -> {password} <br />
ログインユーザーパスワード -> {login_password} <br />
<strong>複数のコマンドは改行で区切り、</strong>タスクを実行するときにシステムは特殊記号を使用して実際のデータを置き換えます。<br />
例えば、Cisco機器のパスワードを変更する場合、一般的には5つのコマンドを設定する必要があります<br />
enable <br />
{login_password} <br />
configure terminal <br />
username {username} privilege 0 password {password} <br />
end <br />
en: |
Please change the specified positions in the command to special symbols. <br />
Change password account -> {username} <br />
Change password -> {password} <br />
Login user password -> {login_password} <br />
<strong>Multiple commands are separated by new lines,</strong> and when executing tasks, <br />
the system will replace the special symbols with real data. <br />
For example, to change the password for a Cisco device, you generally need to configure five commands: <br />
enable <br />
{login_password} <br />
configure terminal <br />
username {username} privilege 0 password {password} <br />
end <br />
Params commands label:
zh: '自定义命令'
ja: 'カスタムコマンド'
en: 'Custom command'
Params recv_timeout label:
zh: '超时时间'
ja: 'タイムアウト'
en: 'Timeout'
Params recv_timeout help text:
zh: '等待命令结果返回的超时时间(秒)'
ja: 'コマンドの結果を待つタイムアウト時間(秒)'
en: 'Timeout in seconds to wait for the command result to return'
Params delay_time label:
zh: '延迟发送时间'
ja: '遅延送信時間'
en: 'Delayed send time'
Params delay_time help text:
zh: '每条命令延迟发送的时间间隔(秒)'
ja: '各コマンド送信の遅延間隔(秒)'
en: 'Delay in seconds between sending each command'
Params prompt label:
zh: '提示符'
ja: 'ヒント'
en: 'Prompt'
Params prompt help text:
zh: '终端连接后显示的提示符信息(正则表达式)'
ja: 'ターミナル接続後に表示されるプロンプト情報(正規表現)'
en: 'Prompt information displayed after terminal connection (Regular expression)'
Params answer label:
zh: '命令结果'
ja: 'コマンド結果'
en: 'Command result'
Params answer help text:
zh: |
根据结果匹配度决定是否执行下一条命令,输入框的内容和上方 “自定义命令” 内容按行一一对应(正则表达式)
ja: |
結果の一致度に基づいて次のコマンドを実行するかどうかを決定します。
入力欄の内容は、上の「カスタムコマンド」の内容と行ごとに対応しています(せいきひょうげん)
en: |
Decide whether to execute the next command based on the result match.
The input content corresponds line by line with the content
of the `Custom command` above. (Regular expression)
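The help texts above describe a small rendering contract: the "Custom command" text is split into lines, the {username}, {password} and {login_password} placeholders are replaced with real values before the lines are sent over SSH, and each line of "Command result" is a regular expression matched against the output of the corresponding command. A minimal sketch of that substitution step in Python; the function name and the plain str.replace approach are assumptions for illustration, not the plugin's actual implementation:

def render_commands(raw_commands, username, password, login_password):
    # Split the "Custom command" text field into lines and fill in the documented placeholders.
    rendered = []
    for line in raw_commands.splitlines():
        line = line.replace('{username}', username)
        line = line.replace('{password}', password)
        line = line.replace('{login_password}', login_password)
        if line.strip():
            rendered.append(line)
    return rendered

# Example: the five Cisco commands from the help text above.
cisco = 'enable\n{login_password}\nconfigure terminal\nusername {username} privilege 0 password {password}\nend'
print(render_commands(cisco, 'admin', 'NewPass1!', 'LoginPass1!'))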

View File

@ -1,7 +1,7 @@
- hosts: mongodb
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
tasks:
- name: Test MongoDB connection
@ -53,3 +53,4 @@
ssl_certfile: "{{ jms_asset.secret_info.client_key | default('') }}"
connection_options:
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
when: check_conn_after_change

View File

@ -1,7 +1,7 @@
- hosts: mysql
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
db_name: "{{ jms_asset.spec_info.db_name }}"
check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
@ -54,3 +54,4 @@
client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
filter: version
when: check_conn_after_change

View File

@ -1,7 +1,7 @@
- hosts: oracle
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
tasks:
- name: Test Oracle connection
@ -40,3 +40,4 @@
login_port: "{{ jms_asset.port }}"
login_database: "{{ jms_asset.spec_info.db_name }}"
mode: "{{ account.mode }}"
when: check_conn_after_change

View File

@ -1,7 +1,7 @@
- hosts: postgre
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
@ -55,3 +55,4 @@
ssl_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
when: check_conn_after_change

View File

@ -1,7 +1,7 @@
- hosts: sqlserver
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
tasks:
- name: Test SQLServer connection
@ -64,3 +64,4 @@
name: '{{ jms_asset.spec_info.db_name }}'
script: |
SELECT @@version
when: check_conn_after_change

View File

@ -9,7 +9,8 @@
database: passwd
key: "{{ account.username }}"
register: user_info
ignore_errors: yes # Ignore errors so the playbook does not fail when the user does not exist
failed_when: false
changed_when: false
- name: "Add {{ account.username }} user"
ansible.builtin.user:
@ -18,10 +19,10 @@
shell: "{{ params.shell if params.shell | length > 0 else omit }}"
home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
groups: "{{ params.groups if params.groups | length > 0 else omit }}"
append: yes
append: "{{ true if params.groups | length > 0 else false }}"
expires: -1
state: present
when: user_info.failed
when: user_info.msg is defined
- name: "Set {{ account.username }} sudo setting"
ansible.builtin.lineinfile:
@ -31,7 +32,7 @@
line: "{{ account.username + ' ALL=(ALL) NOPASSWD: ' + params.sudo }}"
validate: visudo -cf %s
when:
- user_info.failed or params.modify_sudo
- user_info.msg is defined or params.modify_sudo
- params.sudo
- name: "Change {{ account.username }} password"
@ -40,6 +41,7 @@
password: "{{ account.secret | password_hash('des') }}"
update_password: always
ignore_errors: true
register: change_secret_result
when: account.secret_type == "password"
- name: "Get home directory for {{ account.username }}"
@ -82,6 +84,7 @@
user: "{{ account.username }}"
key: "{{ account.secret }}"
exclusive: "{{ ssh_params.exclusive }}"
register: change_secret_result
when: account.secret_type == "ssh_key"
- name: Refresh connection
@ -100,7 +103,9 @@
become_password: "{{ account.become.ansible_password | default('') }}"
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: account.secret_type == "password"
when:
- account.secret_type == "password"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost
- name: "Verify {{ account.username }} SSH KEY (paramiko)"
@ -111,5 +116,7 @@
login_private_key_path: "{{ account.private_key_path }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: account.secret_type == "ssh_key"
when:
- account.secret_type == "ssh_key"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost

View File

@ -9,7 +9,8 @@
database: passwd
key: "{{ account.username }}"
register: user_info
ignore_errors: yes # Ignore errors so the playbook does not fail when the user does not exist
failed_when: false
changed_when: false
- name: "Add {{ account.username }} user"
ansible.builtin.user:
@ -18,10 +19,10 @@
shell: "{{ params.shell if params.shell | length > 0 else omit }}"
home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
groups: "{{ params.groups if params.groups | length > 0 else omit }}"
append: yes
append: "{{ true if params.groups | length > 0 else false }}"
expires: -1
state: present
when: user_info.failed
when: user_info.msg is defined
- name: "Set {{ account.username }} sudo setting"
ansible.builtin.lineinfile:
@ -31,7 +32,7 @@
line: "{{ account.username + ' ALL=(ALL) NOPASSWD: ' + params.sudo }}"
validate: visudo -cf %s
when:
- user_info.failed or params.modify_sudo
- user_info.msg is defined or params.modify_sudo
- params.sudo
- name: "Change {{ account.username }} password"
@ -40,6 +41,7 @@
password: "{{ account.secret | password_hash('sha512') }}"
update_password: always
ignore_errors: true
register: change_secret_result
when: account.secret_type == "password"
- name: "Get home directory for {{ account.username }}"
@ -82,6 +84,7 @@
user: "{{ account.username }}"
key: "{{ account.secret }}"
exclusive: "{{ ssh_params.exclusive }}"
register: change_secret_result
when: account.secret_type == "ssh_key"
- name: Refresh connection
@ -100,7 +103,9 @@
become_password: "{{ account.become.ansible_password | default('') }}"
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: account.secret_type == "password"
when:
- account.secret_type == "password"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost
- name: "Verify {{ account.username }} SSH KEY (paramiko)"
@ -111,5 +116,7 @@
login_private_key_path: "{{ account.private_key_path }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: account.secret_type == "ssh_key"
when:
- account.secret_type == "ssh_key"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost

View File

@ -4,10 +4,6 @@
- name: Test privileged account
ansible.windows.win_ping:
# - name: Print variables
# debug:
# msg: "Username: {{ account.username }}, Password: {{ account.secret }}"
- name: Change password
ansible.windows.win_user:
fullname: "{{ account.username}}"
@ -28,4 +24,4 @@
vars:
ansible_user: "{{ account.username }}"
ansible_password: "{{ account.secret }}"
when: account.secret_type == "password"
when: account.secret_type == "password" and check_conn_after_change

View File

@ -0,0 +1,27 @@
- hosts: demo
gather_facts: no
tasks:
- name: Test privileged account
ansible.windows.win_ping:
- name: Change password
community.windows.win_domain_user:
name: "{{ account.username }}"
password: "{{ account.secret }}"
update_password: always
password_never_expires: yes
state: present
groups: "{{ params.groups }}"
groups_action: add
ignore_errors: true
when: account.secret_type == "password"
- name: Refresh connection
ansible.builtin.meta: reset_connection
- name: Verify password
ansible.windows.win_ping:
vars:
ansible_user: "{{ account.full_username }}"
ansible_password: "{{ account.secret }}"
when: account.secret_type == "password" and check_conn_after_change

View File

@ -0,0 +1,27 @@
id: change_secret_ad_windows
name: "{{ 'Windows account change secret' | trans }}"
version: 1
method: change_secret
category:
- ds
type:
- windows_ad
params:
- name: groups
type: str
label: 'User groups'
default: 'Users,Remote Desktop Users'
help_text: "{{ 'Params groups help text' | trans }}"
i18n:
Windows account change secret:
zh: '使用 Ansible 模块 win_domain_user 执行 Windows 账号改密'
ja: 'Ansible win_domain_user モジュールを使用して Windows アカウントのパスワード変更'
en: 'Using Ansible module win_domain_user to change Windows account secret'
Params groups help text:
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
en: 'Enter the user groups, separated by commas (the groups must already exist)'

View File

@ -4,10 +4,6 @@
- name: Test privileged account
ansible.windows.win_ping:
# - name: Print variables
# debug:
# msg: "Username: {{ account.username }}, Password: {{ account.secret }}"
- name: Change password
ansible.windows.win_user:
fullname: "{{ account.username}}"
@ -31,5 +27,5 @@
login_password: "{{ account.secret }}"
login_secret_type: "{{ account.secret_type }}"
gateway_args: "{{ jms_gateway | default({}) }}"
when: account.secret_type == "password"
when: account.secret_type == "password" and check_conn_after_change
delegate_to: localhost

View File

@ -1,218 +1,57 @@
import os
import time
from copy import deepcopy
from django.conf import settings
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from xlsxwriter import Workbook
from accounts.const import AutomationTypes, SecretType, SSHKeyStrategy, SecretStrategy, ChangeSecretRecordStatusChoice
from accounts.models import ChangeSecretRecord, BaseAccountQuerySet
from accounts.notifications import ChangeSecretExecutionTaskMsg, ChangeSecretFailedMsg
from accounts.const import (
AutomationTypes, SecretStrategy, ChangeSecretRecordStatusChoice
)
from accounts.models import ChangeSecretRecord
from accounts.notifications import ChangeSecretExecutionTaskMsg, ChangeSecretReportMsg
from accounts.serializers import ChangeSecretRecordBackUpSerializer
from assets.const import HostTypes
from common.utils import get_logger
from common.utils.file import encrypt_and_compress_zip_file
from common.utils.timezone import local_now_filename
from users.models import User
from ..base.manager import AccountBasePlaybookManager
from ...utils import SecretGenerator
from ..base.manager import BaseChangeSecretPushManager
logger = get_logger(__name__)
class ChangeSecretManager(AccountBasePlaybookManager):
class ChangeSecretManager(BaseChangeSecretPushManager):
ansible_account_prefer = ''
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.record_map = self.execution.snapshot.get('record_map', {})
self.secret_type = self.execution.snapshot.get('secret_type')
self.secret_strategy = self.execution.snapshot.get(
'secret_strategy', SecretStrategy.custom
)
self.ssh_key_change_strategy = self.execution.snapshot.get(
'ssh_key_change_strategy', SSHKeyStrategy.add
)
self.account_ids = self.execution.snapshot['accounts']
self.name_recorder_mapper = {}  # keep a name -> record mapping for later processing
@classmethod
def method_type(cls):
return AutomationTypes.change_secret
def get_ssh_params(self, account, secret, secret_type):
kwargs = {}
if secret_type != SecretType.SSH_KEY:
return kwargs
kwargs['strategy'] = self.ssh_key_change_strategy
kwargs['exclusive'] = 'yes' if kwargs['strategy'] == SSHKeyStrategy.set else 'no'
def gen_account_inventory(self, account, asset, h, path_dir):
record = self.get_or_create_record(asset, account, h['name'])
new_secret, private_key_path = self.handle_ssh_secret(account.secret_type, record.new_secret, path_dir)
h = self.gen_inventory(h, account, new_secret, private_key_path, asset)
return h
if kwargs['strategy'] == SSHKeyStrategy.set_jms:
kwargs['regexp'] = '.*{}$'.format(secret.split()[2].strip())
return kwargs
def get_or_create_record(self, asset, account, name):
asset_account_id = f'{asset.id}-{account.id}'
def secret_generator(self, secret_type):
return SecretGenerator(
self.secret_strategy, secret_type,
self.execution.snapshot.get('password_rules')
)
def get_secret(self, secret_type):
if self.secret_strategy == SecretStrategy.custom:
return self.execution.snapshot['secret']
if asset_account_id in self.record_map:
record_id = self.record_map[asset_account_id]
recorder = ChangeSecretRecord.objects.filter(id=record_id).first()
else:
return self.secret_generator(secret_type).get_secret()
new_secret = self.get_secret(account)
recorder = self.create_record(asset, account, new_secret)
def get_accounts(self, privilege_account) -> BaseAccountQuerySet | None:
if not privilege_account:
print('Not privilege account')
return
self.name_recorder_mapper[name] = recorder
return recorder
asset = privilege_account.asset
accounts = asset.accounts.all()
accounts = accounts.filter(id__in=self.account_ids)
if self.secret_type:
accounts = accounts.filter(secret_type=self.secret_type)
if settings.CHANGE_AUTH_PLAN_SECURE_MODE_ENABLED:
accounts = accounts.filter(privileged=False).exclude(
username__in=['root', 'administrator', privilege_account.username]
)
return accounts
def host_callback(
self, host, asset=None, account=None,
automation=None, path_dir=None, **kwargs
):
host = super().host_callback(
host, asset=asset, account=account, automation=automation,
path_dir=path_dir, **kwargs
def create_record(self, asset, account, new_secret):
recorder = ChangeSecretRecord(
asset=asset, account=account, execution=self.execution,
old_secret=account.secret, new_secret=new_secret,
comment=f'{account.username}@{asset.address}'
)
if host.get('error'):
return host
accounts = self.get_accounts(account)
error_msg = _("No pending accounts found")
if not accounts:
print(f'{asset}: {error_msg}')
return []
records = []
inventory_hosts = []
if asset.type == HostTypes.WINDOWS and self.secret_type == SecretType.SSH_KEY:
print(f'Windows {asset} does not support ssh key push')
return inventory_hosts
if asset.type == HostTypes.WINDOWS:
accounts = accounts.filter(secret_type=SecretType.PASSWORD)
host['ssh_params'] = {}
for account in accounts:
h = deepcopy(host)
secret_type = account.secret_type
h['name'] += '(' + account.username + ')'
if self.secret_type is None:
new_secret = account.secret
else:
new_secret = self.get_secret(secret_type)
if new_secret is None:
print(f'new_secret is None, account: {account}')
continue
asset_account_id = f'{asset.id}-{account.id}'
if asset_account_id not in self.record_map:
recorder = ChangeSecretRecord(
asset=asset, account=account, execution=self.execution,
old_secret=account.secret, new_secret=new_secret,
comment=f'{account.username}@{asset.address}'
)
records.append(recorder)
else:
record_id = self.record_map[asset_account_id]
try:
recorder = ChangeSecretRecord.objects.get(id=record_id)
except ChangeSecretRecord.DoesNotExist:
print(f"Record {record_id} not found")
continue
self.name_recorder_mapper[h['name']] = recorder
private_key_path = None
if secret_type == SecretType.SSH_KEY:
private_key_path = self.generate_private_key_path(new_secret, path_dir)
new_secret = self.generate_public_key(new_secret)
h['ssh_params'].update(self.get_ssh_params(account, new_secret, secret_type))
h['account'] = {
'name': account.name,
'username': account.username,
'secret_type': secret_type,
'secret': account.escape_jinja2_syntax(new_secret),
'private_key_path': private_key_path,
'become': account.get_ansible_become_auth(),
}
if asset.platform.type == 'oracle':
h['account']['mode'] = 'sysdba' if account.privileged else None
inventory_hosts.append(h)
ChangeSecretRecord.objects.bulk_create(records)
return inventory_hosts
@staticmethod
def require_update_version(account, recorder):
return account.secret != recorder.new_secret
def on_host_success(self, host, result):
recorder = self.name_recorder_mapper.get(host)
if not recorder:
return
recorder.status = ChangeSecretRecordStatusChoice.success.value
recorder.date_finished = timezone.now()
account = recorder.account
if not account:
print("Account not found, deleted ?")
return
version_update_required = self.require_update_version(account, recorder)
account.secret = recorder.new_secret
account.date_updated = timezone.now()
max_retries = 3
retry_count = 0
while retry_count < max_retries:
try:
recorder.save()
account_update_fields = ['secret', 'date_updated']
if version_update_required:
account_update_fields.append('version')
account.save(update_fields=account_update_fields)
break
except Exception as e:
retry_count += 1
if retry_count == max_retries:
self.on_host_error(host, str(e), result)
else:
print(f'retry {retry_count} times for {host} recorder save error: {e}')
time.sleep(1)
def on_host_error(self, host, error, result):
recorder = self.name_recorder_mapper.get(host)
if not recorder:
return
recorder.status = ChangeSecretRecordStatusChoice.failed.value
recorder.date_finished = timezone.now()
recorder.error = error
try:
recorder.save()
except Exception as e:
print(f"\033[31m Save {host} recorder error: {e} \033[0m\n")
def on_runner_failed(self, runner, e):
logger.error("Account error: ", e)
return recorder
def check_secret(self):
if self.secret_strategy == SecretStrategy.custom \
@ -230,47 +69,39 @@ class ChangeSecretManager(AccountBasePlaybookManager):
else:
failed += 1
total += 1
summary = _('Success: %s, Failed: %s, Total: %s') % (succeed, failed, total)
return summary
def run(self, *args, **kwargs):
if self.secret_type and not self.check_secret():
self.execution.status = 'success'
self.execution.date_finished = timezone.now()
self.execution.save()
return
super().run(*args, **kwargs)
def print_summary(self):
recorders = list(self.name_recorder_mapper.values())
summary = self.get_summary(recorders)
print(summary, end='')
print('\n\n' + '-' * 80)
plan_execution_end = _('Plan execution end')
print('{} {}\n'.format(plan_execution_end, local_now_filename()))
time_cost = _('Duration')
print('{}: {}s'.format(time_cost, self.duration))
print(summary)
def send_report_if_need(self, *args, **kwargs):
if self.secret_type and not self.check_secret():
return
recorders = list(self.name_recorder_mapper.values())
if self.record_map:
return
failed_recorders = [
r for r in recorders
if r.status == ChangeSecretRecordStatusChoice.failed.value
]
recipients = self.execution.recipients
recipients = User.objects.filter(id__in=list(recipients.keys()))
if not recipients:
return
if failed_recorders:
name = self.execution.snapshot.get('name')
execution_id = str(self.execution.id)
_ids = [r.id for r in failed_recorders]
asset_account_errors = ChangeSecretRecord.objects.filter(
id__in=_ids).values_list('asset__name', 'account__username', 'error')
for user in recipients:
ChangeSecretFailedMsg(name, execution_id, user, asset_account_errors).publish()
context = self.get_report_context()
for user in recipients:
ChangeSecretReportMsg(user, context).publish()
if not recorders:
return
summary = self.get_summary(recorders)
self.send_recorder_mail(recipients, recorders, summary)
def send_recorder_mail(self, recipients, recorders, summary):
@ -307,3 +138,6 @@ class ChangeSecretManager(AccountBasePlaybookManager):
ws.write_string(row_index, col_index, col_data)
wb.close()
return True
def get_report_template(self):
return "accounts/change_secret_report.html"

View File

@ -0,0 +1,78 @@
#!/usr/bin/env python
#
import re
import sqlite3
import sys
def is_weak_password(password):
if len(password) < 8:
return True
# Only one character class (all digits or all letters)
if password.isdigit() or password.isalpha():
return True
# Only one letter case (all lowercase or all uppercase)
if password.islower() or password.isupper():
return True
# Common weak passwords
common_passwords = ["123456", "password", "12345678", "qwerty", "abc123"]
if password.lower() in common_passwords:
return True
# Regex check for character diversity (letters, digits, special characters)
if (
not re.search(r"[A-Za-z]", password)
or not re.search(r"[0-9]", password)
or not re.search(r"[\W_]", password)
):
return True
return False
def parse_it(fname):
count = 0
lines = []
with open(fname, 'rb') as f:
for line in f:
try:
line = line.decode().strip()
except UnicodeDecodeError:
continue
if len(line) > 32:
continue
if is_weak_password(line):
continue
lines.append(line)
count += 1
print(line)
return lines
def insert_to_db(lines):
conn = sqlite3.connect('./leak_passwords.db')
cursor = conn.cursor()
create_table_sql = '''
CREATE TABLE IF NOT EXISTS passwords (
id INTEGER PRIMARY KEY AUTOINCREMENT,
password CHAR(32)
)
'''
create_index_sql = 'CREATE INDEX IF NOT EXISTS idx_password ON passwords(password)'
cursor.execute(create_table_sql)
cursor.execute(create_index_sql)
for line in lines:
cursor.execute('INSERT INTO passwords (password) VALUES (?)', [line])
conn.commit()
if __name__ == '__main__':
filename = sys.argv[1]
lines = parse_it(filename)
insert_to_db(lines)
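The script above filters a plain-text wordlist (dropping weak or over-long entries) and loads the rest into leak_passwords.db, the SQLite file that CheckLeakHandler later queries. A hedged usage sketch; the wordlist filename is only an example and is not part of the repository:

# python leak_passwords.py wordlist.txt     # builds ./leak_passwords.db next to the script

import sqlite3

conn = sqlite3.connect('./leak_passwords.db')
cursor = conn.cursor()
# Exact-match lookup, the same query shape CheckLeakHandler uses.
cursor.execute('SELECT 1 FROM passwords WHERE password = ? LIMIT 1', ['P@ssw0rd2024'])
print('leaked' if cursor.fetchone() else 'not found in the leak database')
conn.close()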

View File

@ -0,0 +1,283 @@
import hashlib
import os
import re
import sqlite3
import uuid
from django.conf import settings
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from accounts.models import Account, AccountRisk, RiskChoice
from assets.automations.base.manager import BaseManager
from common.const import ConfirmOrIgnore
from common.decorators import bulk_create_decorator, bulk_update_decorator
@bulk_create_decorator(AccountRisk)
def create_risk(data):
return AccountRisk(**data)
@bulk_update_decorator(AccountRisk, update_fields=["details", "status"])
def update_risk(risk):
return risk
class BaseCheckHandler:
risk = ''
def __init__(self, assets):
self.assets = assets
def check(self, account):
pass
def clean(self):
pass
class CheckSecretHandler(BaseCheckHandler):
risk = RiskChoice.weak_password
@staticmethod
def is_weak_password(password):
# Password length check
if len(password) < 8:
return True
# Only one character class (all digits or all letters)
if password.isdigit() or password.isalpha():
return True
# Only one letter case (all lowercase or all uppercase)
if password.islower() or password.isupper():
return True
# Common weak passwords
common_passwords = ["123456", "password", "12345678", "qwerty", "abc123"]
if password.lower() in common_passwords:
return True
# Regex check for character diversity (letters, digits, special characters)
if (
not re.search(r"[A-Za-z]", password)
or not re.search(r"[0-9]", password)
or not re.search(r"[\W_]", password)
):
return True
return False
def check(self, account):
if not account.secret:
return False
return self.is_weak_password(account.secret)
class CheckRepeatHandler(BaseCheckHandler):
risk = RiskChoice.repeated_password
def __init__(self, assets):
super().__init__(assets)
self.path, self.conn, self.cursor = self.init_repeat_check_db()
self.add_password_for_check_repeat()
@staticmethod
def init_repeat_check_db():
path = os.path.join('/tmp', 'accounts_' + str(uuid.uuid4()) + '.db')
sql = """
CREATE TABLE IF NOT EXISTS accounts (
id INTEGER PRIMARY KEY AUTOINCREMENT,
digest CHAR(32)
)
"""
index = "CREATE INDEX IF NOT EXISTS idx_digest ON accounts(digest)"
conn = sqlite3.connect(path)
cursor = conn.cursor()
cursor.execute(sql)
cursor.execute(index)
return path, conn, cursor
def check(self, account):
if not account.secret:
return False
digest = self.digest(account.secret)
sql = 'SELECT COUNT(*) FROM accounts WHERE digest = ?'
self.cursor.execute(sql, [digest])
result = self.cursor.fetchone()
if not result:
return False
return result[0] > 1
@staticmethod
def digest(secret):
return hashlib.md5(secret.encode()).hexdigest()
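# Note: the MD5 hex digest above is only used as a dedup key inside a throwaway
# SQLite file under /tmp, so repeated secrets across accounts can be counted;
# it is not a storage format for the secrets themselves.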
def add_password_for_check_repeat(self):
accounts = Account.objects.all().only('id', '_secret', 'secret_type')
sql = "INSERT INTO accounts (digest) VALUES (?)"
for account in accounts:
secret = account.secret
if not secret:
continue
digest = self.digest(secret)
self.cursor.execute(sql, [digest])
self.conn.commit()
def clean(self):
self.cursor.close()
self.conn.close()
os.remove(self.path)
class CheckLeakHandler(BaseCheckHandler):
risk = RiskChoice.leaked_password
def __init__(self, *args):
super().__init__(*args)
self.conn, self.cursor = self.init_leak_password_db()
@staticmethod
def init_leak_password_db():
db_path = os.path.join(
settings.APPS_DIR, 'accounts', 'automations',
'check_account', 'leak_passwords.db'
)
if settings.LEAK_PASSWORD_DB_PATH and os.path.isfile(settings.LEAK_PASSWORD_DB_PATH):
db_path = settings.LEAK_PASSWORD_DB_PATH
db_conn = sqlite3.connect(db_path)
db_cursor = db_conn.cursor()
return db_conn, db_cursor
def check(self, account):
if not account.secret:
return False
sql = 'SELECT 1 FROM passwords WHERE password = ? LIMIT 1'
self.cursor.execute(sql, (account.secret,))
leak = self.cursor.fetchone() is not None
return leak
def clean(self):
self.cursor.close()
self.conn.close()
class CheckAccountManager(BaseManager):
batch_size = 100
tmpl = 'Checked the status of account %s: %s'
def __init__(self, execution):
super().__init__(execution)
self.assets = []
self.batch_risks = []
self.handlers = []
def add_risk(self, risk, account):
self.summary[risk] += 1
self.result[risk].append({
'asset': str(account.asset), 'username': account.username,
})
risk_obj = {'account': account, 'risk': risk}
self.batch_risks.append(risk_obj)
def commit_risks(self, assets):
account_risks = AccountRisk.objects.filter(asset__in=assets)
ori_risk_map = {}
for risk in account_risks:
key = f'{risk.account_id}_{risk.risk}'
ori_risk_map[key] = risk
now = timezone.now().isoformat()
for d in self.batch_risks:
account = d["account"]
key = f'{account.id}_{d["risk"]}'
origin_risk = ori_risk_map.get(key)
if origin_risk and origin_risk.status != ConfirmOrIgnore.pending:
details = origin_risk.details or []
details.append({"datetime": now, 'type': 'refind'})
if len(details) > 10:
details = [*details[:5], *details[-5:]]
origin_risk.details = details
origin_risk.status = ConfirmOrIgnore.pending
update_risk(origin_risk)
else:
create_risk({
"account": account,
"asset": account.asset,
"username": account.username,
"risk": d["risk"],
"details": [{"datetime": now, 'type': 'init'}],
})
def pre_run(self):
super().pre_run()
self.assets = self.execution.get_all_assets()
def batch_check(self, handler):
print("Engine: {}".format(handler.__class__.__name__))
for i in range(0, len(self.assets), self.batch_size):
_assets = self.assets[i: i + self.batch_size]
accounts = Account.objects.filter(asset__in=_assets)
print("Start to check accounts: {}".format(len(accounts)))
for account in accounts:
error = handler.check(account)
msg = handler.risk if error else 'ok'
print("Check: {} => {}".format(account, msg))
if not error:
continue
self.add_risk(handler.risk, account)
self.commit_risks(_assets)
def do_run(self, *args, **kwargs):
engines = self.execution.snapshot.get("engines", [])
if engines == '__all__':
engines = ['check_account_secret', 'check_account_repeat', 'check_account_leak']
for engine in engines:
if engine == "check_account_secret":
handler = CheckSecretHandler(self.assets)
elif engine == "check_account_repeat":
handler = CheckRepeatHandler(self.assets)
elif engine == "check_account_leak":
handler = CheckLeakHandler(self.assets)
else:
print("Unknown engine: {}".format(engine))
continue
self.handlers.append(handler)
self.batch_check(handler)
def post_run(self):
super().post_run()
for handler in self.handlers:
handler.clean()
def get_report_subject(self):
return "Check account report of %s" % self.execution.id
def get_report_template(self):
return "accounts/check_account_report.html"
def print_summary(self):
tmpl = _("---\nSummary: \nok: {}, weak password: {}, leaked password: {}, "
"repeated password: {}, no secret: {}, using time: {}s").format(
self.summary["ok"],
self.summary[RiskChoice.weak_password],
self.summary[RiskChoice.leaked_password],
self.summary[RiskChoice.repeated_password],
self.summary["no_secret"],
self.duration
)
print(tmpl)
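do_run above selects one handler per engine name found in the execution snapshot, with '__all__' expanding to all three built-in checks. A small illustration of that dispatch; the snapshot contents and the plain-dict lookup are invented for the example:

ENGINE_HANDLERS = {
    'check_account_secret': 'CheckSecretHandler',
    'check_account_repeat': 'CheckRepeatHandler',
    'check_account_leak': 'CheckLeakHandler',
}

def resolve_engines(snapshot):
    engines = snapshot.get('engines', [])
    if engines == '__all__':
        engines = list(ENGINE_HANDLERS)
    # Unknown engine names are skipped, mirroring the manager's else branch.
    return [ENGINE_HANDLERS[e] for e in engines if e in ENGINE_HANDLERS]

print(resolve_engines({'engines': '__all__'}))
print(resolve_engines({'engines': ['check_account_leak']}))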

View File

@ -1,6 +1,7 @@
from .backup_account.manager import AccountBackupManager
from .change_secret.manager import ChangeSecretManager
from .gather_accounts.manager import GatherAccountsManager
from .check_account.manager import CheckAccountManager
from .gather_account.manager import GatherAccountsManager
from .push_account.manager import PushAccountManager
from .remove_account.manager import RemoveAccountManager
from .verify_account.manager import VerifyAccountManager
@ -16,8 +17,8 @@ class ExecutionManager:
AutomationTypes.remove_account: RemoveAccountManager,
AutomationTypes.gather_accounts: GatherAccountsManager,
AutomationTypes.verify_gateway_account: VerifyGatewayAccountManager,
# TODO: migrate into automation strategies later
'backup_account': AccountBackupManager,
AutomationTypes.check_account: CheckAccountManager,
AutomationTypes.backup_account: AccountBackupManager,
}
def __init__(self, execution):
@ -26,3 +27,6 @@ class ExecutionManager:
def run(self, *args, **kwargs):
return self._runner.run(*args, **kwargs)
def __getattr__(self, item):
return getattr(self._runner, item)

View File

@ -1,7 +1,7 @@
- hosts: mongodb
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
tasks:
- name: Get info
@ -15,7 +15,7 @@
ssl_ca_certs: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_certfile: "{{ jms_asset.secret_info.client_key | default('') }}"
connection_options:
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert }}"
filter: users
register: db_info

View File

@ -1,7 +1,7 @@
- hosts: mysql
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"

View File

@ -1,7 +1,7 @@
- hosts: oracle
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
tasks:
- name: Get info

View File

@ -1,7 +1,7 @@
- hosts: postgresql
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"

View File

@ -0,0 +1,43 @@
- hosts: sqlserver
gather_facts: no
vars:
ansible_python_interpreter: "{{ local_python_interpreter }}"
tasks:
- name: Test SQLServer connection
community.general.mssql_script:
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: '{{ jms_asset.spec_info.db_name }}'
script: |
SELECT
l.name,
l.modify_date,
l.is_disabled,
l.create_date,
l.default_database_name,
LOGINPROPERTY(name, 'DaysUntilExpiration') AS days_until_expiration,
MAX(s.login_time) AS last_login_time
FROM
sys.sql_logins l
LEFT JOIN
sys.dm_exec_sessions s
ON
l.name = s.login_name
WHERE
s.is_user_process = 1 OR s.login_name IS NULL
GROUP BY
l.name, l.create_date, l.modify_date, l.is_disabled, l.default_database_name
ORDER BY
last_login_time DESC;
output: dict
register: db_info
- name: Define info by set_fact
set_fact:
info: "{{ db_info.query_results_dict }}"
- debug:
var: info

View File

@ -0,0 +1,10 @@
id: gather_accounts_sqlserver
name: "{{ 'SQLServer account gather' | trans }}"
category: database
type:
- sqlserver
method: gather_accounts
i18n:
SQLServer account gather:
zh: SQLServer 账号收集
ja: SQLServer アカウントの収集

View File

@ -0,0 +1,270 @@
from datetime import datetime
from django.utils import timezone
__all__ = ['GatherAccountsFilter']
def parse_date(date_str, default=None):
if not date_str:
return default
if date_str in ['Never', 'null']:
return default
formats = [
'%Y/%m/%d %H:%M:%S',
'%Y-%m-%dT%H:%M:%S',
'%Y-%m-%d %H:%M:%S',
'%d-%m-%Y %H:%M:%S',
'%Y/%m/%d',
'%d-%m-%Y',
]
for fmt in formats:
try:
dt = datetime.strptime(date_str, fmt)
return timezone.make_aware(dt, timezone.get_current_timezone())
except ValueError:
continue
return default
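# Illustrative examples (assumed behaviour, based on the formats list above):
#   parse_date('2024/01/02 03:04:05') -> timezone-aware datetime in the current timezone
#   parse_date('Never') -> default (None); parse_date('') -> default (None)
#   parse_date('not-a-date') -> default (None), because no format matches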
class GatherAccountsFilter:
def __init__(self, tp):
self.tp = tp
@staticmethod
def mysql_filter(info):
result = {}
for host, user_dict in info.items():
for username, user_info in user_dict.items():
password_last_changed = parse_date(user_info.get('password_last_changed'))
password_lifetime = user_info.get('password_lifetime')
user = {
'username': username,
'date_password_change': password_last_changed,
'date_password_expired': password_last_changed + timezone.timedelta(
days=password_lifetime) if password_last_changed and password_lifetime else None,
'date_last_login': None,
'groups': '',
}
result[username] = user
return result
@staticmethod
def postgresql_filter(info):
result = {}
for username, user_info in info.items():
user = {
'username': username,
'date_password_change': None,
'date_password_expired': parse_date(user_info.get('valid_until')),
'date_last_login': None,
'groups': '',
}
detail = {
'can_login': user_info.get('canlogin'),
'superuser': user_info.get('superuser'),
}
user['detail'] = detail
result[username] = user
return result
@staticmethod
def sqlserver_filter(info):
if not info:
return {}
result = {}
for user_info in info[0][0]:
days_until_expiration = user_info.get('days_until_expiration')
date_password_expired = timezone.now() + timezone.timedelta(
days=int(days_until_expiration)) if days_until_expiration else None
user = {
'username': user_info.get('name', ''),
'date_password_change': parse_date(user_info.get('modify_date')),
'date_password_expired': date_password_expired,
'date_last_login': parse_date(user_info.get('last_login_time')),
'groups': '',
}
detail = {
'create_date': user_info.get('create_date', ''),
'is_disabled': user_info.get('is_disabled', ''),
'default_database_name': user_info.get('default_database_name', ''),
}
user['detail'] = detail
result[user['username']] = user
return result
@staticmethod
def oracle_filter(info):
result = {}
for default_tablespace, users in info.items():
for username, user_info in users.items():
user = {
'username': username,
'date_password_change': parse_date(user_info.get('password_change_date')),
'date_password_expired': parse_date(user_info.get('expiry_date')),
'date_last_login': parse_date(user_info.get('last_login')),
'groups': '',
}
detail = {
'uid': user_info.get('user_id', ''),
'create_date': user_info.get('created', ''),
'account_status': user_info.get('account_status', ''),
'default_tablespace': default_tablespace,
'roles': user_info.get('roles', []),
'privileges': user_info.get('privileges', []),
}
user['detail'] = detail
result[user['username']] = user
return result
@staticmethod
def posix_filter(info):
user_groups = info.pop('user_groups', [])
username_groups = {}
for line in user_groups:
if ':' not in line:
continue
username, groups = line.split(':', 1)
username_groups[username.strip()] = groups.strip()
user_sudo = info.pop('user_sudo', [])
username_sudo = {}
for line in user_sudo:
if ':' not in line:
continue
username, sudo = line.split(':', 1)
if not sudo.strip():
continue
username_sudo[username.strip()] = sudo.strip()
last_login = info.pop('last_login', '')
user_last_login = {}
for line in last_login:
if not line.strip() or ' ' not in line:
continue
username, login = line.split(' ', 1)
user_last_login[username] = login.split()
user_authorized = info.pop('user_authorized', [])
username_authorized = {}
for line in user_authorized:
if ':' not in line:
continue
username, authorized = line.split(':', 1)
username_authorized[username.strip()] = authorized.strip()
passwd_date = info.pop('passwd_date', [])
username_password_date = {}
for line in passwd_date:
if ':' not in line:
continue
username, password_date = line.split(':', 1)
username_password_date[username.strip()] = password_date.strip().split()
result = {}
users = info.pop('users', '')
for username in users:
if not username:
continue
user = dict()
login = user_last_login.get(username) or ''
if login and len(login) == 3:
user['address_last_login'] = login[0][:32]
try:
login_date = timezone.datetime.fromisoformat(login[1])
user['date_last_login'] = login_date
except ValueError:
pass
start_date = timezone.make_aware(timezone.datetime(1970, 1, 1))
_password_date = username_password_date.get(username) or ''
if _password_date and len(_password_date) == 2:
if _password_date[0]:
user['date_password_change'] = start_date + timezone.timedelta(days=int(_password_date[0]))
if _password_date[1]:
user['date_password_expired'] = start_date + timezone.timedelta(days=int(_password_date[1]))
detail = {
'groups': username_groups.get(username) or '',
'sudoers': username_sudo.get(username) or '',
'authorized_keys': username_authorized.get(username) or ''
}
user['detail'] = detail
result[username] = user
return result
@staticmethod
def windows_filter(info):
result = {}
for user_details in info['user_details']:
user_info = {}
lines = user_details['stdout_lines']
for line in lines:
if not line.strip():
continue
parts = line.split(' ', 1)
if len(parts) == 2:
key, value = parts
user_info[key.strip()] = value.strip()
detail = {'groups': user_info.get('Global Group memberships', ''), }
username = user_info.get('User name')
if not username:
continue
result[username] = {
'username': username,
'date_password_change': parse_date(user_info.get('Password last set')),
'date_password_expired': parse_date(user_info.get('Password expires')),
'date_last_login': parse_date(user_info.get('Last logon')),
'groups': detail,
}
return result
@staticmethod
def windows_ad_filter(info):
result = {}
for user_info in info['user_details']:
detail = {'groups': user_info.get('GlobalGroupMemberships', ''), }
username = user_info.get('SamAccountName')
if not username:
continue
result[username] = {
'username': username,
'date_password_change': parse_date(user_info.get('PasswordLastSet')),
'date_password_expired': parse_date(user_info.get('PasswordExpires')),
'date_last_login': parse_date(user_info.get('LastLogonDate')),
'groups': detail,
}
return result
@staticmethod
def mongodb_filter(info):
result = {}
for db, users in info.items():
for username, user_info in users.items():
user = {
'username': username,
'date_password_change': None,
'date_password_expired': None,
'date_last_login': None,
'groups': '',
}
user['detail'] = {'db': db, 'roles': user_info.get('roles', [])}
result[username] = user
return result
def run(self, method_id_meta_mapper, info):
run_method_name = None
for k, v in method_id_meta_mapper.items():
if self.tp not in v['type']:
continue
run_method_name = k.replace(f'{v["method"]}_', '')
if not run_method_name:
return info
if hasattr(self, f'{run_method_name}_filter'):
return getattr(self, f'{run_method_name}_filter')(info)
return info
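run() above dispatches on a naming convention: for a method whose type list contains self.tp, the method prefix is stripped from the method id and the remainder picks the matching <name>_filter static method. A small sketch of that lookup; the mapper contents below are invented, only its shape (method id -> {'method', 'type'}) follows how run() reads it:

method_id_meta_mapper = {
    'gather_accounts_posix': {'method': 'gather_accounts', 'type': ['linux', 'unix']},
    'gather_accounts_mysql': {'method': 'gather_accounts', 'type': ['mysql']},
}

tp = 'mysql'
run_method_name = None
for k, v in method_id_meta_mapper.items():
    if tp not in v['type']:
        continue
    run_method_name = k.replace(f"{v['method']}_", '')

print(run_method_name)               # 'mysql'
print(f'{run_method_name}_filter')   # 'mysql_filter', resolved via getattr on GatherAccountsFilter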

View File

@ -0,0 +1,61 @@
- hosts: demo
gather_facts: no
tasks:
- name: Get users
ansible.builtin.shell:
cmd: >
getent passwd | awk -F: '$7 !~ /(false|nologin|true|sync)$/' | grep -v '^$' | awk -F":" '{ print $1 }'
register: users
- name: Gather posix account last login
ansible.builtin.shell: |
for user in {{ users.stdout_lines | join(" ") }}; do
last -wi --time-format iso -n 1 ${user} | awk '{ print $1,$3,$4, $NF }' | head -1 | awk 'NF'
done
register: last_login
- name: Get user password change date and expiry
ansible.builtin.shell: |
for user in {{ users.stdout_lines | join(" ") }}; do
k=$(getent shadow $user | awk -F: '{ print $3, $5 }')
echo "$user:$k"
done
register: passwd_date
- name: Get user groups
ansible.builtin.shell: |
for user in {{ users.stdout_lines | join(" ") }}; do
echo "$(groups $user)" | sed 's@ : @:@g'
done
register: user_groups
- name: Get sudoers
ansible.builtin.shell: |
for user in {{ users.stdout_lines | join(" ") }}; do
echo "$user: $(grep "^$user " /etc/sudoers | tr '\n' ';' || echo '')"
done
register: user_sudo
- name: Get authorized keys
ansible.builtin.shell: |
for user in {{ users.stdout_lines | join(" ") }}; do
home=$(getent passwd $user | cut -d: -f6)
echo -n "$user:"
if [ -f "${home}/.ssh/authorized_keys" ]; then
cat ${home}/.ssh/authorized_keys | tr '\n' ';'
fi
echo
done
register: user_authorized
- set_fact:
info:
users: "{{ users.stdout_lines }}"
last_login: "{{ last_login.stdout_lines }}"
user_groups: "{{ user_groups.stdout_lines }}"
user_sudo: "{{ user_sudo.stdout_lines }}"
user_authorized: "{{ user_authorized.stdout_lines }}"
passwd_date: "{{ passwd_date.stdout_lines }}"
- debug:
var: info

View File

@ -0,0 +1,33 @@
- hosts: demo
gather_facts: no
tasks:
- name: Run net user command to get all users
win_shell: net user
register: user_list_output
failed_when: false
- name: Parse all users from net user command
set_fact:
all_users: >-
{%- set users = [] -%}
{%- for line in user_list_output.stdout_lines -%}
{%- if loop.index > 4 and line.strip() != "" and not line.startswith("The command completed") -%}
{%- for user in line.split() -%}
{%- set _ = users.append(user) -%}
{%- endfor -%}
{%- endif -%}
{%- endfor -%}
{{ users }}
- name: Run net user command for each user to get details
win_shell: net user {{ item }}
loop: "{{ all_users }}"
register: user_details
ignore_errors: yes
- set_fact:
info:
user_details: "{{ user_details.results }}"
- debug:
var: info

View File

@ -2,10 +2,13 @@ id: gather_accounts_windows
name: "{{ 'Windows account gather' | trans }}"
version: 1
method: gather_accounts
category: host
category:
- host
type:
- windows
i18n:
Windows account gather:
zh: 使用命令 net user 收集 Windows 账号

View File

@ -0,0 +1,74 @@
- hosts: demo
gather_facts: no
tasks:
- name: Import ActiveDirectory module
win_shell: Import-Module ActiveDirectory
args:
warn: false
- name: Get the SamAccountName list of all AD users
win_shell: |
Import-Module ActiveDirectory
Get-ADUser -Filter * | Select-Object -ExpandProperty SamAccountName
register: ad_user_list
- name: Set the all_users variable
set_fact:
all_users: "{{ ad_user_list.stdout_lines }}"
- name: Get detailed information for each user
win_shell: |
Import-Module ActiveDirectory
$user = Get-ADUser -Identity {{ item }} -Properties Name, SamAccountName, Enabled, LastLogonDate, PasswordLastSet, msDS-UserPasswordExpiryTimeComputed, MemberOf
$globalGroups = @()
if ($user.MemberOf) {
$globalGroups = $user.MemberOf | ForEach-Object {
try {
$group = Get-ADGroup $_ -ErrorAction Stop
if ($group.GroupScope -eq 'Global') { $group.Name }
} catch {
}
}
}
$passwordExpiry = $null
$expiryRaw = $user.'msDS-UserPasswordExpiryTimeComputed'
if ($expiryRaw) {
try {
$passwordExpiry = [datetime]::FromFileTime($expiryRaw)
} catch {
$passwordExpiry = $null
}
}
$output = [PSCustomObject]@{
Name = $user.Name
SamAccountName = $user.SamAccountName
Enabled = $user.Enabled
LastLogonDate = if ($user.LastLogonDate) { $user.LastLogonDate.ToString("yyyy-MM-dd HH:mm:ss") } else { $null }
PasswordLastSet = if ($user.PasswordLastSet) { $user.PasswordLastSet.ToString("yyyy-MM-dd HH:mm:ss") } else { $null }
PasswordExpires = if ($passwordExpiry) { $passwordExpiry.ToString("yyyy-MM-dd HH:mm:ss") } else { $null }
GlobalGroupMemberships = $globalGroups
}
$output | ConvertTo-Json -Depth 3
loop: "{{ all_users }}"
register: ad_user_details
ignore_errors: yes
- set_fact:
info:
user_details: >-
{{
ad_user_details.results
| selectattr('rc', 'equalto', 0)
| map(attribute='stdout')
| select('truthy')
| map('from_json')
}}
- debug:
var: info

View File

@ -0,0 +1,15 @@
id: gather_accounts_windows_ad
name: "{{ 'Windows account gather' | trans }}"
version: 1
method: gather_accounts
category:
- ds
type:
- windows_ad
i18n:
Windows account gather:
zh: 使用命令 Get-ADUser 收集 Windows 账号
ja: コマンド Get-ADUser を使用して Windows アカウントを収集する
en: Using command Get-ADUser to gather accounts

View File

@ -0,0 +1,409 @@
import time
from collections import defaultdict
from django.utils import timezone
from accounts.const import AutomationTypes
from accounts.models import GatheredAccount, Account, AccountRisk, RiskChoice
from common.const import ConfirmOrIgnore
from common.decorators import bulk_create_decorator, bulk_update_decorator
from common.utils import get_logger
from common.utils.strings import get_text_diff
from orgs.utils import tmp_to_org
from .filter import GatherAccountsFilter
from ..base.manager import AccountBasePlaybookManager
logger = get_logger(__name__)
risk_items = [
"authorized_keys",
"sudoers",
"groups",
]
common_risk_items = [
"address_last_login",
"date_last_login",
"date_password_change",
"date_password_expired",
"detail"
]
diff_items = risk_items + common_risk_items
def format_datetime(value):
if isinstance(value, timezone.datetime):
return value.strftime("%Y-%m-%d %H:%M:%S")
return value
def get_items_diff(ori_account, d):
if hasattr(ori_account, "_diff"):
return ori_account._diff
diff = {}
for item in diff_items:
get_item_diff(item, ori_account, d, diff)
ori_account._diff = diff
return diff
def get_item_diff(item, ori_account, d, diff):
detail = getattr(ori_account, 'detail', {})
new_detail = d.get('detail', {})
ori = getattr(ori_account, item, None) or detail.get(item)
new = d.get(item, "") or new_detail.get(item)
if not ori and not new:
return
ori = format_datetime(ori)
new = format_datetime(new)
if new != ori:
diff[item] = get_text_diff(str(ori), str(new))
class AnalyseAccountRisk:
long_time = timezone.timedelta(days=90)
datetime_check_items = [
{"field": "date_last_login", "risk": "long_time_no_login", "delta": long_time},
{
"field": "date_password_change",
"risk": RiskChoice.long_time_password,
"delta": long_time,
},
{
"field": "date_password_expired",
"risk": "password_expired",
"delta": timezone.timedelta(seconds=1),
},
]
def __init__(self, check_risk=True):
self.check_risk = check_risk
self.now = timezone.now()
self.pending_add_risks = []
def _analyse_item_changed(self, ori_ga, d):
diff = get_items_diff(ori_ga, d)
if not diff:
return
risks = []
for k, v in diff.items():
if k not in risk_items:
continue
risks.append(
dict(
asset_id=str(ori_ga.asset_id),
username=ori_ga.username,
gathered_account=ori_ga,
risk=k + "_changed",
detail={"diff": v},
)
)
self.save_or_update_risks(risks)
def _analyse_datetime_changed(self, ori_account, d, asset, username):
basic = {"asset_id": str(asset.id), "username": username}
risks = []
for item in self.datetime_check_items:
field = item["field"]
risk = item["risk"]
delta = item["delta"]
date = d.get(field)
if not date:
continue
# If the time gathered from the server equals the time in the database we would skip the comparison,
# but then the risk cannot be detected; this is not quite right, so it is commented out for now
# pre_date = ori_account and getattr(ori_account, field)
# if pre_date == date:
# continue
if date and date < timezone.now() - delta:
risks.append(
dict(**basic, risk=risk, detail={"date": date.isoformat()})
)
self.save_or_update_risks(risks)
def save_or_update_risks(self, risks):
# Fetch these up front to avoid hitting the database for every record
asset_ids = {r["asset_id"] for r in risks}
assets_risks = AccountRisk.objects.filter(asset_id__in=asset_ids)
assets_risks = {f"{r.asset_id}_{r.username}_{r.risk}": r for r in assets_risks}
for d in risks:
detail = d.pop("detail", {})
detail["datetime"] = self.now.isoformat()
key = f"{d['asset_id']}_{d['username']}_{d['risk']}"
found = assets_risks.get(key)
if not found:
self._create_risk(dict(**d, details=[detail]))
continue
found.details.append(detail)
self._update_risk(found)
@bulk_create_decorator(AccountRisk)
def _create_risk(self, data):
return AccountRisk(**data)
@bulk_update_decorator(AccountRisk, update_fields=["details"])
def _update_risk(self, account):
return account
def lost_accounts(self, asset, lost_users):
if not self.check_risk:
return
for user in lost_users:
self._create_risk(
dict(
asset_id=str(asset.id),
username=user,
risk=RiskChoice.account_deleted,
details=[{"datetime": self.now.isoformat()}],
)
)
def analyse_risk(self, asset, ga, d, sys_found):
if not self.check_risk:
return
if ga:
self._analyse_item_changed(ga, d)
if not sys_found:
basic = {"asset": asset, "username": d["username"], 'gathered_account': ga}
self._create_risk(
dict(
**basic,
risk=RiskChoice.new_found,
details=[{"datetime": self.now.isoformat()}],
)
)
self._analyse_datetime_changed(ga, d, asset, d["username"])
class GatherAccountsManager(AccountBasePlaybookManager):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.host_asset_mapper = {}
self.asset_account_info = {}
self.asset_usernames_mapper = defaultdict(set)
self.ori_asset_usernames = defaultdict(set)
self.ori_gathered_usernames = defaultdict(set)
self.ori_gathered_accounts_mapper = dict()
self.is_sync_account = self.execution.snapshot.get("is_sync_account")
self.check_risk = self.execution.snapshot.get("check_risk", False)
@classmethod
def method_type(cls):
return AutomationTypes.gather_accounts
def host_callback(self, host, asset=None, **kwargs):
super().host_callback(host, asset=asset, **kwargs)
self.host_asset_mapper[host["name"]] = asset
return host
def _filter_success_result(self, tp, result):
result = GatherAccountsFilter(tp).run(self.method_id_meta_mapper, result)
return result
@staticmethod
def _get_nested_info(data, *keys):
for key in keys:
data = data.get(key, {})
if not data:
break
return data
def _collect_asset_account_info(self, asset, info):
result = self._filter_success_result(asset.type, info)
accounts = []
for username, info in result.items():
self.asset_usernames_mapper[str(asset.id)].add(username)
d = {"asset": asset, "username": username, "remote_present": True, **info}
accounts.append(d)
self.asset_account_info[asset] = accounts
def on_host_success(self, host, result):
super().on_host_success(host, result)
info = self._get_nested_info(result, "debug", "res", "info")
asset = self.host_asset_mapper.get(host)
if asset and info:
self._collect_asset_account_info(asset, info)
else:
print(f"\033[31m Not found {host} info \033[0m\n")
def prefetch_origin_account_usernames(self):
"""
Prefetch usernames to avoid a SQL query per account
:return:
"""
assets = self.asset_usernames_mapper.keys()
accounts = Account.objects.filter(asset__in=assets).values_list(
"asset", "username"
)
for asset_id, username in accounts:
self.ori_asset_usernames[str(asset_id)].add(username)
ga_accounts = GatheredAccount.objects.filter(asset__in=assets)
for account in ga_accounts:
self.ori_gathered_usernames[str(account.asset_id)].add(account.username)
key = "{}_{}".format(account.asset_id, account.username)
self.ori_gathered_accounts_mapper[key] = account
def update_gather_accounts_status(self, asset):
"""
Compare remote accounts, gathered accounts, and the accounts in the vault.
Flag the status of gathered accounts according to what was added or removed, so administrators can follow up.
Remote accounts -> gathered accounts -> privileged accounts
"""
remote_users = self.asset_usernames_mapper[str(asset.id)]
ori_users = self.ori_asset_usernames[str(asset.id)]
ori_ga_users = self.ori_gathered_usernames[str(asset.id)]
queryset = GatheredAccount.objects.filter(asset=asset).exclude(
status=ConfirmOrIgnore.ignored
)
# Accounts present on the remote host but not yet in the gathered accounts:
# newly created, no status handling needed
new_found_users = remote_users - ori_ga_users
if new_found_users:
self.summary["new_accounts"] += len(new_found_users)
for username in new_found_users:
self.result["new_accounts"].append(
{
"asset": str(asset),
"username": username,
}
)
# Accounts in the gathered list but no longer present on the remote host:
# mark remote_present=False and set the status to pending
# (missing on the remote asset, requires administrator intervention)
lost_users = ori_ga_users - remote_users
if lost_users:
queryset.filter(username__in=lost_users).update(
status=ConfirmOrIgnore.pending, remote_present=False
)
self.summary["lost_accounts"] += len(lost_users)
for username in lost_users:
self.result["lost_accounts"].append(
{
"asset": str(asset),
"username": username,
}
)
risk_analyser = AnalyseAccountRisk(self.check_risk)
risk_analyser.lost_accounts(asset, lost_users)
# Gathered accounts that are not in the account list: they may have been deleted from accounts
# while their status was already confirmed, so set them back to pending for administrators to review
ga_added_users = ori_ga_users - ori_users
if ga_added_users:
queryset.filter(username__in=ga_added_users).update(status=ConfirmOrIgnore.pending)
# Gathered accounts fewer than the account list: apparently no comparison needed, that is the original state
# Remote accounts fewer than the account list:
# create gathered accounts with remote_present=False and status pending
# Remote accounts more than the account list:
# normally nothing to do, since remote accounts are created as gathered accounts and compared there;
# still update the status here, because accounts that already exist should be marked confirmed
(
queryset.filter(username__in=ori_users)
.exclude(status=ConfirmOrIgnore.confirmed)
.update(status=ConfirmOrIgnore.confirmed)
)
# Accounts that exist on the remote host are marked as present
(
queryset.filter(username__in=remote_users, remote_present=False).update(
remote_present=True
)
)
# Accounts not in the asset's account list are marked as not present
(
queryset.exclude(username__in=ori_users)
.filter(present=True)
.update(present=False)
)
(
queryset.filter(username__in=ori_users)
.filter(present=False)
.update(present=True)
)
@bulk_create_decorator(GatheredAccount)
def create_gathered_account(self, d):
ga = GatheredAccount()
for k, v in d.items():
setattr(ga, k, v)
return ga
@bulk_update_decorator(GatheredAccount, update_fields=common_risk_items)
def update_gathered_account(self, ori_account, d):
diff = get_items_diff(ori_account, d)
if not diff:
return
for k in diff:
if k not in common_risk_items:
continue
v = d.get(k)
setattr(ori_account, k, v)
return ori_account
def do_run(self, *args, **kwargs):
super().do_run(*args, **kwargs)
self.prefetch_origin_account_usernames()
risk_analyser = AnalyseAccountRisk(self.check_risk)
for asset, accounts_data in self.asset_account_info.items():
ori_users = self.ori_asset_usernames[str(asset.id)]
need_analyser_gather_account = []
with tmp_to_org(asset.org_id):
for d in accounts_data:
username = d["username"]
ori_account = self.ori_gathered_accounts_mapper.get(
"{}_{}".format(asset.id, username)
)
if not ori_account:
ga = self.create_gathered_account(d)
else:
ga = ori_account
self.update_gathered_account(ori_account, d)
ori_found = username in ori_users
need_analyser_gather_account.append((asset, ga, d, ori_found))
self.create_gathered_account.finish()
self.update_gathered_account.finish()
for analysis_data in need_analyser_gather_account:
risk_analyser.analyse_risk(*analysis_data)
self.update_gather_accounts_status(asset)
if not self.is_sync_account:
continue
gathered_accounts = GatheredAccount.objects.filter(asset=asset)
GatheredAccount.sync_accounts(gathered_accounts)
GatheredAccount.objects.filter(
asset=asset, username__in=ori_users, present=False
).update(
present=True
)
# Because of the bulk create / bulk update above, sleep briefly to wait for the data to sync
time.sleep(0.5)
def get_report_template(self):
return "accounts/gather_account_report.html"

View File

@ -1,75 +0,0 @@
import re
from django.utils import timezone
__all__ = ['GatherAccountsFilter']
# TODO: move into the playbook later
class GatherAccountsFilter:
def __init__(self, tp):
self.tp = tp
@staticmethod
def mysql_filter(info):
result = {}
for _, user_dict in info.items():
for username, _ in user_dict.items():
if len(username.split('.')) == 1:
result[username] = {}
return result
@staticmethod
def postgresql_filter(info):
result = {}
for username in info:
result[username] = {}
return result
@staticmethod
def posix_filter(info):
username_pattern = re.compile(r'^(\S+)')
ip_pattern = re.compile(r'(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})')
login_time_pattern = re.compile(r'\w{3} \w{3}\s+\d{1,2} \d{2}:\d{2}:\d{2} \d{4}')
result = {}
for line in info:
usernames = username_pattern.findall(line)
username = ''.join(usernames)
if username:
result[username] = {}
else:
continue
ip_addrs = ip_pattern.findall(line)
ip_addr = ''.join(ip_addrs)
if ip_addr:
result[username].update({'address': ip_addr})
login_times = login_time_pattern.findall(line)
if login_times:
datetime_str = login_times[0].split(' ', 1)[1] + " +0800"
date = timezone.datetime.strptime(datetime_str, '%b %d %H:%M:%S %Y %z')
result[username].update({'date': date})
return result
@staticmethod
def windows_filter(info):
info = info[4:-2]
result = {}
for i in info:
for username in i.split():
result[username] = {}
return result
def run(self, method_id_meta_mapper, info):
run_method_name = None
for k, v in method_id_meta_mapper.items():
if self.tp not in v['type']:
continue
run_method_name = k.replace(f'{v["method"]}_', '')
if not run_method_name:
return info
if hasattr(self, f'{run_method_name}_filter'):
return getattr(self, f'{run_method_name}_filter')(info)
return info
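For reference, posix_filter turns the raw lines gathered from a Linux host into a username-keyed dict, picking out the last-login address and time when they are present. A small illustration using a made-up input line:

lines = ['root pts/0 192.168.1.10 Mon Apr 21 10:00:00 2025 - still logged in']
print(GatherAccountsFilter.posix_filter(lines))
# {'root': {'address': '192.168.1.10', 'date': datetime(2025, 4, 21, 10, 0, tzinfo=...)}}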

View File

@ -1,21 +0,0 @@
- hosts: demo
gather_facts: no
tasks:
- name: Gather posix account
ansible.builtin.shell:
cmd: >
users=$(getent passwd | grep -v nologin | grep -v shutdown | awk -F":" '{ print $1 }');for i in $users;
do k=$(last -w -F $i -1 | head -1 | grep -v ^$ | awk '{ print $0 }')
if [ -n "$k" ]; then
echo $k
else
echo $i
fi;done
register: result
- name: Define info by set_fact
ansible.builtin.set_fact:
info: "{{ result.stdout_lines }}"
- debug:
var: info

View File

@ -1,14 +0,0 @@
- hosts: demo
gather_facts: no
tasks:
- name: Gather windows account
ansible.builtin.win_shell: net user
register: result
ignore_errors: true
- name: Define info by set_fact
ansible.builtin.set_fact:
info: "{{ result.stdout_lines }}"
- debug:
var: info

View File

@ -1,139 +0,0 @@
from collections import defaultdict
from accounts.const import AutomationTypes
from accounts.models import GatheredAccount
from assets.models import Asset
from common.utils import get_logger
from orgs.utils import tmp_to_org
from users.models import User
from .filter import GatherAccountsFilter
from ..base.manager import AccountBasePlaybookManager
from ...notifications import GatherAccountChangeMsg
logger = get_logger(__name__)
class GatherAccountsManager(AccountBasePlaybookManager):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.host_asset_mapper = {}
self.asset_account_info = {}
self.asset_username_mapper = defaultdict(set)
self.is_sync_account = self.execution.snapshot.get('is_sync_account')
@classmethod
def method_type(cls):
return AutomationTypes.gather_accounts
def host_callback(self, host, asset=None, **kwargs):
super().host_callback(host, asset=asset, **kwargs)
self.host_asset_mapper[host['name']] = asset
return host
def filter_success_result(self, tp, result):
result = GatherAccountsFilter(tp).run(self.method_id_meta_mapper, result)
return result
def generate_data(self, asset, result):
data = []
for username, info in result.items():
self.asset_username_mapper[str(asset.id)].add(username)
d = {'asset': asset, 'username': username, 'present': True}
if info.get('date'):
d['date_last_login'] = info['date']
if info.get('address'):
d['address_last_login'] = info['address'][:32]
data.append(d)
return data
def collect_asset_account_info(self, asset, result):
data = self.generate_data(asset, result)
self.asset_account_info[asset] = data
@staticmethod
def get_nested_info(data, *keys):
for key in keys:
data = data.get(key, {})
if not data:
break
return data
def on_host_success(self, host, result):
info = self.get_nested_info(result, 'debug', 'res', 'info')
asset = self.host_asset_mapper.get(host)
if asset and info:
result = self.filter_success_result(asset.type, info)
self.collect_asset_account_info(asset, result)
else:
print(f'\033[31m Info not found for {host} \033[0m\n')
def update_or_create_accounts(self):
for asset, data in self.asset_account_info.items():
with tmp_to_org(asset.org_id):
gathered_accounts = []
GatheredAccount.objects.filter(asset=asset, present=True).update(present=False)
for d in data:
username = d['username']
gathered_account, __ = GatheredAccount.objects.update_or_create(
defaults=d, asset=asset, username=username,
)
gathered_accounts.append(gathered_account)
if not self.is_sync_account:
continue
GatheredAccount.sync_accounts(gathered_accounts)
def run(self, *args, **kwargs):
super().run(*args, **kwargs)
users, change_info = self.generate_send_users_and_change_info()
self.update_or_create_accounts()
self.send_email_if_need(users, change_info)
def generate_send_users_and_change_info(self):
recipients = self.execution.recipients
if not self.asset_username_mapper or not recipients:
return None, None
users = User.objects.filter(id__in=recipients)
if not users.exists():
return users, None
asset_ids = self.asset_username_mapper.keys()
assets = Asset.objects.filter(id__in=asset_ids).prefetch_related('accounts')
gather_accounts = GatheredAccount.objects.filter(asset_id__in=asset_ids, present=True)
asset_id_map = {str(asset.id): asset for asset in assets}
asset_id_username = list(assets.values_list('id', 'accounts__username'))
asset_id_username.extend(list(gather_accounts.values_list('asset_id', 'username')))
system_asset_username_mapper = defaultdict(set)
for asset_id, username in asset_id_username:
system_asset_username_mapper[str(asset_id)].add(username)
change_info = defaultdict(dict)
for asset_id, usernames in self.asset_username_mapper.items():
system_usernames = system_asset_username_mapper.get(asset_id)
if not system_usernames:
continue
add_usernames = usernames - system_usernames
remove_usernames = system_usernames - usernames
if not add_usernames and not remove_usernames:
continue
change_info[str(asset_id_map[asset_id])] = {
'add_usernames': add_usernames,
'remove_usernames': remove_usernames
}
return users, dict(change_info)
@staticmethod
def send_email_if_need(users, change_info):
if not users or not change_info:
return
for user in users:
GatherAccountChangeMsg(user, change_info).publish_async()
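generate_send_users_and_change_info compares the usernames collected from each asset against the usernames already known to the system (existing accounts plus previously gathered accounts) and keys the result on the asset's string representation. A made-up example of the change_info shape it returns:

change_info = {
    'web-server-01': {
        'add_usernames': {'deploy'},      # collected remotely but unknown to the system
        'remove_usernames': {'olduser'},  # known to the system but no longer collected
    },
}

GatherAccountChangeMsg then renders this mapping into the notification published to each recipient.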

View File

@ -0,0 +1,62 @@
- hosts: custom
gather_facts: no
vars:
ansible_connection: local
ansible_become: false
tasks:
- name: Test privileged account (paramiko)
ssh_ping:
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
login_secret_type: "{{ jms_account.secret_type }}"
login_private_key_path: "{{ jms_account.private_key_path }}"
become: "{{ jms_custom_become | default(False) }}"
become_method: "{{ jms_custom_become_method | default('su') }}"
become_user: "{{ jms_custom_become_user | default('') }}"
become_password: "{{ jms_custom_become_password | default('') }}"
become_private_key_path: "{{ jms_custom_become_private_key_path | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
register: ping_info
delegate_to: localhost
- name: Change asset password (paramiko)
custom_command:
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_secret_type: "{{ jms_account.secret_type }}"
login_private_key_path: "{{ jms_account.private_key_path }}"
become: "{{ jms_custom_become | default(False) }}"
become_method: "{{ jms_custom_become_method | default('su') }}"
become_user: "{{ jms_custom_become_user | default('') }}"
become_password: "{{ jms_custom_become_password | default('') }}"
become_private_key_path: "{{ jms_custom_become_private_key_path | default(None) }}"
name: "{{ account.username }}"
password: "{{ account.secret }}"
commands: "{{ params.commands }}"
first_conn_delay_time: "{{ first_conn_delay_time | default(0.5) }}"
ignore_errors: true
when: ping_info is succeeded and check_conn_after_change
register: change_info
delegate_to: localhost
- name: Verify password (paramiko)
ssh_ping:
login_user: "{{ account.username }}"
login_password: "{{ account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
become: "{{ account.become.ansible_become | default(False) }}"
become_method: su
become_user: "{{ account.become.ansible_user | default('') }}"
become_password: "{{ account.become.ansible_password | default('') }}"
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
delegate_to: localhost
when: check_conn_after_change

View File

@ -0,0 +1,32 @@
id: push_account_by_ssh
name: "{{ 'SSH account push' | trans }}"
category:
- device
- host
type:
- all
method: push_account
protocol: ssh
priority: 50
params:
- name: commands
type: list
label: "{{ 'Params commands label' | trans }}"
default: [ '' ]
help_text: "{{ 'Params commands help text' | trans }}"
i18n:
SSH account push:
zh: '使用 SSH 命令行自定义推送'
ja: 'SSHコマンドラインを使用してプッシュをカスタマイズする'
en: 'Custom push using SSH command line'
Params commands help text:
zh: '自定义命令中如需包含账号的 账号、密码、SSH 连接的用户密码 字段,<br />请使用 &#123;username&#125;、&#123;password&#125;、&#123;login_password&#125;格式,执行任务时会进行替换 。<br />比如针对 Cisco 主机进行改密,一般需要配置五条命令:<br />1. enable<br />2. &#123;login_password&#125;<br />3. configure terminal<br />4. username &#123;username&#125; privilege 0 password &#123;password&#125; <br />5. end'
ja: 'カスタム コマンドに SSH 接続用のアカウント番号、パスワード、ユーザー パスワード フィールドを含める必要がある場合は、<br />&#123;ユーザー名&#125;、&#123;パスワード&#125;、&#123;login_password& を使用してください。 # 125; 形式。タスクの実行時に置き換えられます。 <br />たとえば、Cisco ホストのパスワードを変更するには、通常、次の 5 つのコマンドを設定する必要があります:<br />1.enable<br />2.&#123;login_password&#125;<br />3 .ターミナルの設定<br / >4. ユーザー名 &#123;ユーザー名&#125; 権限 0 パスワード &#123;パスワード&#125; <br />5. 終了'
en: 'If the custom command needs to include the account number, password, and user password field for SSH connection,<br />Please use &#123;username&#125;, &#123;password&#125;, &#123;login_password&# 125; format, which will be replaced when executing the task. <br />For example, to change the password of a Cisco host, you generally need to configure five commands:<br />1. enable<br />2. &#123;login_password&#125;<br />3. configure terminal<br / >4. username &#123;username&#125; privilege 0 password &#123;password&#125; <br />5. end'
Params commands label:
zh: '自定义命令'
ja: 'カスタムコマンド'
en: 'Custom command'
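As a concrete illustration of the Cisco change-password flow described in the help text above, the commands parameter for that scenario would be a list like the following, with the placeholders substituted at execution time:

commands = [
    'enable',
    '{login_password}',
    'configure terminal',
    'username {username} privilege 0 password {password}',
    'end',
]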

View File

@ -1,7 +1,7 @@
- hosts: mongodb
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
tasks:
- name: Test MongoDB connection
@ -53,3 +53,4 @@
ssl_certfile: "{{ jms_asset.secret_info.client_key | default('') }}"
connection_options:
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
when: check_conn_after_change

View File

@ -1,7 +1,7 @@
- hosts: mysql
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
db_name: "{{ jms_asset.spec_info.db_name }}"
check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
@ -54,3 +54,4 @@
client_cert: "{{ ssl_cert if check_ssl and ssl_cert | length > 0 else omit }}"
client_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
filter: version
when: check_conn_after_change

View File

@ -1,7 +1,7 @@
- hosts: oracle
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
tasks:
- name: Test Oracle connection
@ -40,3 +40,4 @@
login_port: "{{ jms_asset.port }}"
login_database: "{{ jms_asset.spec_info.db_name }}"
mode: "{{ account.mode }}"
when: check_conn_after_change

View File

@ -1,7 +1,7 @@
- hosts: postgre
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
@ -59,5 +59,6 @@
when:
- result is succeeded
- change_info is succeeded
- check_conn_after_change
register: result
failed_when: not result.is_available

View File

@ -1,7 +1,7 @@
- hosts: sqlserver
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
tasks:
- name: Test SQLServer connection
@ -66,3 +66,4 @@
name: '{{ jms_asset.spec_info.db_name }}'
script: |
SELECT @@version
when: check_conn_after_change

View File

@ -9,7 +9,8 @@
database: passwd
key: "{{ account.username }}"
register: user_info
ignore_errors: yes # Ignore errors so the playbook does not fail when the user does not exist
failed_when: false
changed_when: false
- name: "Add {{ account.username }} user"
ansible.builtin.user:
@ -18,10 +19,10 @@
shell: "{{ params.shell if params.shell | length > 0 else omit }}"
home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
groups: "{{ params.groups if params.groups | length > 0 else omit }}"
append: yes
append: "{{ true if params.groups | length > 0 else false }}"
expires: -1
state: present
when: user_info.failed
when: user_info.msg is defined
- name: "Set {{ account.username }} sudo setting"
ansible.builtin.lineinfile:
@ -31,7 +32,7 @@
line: "{{ account.username + ' ALL=(ALL) NOPASSWD: ' + params.sudo }}"
validate: visudo -cf %s
when:
- user_info.failed or params.modify_sudo
- user_info.msg is defined or params.modify_sudo
- params.sudo
- name: "Change {{ account.username }} password"
@ -40,6 +41,7 @@
password: "{{ account.secret | password_hash('des') }}"
update_password: always
ignore_errors: true
register: change_secret_result
when: account.secret_type == "password"
- name: "Get home directory for {{ account.username }}"
@ -82,6 +84,7 @@
user: "{{ account.username }}"
key: "{{ account.secret }}"
exclusive: "{{ ssh_params.exclusive }}"
register: change_secret_result
when: account.secret_type == "ssh_key"
- name: Refresh connection
@ -100,7 +103,9 @@
become_password: "{{ account.become.ansible_password | default('') }}"
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: account.secret_type == "password"
when:
- account.secret_type == "password"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost
- name: "Verify {{ account.username }} SSH KEY (paramiko)"
@ -111,6 +116,8 @@
login_private_key_path: "{{ account.private_key_path }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: account.secret_type == "ssh_key"
when:
- account.secret_type == "ssh_key"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost

View File

@ -9,7 +9,8 @@
database: passwd
key: "{{ account.username }}"
register: user_info
ignore_errors: yes # Ignore errors so the playbook does not fail when the user does not exist
failed_when: false
changed_when: false
- name: "Add {{ account.username }} user"
ansible.builtin.user:
@ -18,10 +19,10 @@
shell: "{{ params.shell if params.shell | length > 0 else omit }}"
home: "{{ params.home if params.home | length > 0 else '/home/' + account.username }}"
groups: "{{ params.groups if params.groups | length > 0 else omit }}"
append: yes
append: "{{ true if params.groups | length > 0 else false }}"
expires: -1
state: present
when: user_info.failed
when: user_info.msg is defined
- name: "Set {{ account.username }} sudo setting"
ansible.builtin.lineinfile:
@ -31,7 +32,7 @@
line: "{{ account.username + ' ALL=(ALL) NOPASSWD: ' + params.sudo }}"
validate: visudo -cf %s
when:
- user_info.failed or params.modify_sudo
- user_info.msg is defined or params.modify_sudo
- params.sudo
- name: "Change {{ account.username }} password"
@ -40,6 +41,7 @@
password: "{{ account.secret | password_hash('sha512') }}"
update_password: always
ignore_errors: true
register: change_secret_result
when: account.secret_type == "password"
- name: "Get home directory for {{ account.username }}"
@ -82,6 +84,7 @@
user: "{{ account.username }}"
key: "{{ account.secret }}"
exclusive: "{{ ssh_params.exclusive }}"
register: change_secret_result
when: account.secret_type == "ssh_key"
- name: Refresh connection
@ -100,7 +103,9 @@
become_password: "{{ account.become.ansible_password | default('') }}"
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: account.secret_type == "password"
when:
- account.secret_type == "password"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost
- name: "Verify {{ account.username }} SSH KEY (paramiko)"
@ -111,6 +116,8 @@
login_private_key_path: "{{ account.private_key_path }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: account.secret_type == "ssh_key"
when:
- account.secret_type == "ssh_key"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost

View File

@ -4,10 +4,6 @@
- name: Test privileged account
ansible.windows.win_ping:
# - name: Print variables
# debug:
# msg: "Username: {{ account.username }}, Password: {{ account.secret }}"
- name: Push user password
ansible.windows.win_user:
fullname: "{{ account.username}}"
@ -28,4 +24,4 @@
vars:
ansible_user: "{{ account.username }}"
ansible_password: "{{ account.secret }}"
when: account.secret_type == "password"
when: account.secret_type == "password" and check_conn_after_change

View File

@ -0,0 +1,27 @@
- hosts: demo
gather_facts: no
tasks:
- name: Test privileged account
ansible.windows.win_ping:
- name: Push user password
community.windows.win_domain_user:
name: "{{ account.username }}"
password: "{{ account.secret }}"
update_password: always
password_never_expires: yes
state: present
groups: "{{ params.groups }}"
groups_action: add
ignore_errors: true
when: account.secret_type == "password"
- name: Refresh connection
ansible.builtin.meta: reset_connection
- name: Verify password
ansible.windows.win_ping:
vars:
ansible_user: "{{ account.full_username }}"
ansible_password: "{{ account.secret }}"
when: account.secret_type == "password" and check_conn_after_change

View File

@ -0,0 +1,25 @@
id: push_account_ad_windows
name: "{{ 'Windows account push' | trans }}"
version: 1
method: push_account
category:
- ds
type:
- windows_ad
params:
- name: groups
type: str
label: '用户组'
default: 'Users,Remote Desktop Users'
help_text: "{{ 'Params groups help text' | trans }}"
i18n:
Windows account push:
zh: '使用 Ansible 模块 win_domain_user 执行 Windows 账号推送'
ja: 'Ansible win_domain_user モジュールを使用して Windows アカウントをプッシュする'
en: 'Using Ansible module win_domain_user to push account'
Params groups help text:
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

View File

@ -4,10 +4,6 @@
- name: Test privileged account
ansible.windows.win_ping:
# - name: Print variables
# debug:
# msg: "Username: {{ account.username }}, Password: {{ account.secret }}"
- name: Push user password
ansible.windows.win_user:
fullname: "{{ account.username}}"
@ -31,5 +27,5 @@
login_password: "{{ account.secret }}"
login_secret_type: "{{ account.secret_type }}"
gateway_args: "{{ jms_gateway | default({}) }}"
when: account.secret_type == "password"
when: account.secret_type == "password" and check_conn_after_change
delegate_to: localhost

View File

@ -1,12 +1,15 @@
from django.utils.translation import gettext_lazy as _
from accounts.const import AutomationTypes
from common.utils import get_logger
from ..base.manager import AccountBasePlaybookManager
from ..change_secret.manager import ChangeSecretManager
from common.utils.timezone import local_now_filename
from ..base.manager import BaseChangeSecretPushManager
from ...models import PushSecretRecord
logger = get_logger(__name__)
class PushAccountManager(ChangeSecretManager, AccountBasePlaybookManager):
class PushAccountManager(BaseChangeSecretPushManager):
@staticmethod
def require_update_version(account, recorder):
@ -17,63 +20,47 @@ class PushAccountManager(ChangeSecretManager, AccountBasePlaybookManager):
def method_type(cls):
return AutomationTypes.push_account
# @classmethod
# def trigger_by_asset_create(cls, asset):
# automations = PushAccountAutomation.objects.filter(
# triggers__contains=TriggerChoice.on_asset_create
# )
# account_automation_map = {auto.username: auto for auto in automations}
#
# util = AssetPermissionUtil()
# permissions = util.get_permissions_for_assets([asset], with_node=True)
# account_permission_map = defaultdict(list)
# for permission in permissions:
# for account in permission.accounts:
# account_permission_map[account].append(permission)
#
# username_automation_map = {}
# for username, automation in account_automation_map.items():
# if username != '@USER':
# username_automation_map[username] = automation
# continue
#
# asset_permissions = account_permission_map.get(username)
# if not asset_permissions:
# continue
# asset_permissions = util.get_permissions([p.id for p in asset_permissions])
# usernames = asset_permissions.values_list('users__username', flat=True).distinct()
# for _username in usernames:
# username_automation_map[_username] = automation
#
# asset_usernames_exists = asset.accounts.values_list('username', flat=True)
# accounts_to_create = []
# accounts_to_push = []
# for username, automation in username_automation_map.items():
# if username in asset_usernames_exists:
# continue
#
# if automation.secret_strategy != SecretStrategy.custom:
# secret_generator = SecretGenerator(
# automation.secret_strategy, automation.secret_type,
# automation.password_rules
# )
# secret = secret_generator.get_secret()
# else:
# secret = automation.secret
#
# account = Account(
# username=username, secret=secret,
# asset=asset, secret_type=automation.secret_type,
# comment='Create by account creation {}'.format(automation.name),
# )
# accounts_to_create.append(account)
# if automation.action == 'create_and_push':
# accounts_to_push.append(account)
# else:
# accounts_to_create.append(account)
#
# logger.debug(f'Create account {account} for asset {asset}')
def get_secret(self, account):
secret = account.secret
if not secret:
secret = super().get_secret(account)
return secret
# @classmethod
# def trigger_by_permission_accounts_change(cls):
# pass
def gen_account_inventory(self, account, asset, h, path_dir):
secret = self.get_secret(account)
secret_type = account.secret_type
if not secret:
raise ValueError(_('Secret cannot be empty'))
self.get_or_create_record(asset, account, h['name'])
new_secret, private_key_path = self.handle_ssh_secret(secret_type, secret, path_dir)
h = self.gen_inventory(h, account, new_secret, private_key_path, asset)
return h
def get_or_create_record(self, asset, account, name):
asset_account_id = f'{asset.id}-{account.id}'
if asset_account_id in self.record_map:
record_id = self.record_map[asset_account_id]
recorder = PushSecretRecord.objects.filter(id=record_id).first()
else:
recorder = self.create_record(asset, account)
self.name_recorder_mapper[name] = recorder
return recorder
def create_record(self, asset, account):
recorder = PushSecretRecord(
asset=asset, account=account, execution=self.execution,
comment=f'{account.username}@{asset.address}'
)
return recorder
def print_summary(self):
print('\n\n' + '-' * 80)
plan_execution_end = _('Plan execution end')
print('{} {}\n'.format(plan_execution_end, local_now_filename()))
time_cost = _('Duration')
print('{}: {}s'.format(time_cost, self.duration))
def get_report_template(self):
return "accounts/push_account_report.html"

View File

@ -1,7 +1,7 @@
- hosts: mongodb
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
tasks:
- name: "Remove account"

View File

@ -1,7 +1,7 @@
- hosts: mysql
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"

View File

@ -1,7 +1,7 @@
- hosts: oracle
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
tasks:
- name: "Remove account"

View File

@ -1,7 +1,7 @@
- hosts: postgresql
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"

View File

@ -1,7 +1,7 @@
- hosts: sqlserver
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
tasks:
- name: "Remove account"
@ -11,4 +11,5 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: "{{ jms_asset.spec_info.db_name }}"
script: "DROP USER {{ account.username }}"
script: "DROP LOGIN {{ account.username }}; select @@version"

View File

@ -0,0 +1,9 @@
- hosts: windows
gather_facts: no
tasks:
- name: "Remove account"
ansible.windows.win_domain_user:
name: "{{ account.username }}"
state: absent

View File

@ -0,0 +1,14 @@
id: remove_account_ad_windows
name: "{{ 'Windows account remove' | trans }}"
version: 1
method: remove_account
category:
- ds
type:
- windows_ad
i18n:
Windows account remove:
zh: 使用 Ansible 模块 win_domain_user 删除账号
ja: Ansible モジュール win_domain_user を使用してアカウントを削除する
en: Use the Ansible module win_domain_user to delete an account

View File

@ -1,10 +1,12 @@
import os
from collections import defaultdict
from copy import deepcopy
from django.db.models import QuerySet
from accounts.const import AutomationTypes
from accounts.models import Account
from accounts.models import Account, GatheredAccount, AccountRisk
from common.const import ConfirmOrIgnore
from common.utils import get_logger
from ..base.manager import AccountBasePlaybookManager
@ -12,59 +14,82 @@ logger = get_logger(__name__)
class RemoveAccountManager(AccountBasePlaybookManager):
super_accounts = ["root", "administrator"]
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.host_account_mapper = {}
self.host_account_mapper = dict()
self.host_accounts = defaultdict(list)
snapshot_account = self.execution.snapshot.get("accounts", [])
self.snapshot_asset_account_map = defaultdict(list)
for account in snapshot_account:
self.snapshot_asset_account_map[str(account["asset"])].append(account)
# Used by the handler
self.delete = self.execution.snapshot.get("delete", "both")
self.confirm_risk = self.execution.snapshot.get("risk", "")
def prepare_runtime_dir(self):
path = super().prepare_runtime_dir()
ansible_config_path = os.path.join(path, 'ansible.cfg')
ansible_config_path = os.path.join(path, "ansible.cfg")
with open(ansible_config_path, 'w') as f:
f.write('[ssh_connection]\n')
f.write('ssh_args = -o ControlMaster=no -o ControlPersist=no\n')
with open(ansible_config_path, "w") as f:
f.write("[ssh_connection]\n")
f.write("ssh_args = -o ControlMaster=no -o ControlPersist=no\n")
return path
@classmethod
def method_type(cls):
return AutomationTypes.remove_account
def get_gather_accounts(self, privilege_account, gather_accounts: QuerySet):
gather_account_ids = self.execution.snapshot['gather_accounts']
gather_accounts = gather_accounts.filter(id__in=gather_account_ids)
gather_accounts = gather_accounts.exclude(
username__in=[privilege_account.username, 'root', 'Administrator']
)
return gather_accounts
def host_callback(self, host, asset=None, account=None, automation=None, path_dir=None, **kwargs):
if host.get('error'):
def host_callback(
self, host, asset=None, account=None, automation=None, path_dir=None, **kwargs
):
if host.get("error"):
return host
gather_accounts = asset.gatheredaccount_set.all()
gather_accounts = self.get_gather_accounts(account, gather_accounts)
inventory_hosts = []
accounts_to_remove = self.snapshot_asset_account_map.get(str(asset.id), [])
for gather_account in gather_accounts:
for account in accounts_to_remove:
username = account.get("username")
if not username or username.lower() in self.super_accounts:
print("Super account can not be remove: ", username)
continue
h = deepcopy(host)
h['name'] += '(' + gather_account.username + ')'
self.host_account_mapper[h['name']] = (asset, gather_account)
h['account'] = {'username': gather_account.username}
h["name"] += "(" + username + ")"
self.host_account_mapper[h["name"]] = account
h["account"] = {"username": username}
inventory_hosts.append(h)
return inventory_hosts
def on_host_success(self, host, result):
tuple_asset_gather_account = self.host_account_mapper.get(host)
if not tuple_asset_gather_account:
super().on_host_success(host, result)
account = self.host_account_mapper.get(host)
if not account:
return
asset, gather_account = tuple_asset_gather_account
try:
Account.objects.filter(
asset_id=asset.id,
username=gather_account.username
if self.delete == "both":
Account.objects.filter(
asset_id=account["asset"],
username=account["username"]
).delete()
if self.confirm_risk:
AccountRisk.objects.filter(
asset_id=account["asset"],
username=account["username"],
risk__in=[self.confirm_risk],
).update(status=ConfirmOrIgnore.confirmed)
GatheredAccount.objects.filter(
asset_id=account["asset"],
username=account["username"]
).delete()
gather_account.delete()
except Exception as e:
print(f'\033[31m Delete account {gather_account.username} failed: {e} \033[0m\n')
logger.error(
f"Failed to delete account {account['username']} on asset {account['asset']}: {e}"
)
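host_callback and on_host_success both operate on the plain account dicts taken from execution.snapshot; a made-up example of the snapshot shape this manager expects (the risk value is purely illustrative):

snapshot = {
    'accounts': [{'asset': '<asset-uuid>', 'username': 'deploy'}],
    'delete': 'both',              # also delete the local Account object, not only the gathered one
    'risk': 'long_time_no_login',  # when set, matching AccountRisk rows are marked confirmed
}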

View File

@ -10,6 +10,6 @@
rdp_ping:
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_user: "{{ account.username }}"
login_user: "{{ account.full_username }}"
login_password: "{{ account.secret }}"
login_secret_type: "{{ account.secret_type }}"

View File

@ -2,8 +2,10 @@ id: verify_account_by_rdp
name: "{{ 'Windows rdp account verify' | trans }}"
category:
- host
- ds
type:
- windows
- windows_ad
method: verify_account
protocol: rdp
priority: 1

View File

@ -21,3 +21,4 @@
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
recv_timeout: "{{ params.recv_timeout | default(30) }}"

View File

@ -1,7 +1,7 @@
- hosts: mongodb
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
tasks:
- name: Verify account

View File

@ -1,7 +1,7 @@
- hosts: mysql
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"

View File

@ -1,7 +1,7 @@
- hosts: oracle
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
tasks:
- name: Verify account

View File

@ -1,7 +1,7 @@
- hosts: postgresql
gather_facts: no
vars:
ansible_python_interpreter: /opt/py3/bin/python
ansible_python_interpreter: "{{ local_python_interpreter }}"
check_ssl: "{{ jms_asset.spec_info.use_ssl }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"

Some files were not shown because too many files have changed in this diff.