Mirror of https://github.com/jumpserver/jumpserver.git, synced 2025-12-16 00:52:41 +00:00.

Compare commits (527 commits).
@@ -7,4 +7,5 @@ django.db
celerybeat.pid
### Vagrant ###
.vagrant/
apps/xpack/.git
apps/xpack/.git
2 changes: .gitattributes (vendored)
@@ -1,2 +1,4 @@
*.mmdb filter=lfs diff=lfs merge=lfs -text
*.mo filter=lfs diff=lfs merge=lfs -text
*.ipdb filter=lfs diff=lfs merge=lfs -text
3 changes: .github/release-config.yml (vendored)
@@ -41,4 +41,5 @@ version-resolver:
  default: patch
template: |
  ## 版本变化 What's Changed
  $CHANGES
  $CHANGES
18 changes: .github/workflows/lgtm.yml (vendored)
@@ -1,18 +0,0 @@
name: Send LGTM reaction

on:
  issue_comment:
    types: [created]
  pull_request_review:
    types: [submitted]

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@1.0.0
      - uses: micnncim/action-lgtm-reaction@master
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          trigger: '["^.?lgtm$"]'
4 changes: .gitignore (vendored)
@@ -31,12 +31,14 @@ media
celerybeat.pid
django.db
celerybeat-schedule.db
data/static
docs/_build/
xpack
xpack.bak
logs/*
### Vagrant ###
.vagrant/
release/*
releashe
/apps/script.py
data/*
@@ -126,3 +126,4 @@ enforcement ladder](https://github.com/mozilla/diversity).
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.
@@ -23,3 +23,4 @@ When reporting issues, always include:

Because the issues are open to the public, when submitting files, be sure to remove any sensitive information, e.g. user name, password, IP address, and company name. You can
replace those parts with "REDACTED" or other strings like "****".
142 changes: Dockerfile
@@ -1,6 +1,6 @@
# Build the code
FROM python:3.8-slim as stage-build
MAINTAINER JumpServer Team <ibuler@qq.com>
ARG TARGETARCH

ARG VERSION
ENV VERSION=$VERSION

@@ -9,74 +9,92 @@ ADD . .
RUN cd utils && bash -ixeu build.sh

FROM python:3.8-slim
ARG TARGETARCH
MAINTAINER JumpServer Team <ibuler@qq.com>

ARG BUILD_DEPENDENCIES=" \
    g++ \
    make \
    pkg-config"

ARG DEPENDENCIES=" \
    default-libmysqlclient-dev \
    freetds-dev \
    libpq-dev \
    libffi-dev \
    libjpeg-dev \
    libldap2-dev \
    libsasl2-dev \
    libxml2-dev \
    libxmlsec1-dev \
    libxmlsec1-openssl \
    libaio-dev \
    openssh-client \
    sshpass"

ARG TOOLS=" \
    ca-certificates \
    curl \
    default-mysql-client \
    iputils-ping \
    locales \
    procps \
    redis-tools \
    telnet \
    vim \
    unzip \
    wget"

RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core \
    sed -i 's@http://.*.debian.org@http://mirrors.ustc.edu.cn@g' /etc/apt/sources.list \
    && rm -f /etc/apt/apt.conf.d/docker-clean \
    && ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
    && apt-get update \
    && apt-get -y install --no-install-recommends ${BUILD_DEPENDENCIES} \
    && apt-get -y install --no-install-recommends ${DEPENDENCIES} \
    && apt-get -y install --no-install-recommends ${TOOLS} \
    && mkdir -p /root/.ssh/ \
    && echo "Host *\n\tStrictHostKeyChecking no\n\tUserKnownHostsFile /dev/null" > /root/.ssh/config \
    && sed -i "s@# alias l@alias l@g" ~/.bashrc \
    && echo "set mouse-=a" > ~/.vimrc \
    && echo "no" | dpkg-reconfigure dash \
    && echo "zh_CN.UTF-8" | dpkg-reconfigure locales \
    && rm -rf /var/lib/apt/lists/*

ARG ORACLE_LIB_MAJOR=19
ARG ORACLE_LIB_MINOR=10
ENV ORACLE_FILE="instantclient-basiclite-linux.${TARGETARCH:-amd64}-${ORACLE_LIB_MAJOR}.${ORACLE_LIB_MINOR}.0.0.0dbru.zip"

RUN mkdir -p /opt/oracle/ \
    && cd /opt/oracle/ \
    && wget https://download.jumpserver.org/files/oracle/${ORACLE_FILE} \
    && unzip instantclient-basiclite-linux.${TARGETARCH-amd64}-19.10.0.0.0dbru.zip \
    && mv instantclient_${ORACLE_LIB_MAJOR}_${ORACLE_LIB_MINOR} instantclient \
    && echo "/opt/oracle/instantclient" > /etc/ld.so.conf.d/oracle-instantclient.conf \
    && ldconfig \
    && rm -f ${ORACLE_FILE}

WORKDIR /tmp/build
COPY ./requirements ./requirements

ARG PIP_MIRROR=https://pypi.douban.com/simple
ENV PIP_MIRROR=$PIP_MIRROR
ARG PIP_JMS_MIRROR=https://pypi.douban.com/simple
ENV PIP_JMS_MIRROR=$PIP_JMS_MIRROR

WORKDIR /opt/jumpserver

ARG BUILD_DEPENDENCIES=" \
    g++ \
    make \
    pkg-config"

ARG DEPENDENCIES=" \
    default-libmysqlclient-dev \
    freetds-dev \
    libpq-dev \
    libffi-dev \
    libldap2-dev \
    libsasl2-dev \
    libxml2-dev \
    libxmlsec1-dev \
    libxmlsec1-openssl \
    libaio-dev \
    sshpass"

ARG TOOLS=" \
    curl \
    default-mysql-client \
    iproute2 \
    iputils-ping \
    locales \
    procps \
    redis-tools \
    telnet \
    vim \
    wget"

RUN sed -i 's/deb.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list \
    && sed -i 's/security.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list \
    && apt update \
    && apt -y install ${BUILD_DEPENDENCIES} \
    && apt -y install ${DEPENDENCIES} \
    && apt -y install ${TOOLS} \
    && localedef -c -f UTF-8 -i zh_CN zh_CN.UTF-8 \
    && cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
    && mkdir -p /root/.ssh/ \
    && echo "Host *\n\tStrictHostKeyChecking no\n\tUserKnownHostsFile /dev/null" > /root/.ssh/config \
    && sed -i "s@# alias l@alias l@g" ~/.bashrc \
    && echo "set mouse-=a" > ~/.vimrc \
    && rm -rf /var/lib/apt/lists/* \
    && mv /bin/sh /bin/sh.bak \
    && ln -s /bin/bash /bin/sh

RUN mkdir -p /opt/jumpserver/oracle/ \
    && wget https://download.jumpserver.org/public/instantclient-basiclite-linux.x64-21.1.0.0.0.tar \
    && tar xf instantclient-basiclite-linux.x64-21.1.0.0.0.tar -C /opt/jumpserver/oracle/ \
    && echo "/opt/jumpserver/oracle/instantclient_21_1" > /etc/ld.so.conf.d/oracle-instantclient.conf \
    && ldconfig \
    && rm -f instantclient-basiclite-linux.x64-21.1.0.0.0.tar
RUN --mount=type=cache,target=/root/.cache/pip \
    set -ex \
    && pip config set global.index-url ${PIP_MIRROR} \
    && pip install --upgrade pip \
    && pip install --upgrade setuptools wheel \
    && pip install $(grep -E 'jms|jumpserver' requirements/requirements.txt) -i ${PIP_JMS_MIRROR} \
    && pip install -r requirements/requirements.txt

COPY --from=stage-build /opt/jumpserver/release/jumpserver /opt/jumpserver
RUN echo > /opt/jumpserver/config.yml \
    && rm -rf /tmp/build

RUN echo > config.yml \
    && pip install --upgrade pip==20.2.4 setuptools==49.6.0 wheel==0.34.2 -i ${PIP_MIRROR} \
    && pip install --no-cache-dir $(grep -E 'jms|jumpserver' requirements/requirements.txt) -i ${PIP_JMS_MIRROR} \
    && pip install --no-cache-dir -r requirements/requirements.txt -i ${PIP_MIRROR} \
    && rm -rf ~/.cache/pip

WORKDIR /opt/jumpserver
VOLUME /opt/jumpserver/data
VOLUME /opt/jumpserver/logs
95 changes: Dockerfile.loong64 (new file)
@@ -0,0 +1,95 @@
FROM python:3.8-slim as stage-build
ARG TARGETARCH

ARG VERSION
ENV VERSION=$VERSION

WORKDIR /opt/jumpserver
ADD . .
RUN cd utils && bash -ixeu build.sh

FROM python:3.8-slim
ARG TARGETARCH
MAINTAINER JumpServer Team <ibuler@qq.com>

ARG BUILD_DEPENDENCIES=" \
    g++ \
    make \
    pkg-config"

ARG DEPENDENCIES=" \
    default-libmysqlclient-dev \
    freetds-dev \
    libpq-dev \
    libffi-dev \
    libjpeg-dev \
    libldap2-dev \
    libsasl2-dev \
    libxml2-dev \
    libxmlsec1-dev \
    libxmlsec1-openssl \
    libaio-dev \
    openssh-client \
    sshpass"

ARG TOOLS=" \
    ca-certificates \
    curl \
    default-mysql-client \
    iputils-ping \
    locales \
    netcat \
    redis-server \
    telnet \
    vim \
    unzip \
    wget"

RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core \
    set -ex \
    && ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
    && apt-get update \
    && apt-get -y install --no-install-recommends ${BUILD_DEPENDENCIES} \
    && apt-get -y install --no-install-recommends ${DEPENDENCIES} \
    && apt-get -y install --no-install-recommends ${TOOLS} \
    && mkdir -p /root/.ssh/ \
    && echo "Host *\n\tStrictHostKeyChecking no\n\tUserKnownHostsFile /dev/null" > /root/.ssh/config \
    && sed -i "s@# alias l@alias l@g" ~/.bashrc \
    && echo "set mouse-=a" > ~/.vimrc \
    && echo "no" | dpkg-reconfigure dash \
    && echo "zh_CN.UTF-8" | dpkg-reconfigure locales \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /tmp/build
COPY ./requirements ./requirements

ARG PIP_MIRROR=https://pypi.douban.com/simple
ENV PIP_MIRROR=$PIP_MIRROR
ARG PIP_JMS_MIRROR=https://pypi.douban.com/simple
ENV PIP_JMS_MIRROR=$PIP_JMS_MIRROR

RUN --mount=type=cache,target=/root/.cache/pip \
    set -ex \
    && pip config set global.index-url ${PIP_MIRROR} \
    && pip install --upgrade pip \
    && pip install --upgrade setuptools wheel \
    && pip install https://download.jumpserver.org/pypi/simple/cryptography/cryptography-36.0.1-cp38-cp38-linux_loongarch64.whl \
    && pip install https://download.jumpserver.org/pypi/simple/greenlet/greenlet-1.1.2-cp38-cp38-linux_loongarch64.whl \
    && pip install $(grep 'PyNaCl' requirements/requirements.txt) \
    && GRPC_PYTHON_BUILD_SYSTEM_OPENSSL=true pip install grpcio \
    && pip install $(grep -E 'jms|jumpserver' requirements/requirements.txt) -i ${PIP_JMS_MIRROR} \
    && pip install -r requirements/requirements.txt

COPY --from=stage-build /opt/jumpserver/release/jumpserver /opt/jumpserver
RUN echo > /opt/jumpserver/config.yml \
    && rm -rf /tmp/build

WORKDIR /opt/jumpserver
VOLUME /opt/jumpserver/data
VOLUME /opt/jumpserver/logs

ENV LANG=zh_CN.UTF-8

EXPOSE 8070
EXPOSE 8080
ENTRYPOINT ["./entrypoint.sh"]
3 changes: LICENSE
@@ -671,4 +671,5 @@ into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.
<https://www.gnu.org/licenses/why-not-lgpl.html>.
69 changes: README.md
@@ -1,10 +1,13 @@
<p align="center"><a href="https://jumpserver.org"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a></p>
<p align="center">
  <a href="https://jumpserver.org"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a>
</p>
<h3 align="center">An easier-to-use bastion host for multi-cloud environments</h3>

<p align="center">
  <a href="https://www.gnu.org/licenses/gpl-3.0.html"><img src="https://img.shields.io/github/license/jumpserver/jumpserver" alt="License: GPLv3"></a>
  <a href="https://shields.io/github/downloads/jumpserver/jumpserver/total"><img src="https://shields.io/github/downloads/jumpserver/jumpserver/total" alt=" release"></a>
  <a href="https://hub.docker.com/u/jumpserver"><img src="https://img.shields.io/docker/pulls/jumpserver/jms_all.svg" alt="Codacy"></a>
  <a href="https://github.com/jumpserver/jumpserver/commits"><img alt="GitHub last commit" src="https://img.shields.io/github/last-commit/jumpserver/jumpserver.svg" /></a>
  <a href="https://github.com/jumpserver/jumpserver"><img src="https://img.shields.io/github/stars/jumpserver/jumpserver?color=%231890FF&style=flat-square" alt="Stars"></a>
</p>

@@ -13,24 +16,22 @@
JumpServer is the world's first open-source bastion host, released under the GPLv3 license, and an operations security audit system that meets the 4A specification.
JumpServer is a widely used open-source bastion host and a professional operations security audit system that meets the 4A specification.

JumpServer is developed in Python, follows Web 2.0 conventions, and ships with an industry-leading Web Terminal; the interface is polished and the user experience is good.
JumpServer is developed in Python and ships with an industry-leading Web Terminal; the interface is polished and the user experience is good.

JumpServer adopts a distributed architecture, supports multi-datacenter and cross-region deployment as well as horizontal scaling, with no limit on the number of assets or concurrent sessions.

Changing the world starts with small steps ...

> To learn more about the JumpServer open-source project, we recommend reading [JumpServer's original intention and mission](https://mp.weixin.qq.com/s/S6q_2rP_9MwaVwyqLQnXzA)

### Features

- Open source: no barriers to entry, quick to obtain and install online;
- Distributed: easily supports large-scale concurrent access;
- No plugins: only a browser is needed for a first-class Web Terminal experience;
- Multi-tenant: one system used by multiple subsidiaries or departments at the same time;
- Multi-cloud support: one system managing assets on different clouds at the same time;
- Cloud storage: audit recordings stored in the cloud and never lost;
- Multi-tenant: one system used by multiple subsidiaries and departments at the same time;
- Multi-application support: databases, Windows remote applications, Kubernetes.

### UI showcase
@@ -55,12 +56,15 @@ JumpServer 采纳分布式架构,支持多机房跨区域部署,支持横向
- [Manual installation](https://github.com/jumpserver/installer)

### Component projects
- [Lina](https://github.com/jumpserver/lina) JumpServer Web UI project
- [Luna](https://github.com/jumpserver/luna) JumpServer Web Terminal project
- [KoKo](https://github.com/jumpserver/koko) JumpServer character-protocol connector project, replacing the earlier Python version [Coco](https://github.com/jumpserver/coco)
- [Lion](https://github.com/jumpserver/lion-release) JumpServer graphical-protocol connector project, based on [Apache Guacamole](https://guacamole.apache.org/)
- [Clients](https://github.com/jumpserver/clients) JumpServer client project
- [Installer](https://github.com/jumpserver/installer) JumpServer installer project
| Project | Status | Description |
| --------------------------------------------------------------------------- | ------------------- | ---------------------------------------- |
| [Lina](https://github.com/jumpserver/lina) | <a href="https://github.com/jumpserver/lina/releases"><img alt="Lina release" src="https://img.shields.io/github/release/jumpserver/lina.svg" /></a> | JumpServer Web UI project |
| [Luna](https://github.com/jumpserver/luna) | <a href="https://github.com/jumpserver/luna/releases"><img alt="Luna release" src="https://img.shields.io/github/release/jumpserver/luna.svg" /></a> | JumpServer Web Terminal project |
| [KoKo](https://github.com/jumpserver/koko) | <a href="https://github.com/jumpserver/koko/releases"><img alt="Koko release" src="https://img.shields.io/github/release/jumpserver/koko.svg" /></a> | JumpServer character-protocol connector project, replacing the earlier Python version [Coco](https://github.com/jumpserver/coco) |
| [Lion](https://github.com/jumpserver/lion-release) | <a href="https://github.com/jumpserver/lion-release/releases"><img alt="Lion release" src="https://img.shields.io/github/release/jumpserver/lion-release.svg" /></a> | JumpServer graphical-protocol connector project, based on [Apache Guacamole](https://guacamole.apache.org/) |
| [Magnus](https://github.com/jumpserver/magnus-release) | <a href="https://github.com/jumpserver/magnus-release/releases"><img alt="Magnus release" src="https://img.shields.io/github/release/jumpserver/magnus-release.svg" /> | JumpServer database proxy connector project |
| [Clients](https://github.com/jumpserver/clients) | <a href="https://github.com/jumpserver/clients/releases"><img alt="Clients release" src="https://img.shields.io/github/release/jumpserver/clients.svg" /> | JumpServer client project |
| [Installer](https://github.com/jumpserver/installer)| <a href="https://github.com/jumpserver/installer/releases"><img alt="Installer release" src="https://img.shields.io/github/release/jumpserver/installer.svg" /> | JumpServer installer project |

### Community

@@ -75,27 +79,13 @@ JumpServer 采纳分布式架构,支持多机房跨区域部署,支持横向

Thanks to the following contributors for making JumpServer better

<a href="https://github.com/jumpserver/jumpserver/graphs/contributors">
  <img src="https://contrib.rocks/image?repo=jumpserver/jumpserver" />
</a>

<a href="https://github.com/jumpserver/koko/graphs/contributors">
  <img src="https://contrib.rocks/image?repo=jumpserver/koko" />
</a>

<a href="https://github.com/jumpserver/lina/graphs/contributors">
  <img src="https://contrib.rocks/image?repo=jumpserver/lina" />
</a>

<a href="https://github.com/jumpserver/luna/graphs/contributors">
  <img src="https://contrib.rocks/image?repo=jumpserver/luna" />
</a>
<a href="https://github.com/jumpserver/jumpserver/graphs/contributors"><img src="https://opencollective.com/jumpserver/contributors.svg?width=890&button=false" /></a>

### Acknowledgements
- [Apache Guacamole](https://guacamole.apache.org/) connects RDP, SSH and VNC devices from the web; the JumpServer graphical component Lion depends on it
- [OmniDB](https://omnidb.org/) connects to and works with databases from the web; JumpServer's web database feature depends on it
- [Apache Guacamole](https://guacamole.apache.org/) connects RDP, SSH and VNC devices from the web; the JumpServer graphical component Lion depends on it
- [OmniDB](https://omnidb.org/) connects to and works with databases from the web; JumpServer's web database feature depends on it

### JumpServer Enterprise Edition

@@ -103,14 +93,18 @@ JumpServer 采纳分布式架构,支持多机房跨区域部署,支持横向

### Case studies

- [JumpServer safeguards secure operations for SF Technology's ultra-large-scale assets](https://blog.fit2cloud.com/?p=1147);
- [JumpServer makes Great Wisdom's hybrid IT operations smarter](https://blog.fit2cloud.com/?p=882);
- [Hands-on JumpServer deployment and operation at Ctrip](https://blog.fit2cloud.com/?p=851);
- [Xiaohongshu's large-scale, cross-version asset migration with JumpServer](https://blog.fit2cloud.com/?p=516);
- [JumpServer helps CMGE improve secure operations in multi-cloud environments](https://blog.fit2cloud.com/?p=732);
- [ZTO Express: host security operations practice with JumpServer](https://blog.fit2cloud.com/?p=708);
- [Oriental Pearl: managing heterogeneous, distributed cloud assets efficiently with JumpServer](https://blog.fit2cloud.com/?p=687);
- [Jiangsu Rural Credit: JumpServer supports secure operations on the industry cloud](https://blog.fit2cloud.com/?p=666).
- [Tencent overseas games: building game security operations on JumpServer](https://blog.fit2cloud.com/?p=3704)
- [Wanhua Chemical: managing globally distributed IT assets with JumpServer, integrated with the cloud management platform](https://blog.fit2cloud.com/?p=3504)
- [Snow Beer: experience using the JumpServer bastion host](https://blog.fit2cloud.com/?p=3412)
- [SF Technology: JumpServer safeguards secure operations for ultra-large-scale assets](https://blog.fit2cloud.com/?p=1147)
- [Moonton Games: managing distributed assets across multiple projects with JumpServer](https://blog.fit2cloud.com/?p=3213)
- [Ctrip: hands-on JumpServer deployment and operation](https://blog.fit2cloud.com/?p=851)
- [Great Wisdom: JumpServer makes hybrid IT operations smarter](https://blog.fit2cloud.com/?p=882)
- [Xiaohongshu: large-scale, cross-version asset migration with JumpServer](https://blog.fit2cloud.com/?p=516)
- [CMGE: JumpServer helps improve secure operations in multi-cloud environments](https://blog.fit2cloud.com/?p=732)
- [ZTO Express: host security operations practice with JumpServer](https://blog.fit2cloud.com/?p=708)
- [Oriental Pearl: managing heterogeneous, distributed cloud assets efficiently with JumpServer](https://blog.fit2cloud.com/?p=687)
- [Jiangsu Rural Credit: JumpServer supports secure operations on the industry cloud](https://blog.fit2cloud.com/?p=666)

### Security note

@@ -131,4 +125,3 @@ Licensed under The GNU General Public License version 3 (GPLv3) (the "License")
https://www.gnu.org/licenses/gpl-3.0.html

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
@@ -92,4 +92,3 @@ Licensed under The GNU General Public License version 3 (GPLv3) (the "License")
https://www.gnu.org/licenses/gpl-3.0.htmll

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
@@ -18,3 +18,4 @@ All security bugs should be reported to the contact as below:
- ibuler@fit2cloud.com
- support@fit2cloud.com
- 400-052-0755
56 changes: Vagrantfile (vendored)
@@ -1,56 +0,0 @@
# -*- mode: ruby -*-
# vi: set ft=ruby :

Vagrant.configure("2") do |config|
  # The most common configuration options are documented and commented below.
  # For a complete reference, please see the online documentation at
  # https://docs.vagrantup.com.

  # Every Vagrant development environment requires a box. You can search for
  # boxes at https://vagrantcloud.com/search.
  config.vm.box_check_update = false
  config.vm.box = "centos/7"
  config.vm.hostname = "jumpserver"
  config.vm.network "private_network", ip: "172.17.8.101"
  config.vm.provider "virtualbox" do |vb|
    vb.memory = "4096"
    vb.cpus = 2
    vb.name = "jumpserver"
  end

  config.vm.synced_folder ".", "/vagrant", type: "rsync",
    rsync__verbose: true,
    rsync__exclude: ['.git*', 'node_modules*','*.log','*.box','Vagrantfile']

  config.vm.provision "shell", inline: <<-SHELL
    ## Use the Aliyun yum mirrors
    sudo curl -o /etc/yum.repos.d/CentOS-Base.repo http://mirrors.aliyun.com/repo/Centos-7.repo
    sudo sed -i -e '/mirrors.cloud.aliyuncs.com/d' -e '/mirrors.aliyuncs.com/d' /etc/yum.repos.d/CentOS-Base.repo
    sudo curl -o /etc/yum.repos.d/epel.repo http://mirrors.aliyun.com/repo/epel-7.repo
    sudo yum makecache

    ## Install dependency packages
    sudo yum install -y python36 python36-devel python36-pip \
      libtiff-devel libjpeg-devel libzip-devel freetype-devel \
      lcms2-devel libwebp-devel tcl-devel tk-devel sshpass \
      openldap-devel mariadb-devel mysql-devel libffi-devel \
      openssh-clients telnet openldap-clients gcc

    ## Use the Aliyun pip mirror
    mkdir /home/vagrant/.pip
    cat << EOF | sudo tee -a /home/vagrant/.pip/pip.conf
[global]
timeout = 6000
index-url = https://mirrors.aliyun.com/pypi/simple/

[install]
use-mirrors = true
mirrors = https://mirrors.aliyun.com/pypi/simple/
trusted-host=mirrors.aliyun.com
EOF

    python3.6 -m venv /home/vagrant/venv
    source /home/vagrant/venv/bin/activate
    echo 'source /home/vagrant/venv/bin/activate' >> /home/vagrant/.bash_profile
  SHELL
end
@@ -47,7 +47,7 @@ class LoginAssetCheckAPI(CreateAPIView):
            asset=self.serializer.asset,
            system_user=self.serializer.system_user,
            assignees=acl.reviewers.all(),
            org_id=self.serializer.org.id
            org_id=self.serializer.org.id,
        )
        confirm_status_url = reverse(
            view_name='api-tickets:super-ticket-status',
@@ -59,7 +59,7 @@ class LoginAssetCheckAPI(CreateAPIView):
            external=True, api_to_ui=True
        )
        ticket_detail_url = '{url}?type={type}'.format(url=ticket_detail_url, type=ticket.type)
        ticket_assignees = ticket.current_node.first().ticket_assignees.all()
        ticket_assignees = ticket.current_step.ticket_assignees.all()
        data = {
            'check_confirm_status': {'method': 'GET', 'url': confirm_status_url},
            'close_confirm': {'method': 'DELETE', 'url': confirm_status_url},
@@ -44,85 +44,49 @@ class LoginACL(BaseACL):
    def __str__(self):
        return self.name

    @property
    def action_reject(self):
        return self.action == self.ActionChoices.reject

    @property
    def action_allow(self):
        return self.action == self.ActionChoices.allow
    def is_action(self, action):
        return self.action == action

    @classmethod
    def filter_acl(cls, user):
        return user.login_acls.all().valid().distinct()

    @staticmethod
    def allow_user_confirm_if_need(user, ip):
        acl = LoginACL.filter_acl(user).filter(
            action=LoginACL.ActionChoices.confirm
        ).first()
        acl = acl if acl and acl.reviewers.exists() else None
        if not acl:
            return False, acl
        ip_group = acl.rules.get('ip_group')
        time_periods = acl.rules.get('time_period')
        is_contain_ip = contains_ip(ip, ip_group)
        is_contain_time_period = contains_time_period(time_periods)
        return is_contain_ip and is_contain_time_period, acl
    def match(user, ip):
        acls = LoginACL.filter_acl(user)
        if not acls:
            return

    @staticmethod
    def allow_user_to_login(user, ip):
        acl = LoginACL.filter_acl(user).exclude(
            action=LoginACL.ActionChoices.confirm
        ).first()
        if not acl:
            return True, ''
        ip_group = acl.rules.get('ip_group')
        time_periods = acl.rules.get('time_period')
        is_contain_ip = contains_ip(ip, ip_group)
        is_contain_time_period = contains_time_period(time_periods)
        for acl in acls:
            if acl.is_action(LoginACL.ActionChoices.confirm) and not acl.reviewers.exists():
                continue
            ip_group = acl.rules.get('ip_group')
            time_periods = acl.rules.get('time_period')
            is_contain_ip = contains_ip(ip, ip_group)
            is_contain_time_period = contains_time_period(time_periods)
            if is_contain_ip and is_contain_time_period:
                # Conditions are met, return this ACL
                return acl

        reject_type = ''
        if is_contain_ip and is_contain_time_period:
            # Conditions are met
            allow = acl.action_allow
            if not allow:
                reject_type = 'ip' if is_contain_ip else 'time'
        else:
            # Conditions are not met
            # If the ACL itself allows, then reject; if it rejects, then allow
            allow = not acl.action_allow
            if not allow:
                reject_type = 'ip' if not is_contain_ip else 'time'

        return allow, reject_type

    @staticmethod
    def construct_confirm_ticket_meta(request=None):
    def create_confirm_ticket(self, request):
        from tickets import const
        from tickets.models import ApplyLoginTicket
        from orgs.models import Organization
        title = _('Login confirm') + ' {}'.format(self.user)
        login_ip = get_request_ip(request) if request else ''
        login_ip = login_ip or '0.0.0.0'
        login_city = get_ip_city(login_ip)
        login_datetime = local_now_display()
        ticket_meta = {
            'apply_login_ip': login_ip,
            'apply_login_city': login_city,
            'apply_login_datetime': login_datetime,
        }
        return ticket_meta

    def create_confirm_ticket(self, request=None):
        from tickets import const
        from tickets.models import Ticket
        from orgs.models import Organization
        ticket_title = _('Login confirm') + ' {}'.format(self.user)
        ticket_meta = self.construct_confirm_ticket_meta(request)
        data = {
            'title': ticket_title,
            'type': const.TicketType.login_confirm.value,
            'meta': ticket_meta,
            'title': title,
            'type': const.TicketType.login_confirm,
            'applicant': self.user,
            'apply_login_city': login_city,
            'apply_login_ip': login_ip,
            'apply_login_datetime': login_datetime,
            'org_id': Organization.ROOT_ID,
        }
        ticket = Ticket.objects.create(**data)
        ticket.create_process_map_and_node(self.reviewers.all())
        ticket.open(self.user)
        ticket = ApplyLoginTicket.objects.create(**data)
        assignees = self.reviewers.all()
        ticket.open_by_system(assignees)
        return ticket

@@ -85,19 +85,18 @@ class LoginAssetACL(BaseACL, OrgModelMixin):
    @classmethod
    def create_login_asset_confirm_ticket(cls, user, asset, system_user, assignees, org_id):
        from tickets.const import TicketType
        from tickets.models import Ticket
        from tickets.models import ApplyLoginAssetTicket
        title = _('Login asset confirm') + ' ({})'.format(user)
        data = {
            'title': _('Login asset confirm') + ' ({})'.format(user),
            'title': title,
            'type': TicketType.login_asset_confirm,
            'meta': {
                'apply_login_user': str(user),
                'apply_login_asset': str(asset),
                'apply_login_system_user': str(system_user),
            },
            'applicant': user,
            'apply_login_user': user,
            'apply_login_asset': asset,
            'apply_login_system_user': system_user,
            'org_id': org_id,
        }
        ticket = Ticket.objects.create(**data)
        ticket.create_process_map_and_node(assignees)
        ticket.open(applicant=user)
        ticket = ApplyLoginAssetTicket.objects.create(**data)
        ticket.open_by_system(assignees)
        return ticket
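The hunks above replace the old allow_user_confirm_if_need / allow_user_to_login pair with a single match() lookup plus is_action() checks. The following is a minimal illustrative sketch, not code from the repository, of how a caller could drive that API; the function name check_login_acl and the import path are assumptions.

```python
# Illustrative only: uses LoginACL.match()/is_action()/create_confirm_ticket()
# exactly as shown in the diff above; everything else is assumed.
from acls.models import LoginACL


def check_login_acl(user, ip, request=None):
    acl = LoginACL.match(user, ip)
    if acl is None:
        # No ACL rule covers this IP/time window: let the login proceed.
        return True, None
    if acl.is_action(LoginACL.ActionChoices.reject):
        # An explicit reject rule matched.
        return False, None
    if acl.is_action(LoginACL.ActionChoices.confirm):
        # A confirm rule matched: open a login-confirm ticket for the reviewers.
        ticket = acl.create_confirm_ticket(request)
        return False, ticket
    # Remaining case: an allow rule matched.
    return True, None
```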
@@ -2,14 +2,16 @@
#

from django_filters import rest_framework as filters
from django.db.models import F, Q
from django.db.models import Q

from common.drf.filters import BaseFilterSet
from common.drf.api import JMSBulkModelViewSet
from common.mixins import RecordViewLogMixin
from common.permissions import UserConfirmation
from authentication.const import ConfirmType
from rbac.permissions import RBACPermission
from assets.models import SystemUser
from ..models import Account
from ..hands import NeedMFAVerify
from .. import serializers

@@ -54,9 +56,9 @@ class SystemUserAppRelationViewSet(ApplicationAccountViewSet):
    perm_model = SystemUser


class ApplicationAccountSecretViewSet(ApplicationAccountViewSet):
class ApplicationAccountSecretViewSet(RecordViewLogMixin, ApplicationAccountViewSet):
    serializer_class = serializers.AppAccountSecretSerializer
    permission_classes = [RBACPermission, NeedMFAVerify]
    permission_classes = [RBACPermission, UserConfirmation.require(ConfirmType.MFA)]
    http_method_names = ['get', 'options']
    rbac_perms = {
        'retrieve': 'applications.view_applicationaccountsecret',
@@ -1,10 +1,10 @@
# coding: utf-8
#
from django.shortcuts import get_object_or_404
from orgs.mixins.api import OrgBulkModelViewSet
from rest_framework.decorators import action
from rest_framework.response import Response


from common.tree import TreeNodeSerializer
from common.mixins.api import SuggestionMixin
from .. import serializers
@@ -7,3 +7,7 @@ from django.apps import AppConfig
class ApplicationsConfig(AppConfig):
    name = 'applications'
    verbose_name = _('Applications')

    def ready(self):
        from . import signal_handlers
        super().ready()
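This hunk wires a signal_handlers module into ApplicationsConfig.ready(). As a generic illustration of that Django pattern (the receiver below is an assumption, not code from this repository), importing the module inside ready() is what registers its @receiver callbacks with the signal dispatcher:

```python
# applications/signal_handlers.py, illustrative sketch only.
from django.db.models.signals import post_save
from django.dispatch import receiver

from .models import Application


@receiver(post_save, sender=Application)
def on_application_saved(sender, instance, created, **kwargs):
    # Runs after every Application save; real handlers might refresh caches
    # or write audit records here.
    if created:
        print('application created:', instance)
```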
@@ -27,6 +27,7 @@ class AppType(models.TextChoices):
    sqlserver = 'sqlserver', 'SQLServer'
    redis = 'redis', 'Redis'
    mongodb = 'mongodb', 'MongoDB'
    clickhouse = 'clickhouse', 'ClickHouse'

    # remote-app category
    chrome = 'chrome', 'Chrome'
@@ -42,7 +43,7 @@ class AppType(models.TextChoices):
        return {
            AppCategory.db: [
                cls.mysql, cls.mariadb, cls.oracle, cls.pgsql,
                cls.sqlserver, cls.redis, cls.mongodb
                cls.sqlserver, cls.redis, cls.mongodb, cls.clickhouse
            ],
            AppCategory.remote_app: [
                cls.chrome, cls.mysql_workbench,
@@ -82,4 +83,4 @@ class AppType(models.TextChoices):

        if AppCategory.is_xpack(category):
            return True
        return tp in ['oracle', 'postgresql', 'sqlserver']
        return tp in ['oracle', 'postgresql', 'sqlserver', 'clickhouse']
@@ -11,5 +11,4 @@
"""


from common.permissions import NeedMFAVerify
from users.models import User, UserGroup
@@ -1,6 +1,6 @@
# Generated by Django 2.1.7 on 2019-05-20 11:04

import common.fields.model
import common.db.fields
from django.db import migrations, models
import django.db.models.deletion
import uuid
@@ -23,7 +23,7 @@ class Migration(migrations.Migration):
                ('name', models.CharField(max_length=128, verbose_name='Name')),
                ('type', models.CharField(choices=[('Browser', (('chrome', 'Chrome'),)), ('Database tools', (('mysql_workbench', 'MySQL Workbench'),)), ('Virtualization tools', (('vmware_client', 'vSphere Client'),)), ('custom', 'Custom')], default='chrome', max_length=128, verbose_name='App type')),
                ('path', models.CharField(max_length=128, verbose_name='App path')),
                ('params', common.fields.model.EncryptJsonDictTextField(blank=True, default={}, max_length=4096, null=True, verbose_name='Parameters')),
                ('params', common.db.fields.EncryptJsonDictTextField(blank=True, default={}, max_length=4096, null=True, verbose_name='Parameters')),
                ('created_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Created by')),
                ('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
                ('comment', models.TextField(blank=True, default='', max_length=128, verbose_name='Comment')),
@@ -1,7 +1,7 @@
# Generated by Django 3.1.12 on 2021-08-26 09:07

import assets.models.base
import common.fields.model
import common.db.fields
from django.conf import settings
import django.core.validators
from django.db import migrations, models
@@ -26,9 +26,9 @@ class Migration(migrations.Migration):
                ('id', models.UUIDField(db_index=True, default=uuid.uuid4)),
                ('name', models.CharField(max_length=128, verbose_name='Name')),
                ('username', models.CharField(blank=True, db_index=True, max_length=128, validators=[django.core.validators.RegexValidator('^[0-9a-zA-Z_@\\-\\.]*$', 'Special char not allowed')], verbose_name='Username')),
                ('password', common.fields.model.EncryptCharField(blank=True, max_length=256, null=True, verbose_name='Password')),
                ('private_key', common.fields.model.EncryptTextField(blank=True, null=True, verbose_name='SSH private key')),
                ('public_key', common.fields.model.EncryptTextField(blank=True, null=True, verbose_name='SSH public key')),
                ('password', common.db.fields.EncryptCharField(blank=True, max_length=256, null=True, verbose_name='Password')),
                ('private_key', common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='SSH private key')),
                ('public_key', common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='SSH public key')),
                ('comment', models.TextField(blank=True, verbose_name='Comment')),
                ('date_created', models.DateTimeField(blank=True, editable=False, verbose_name='Date created')),
                ('date_updated', models.DateTimeField(blank=True, editable=False, verbose_name='Date updated')),
@@ -56,9 +56,9 @@ class Migration(migrations.Migration):
                ('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ('name', models.CharField(max_length=128, verbose_name='Name')),
                ('username', models.CharField(blank=True, db_index=True, max_length=128, validators=[django.core.validators.RegexValidator('^[0-9a-zA-Z_@\\-\\.]*$', 'Special char not allowed')], verbose_name='Username')),
                ('password', common.fields.model.EncryptCharField(blank=True, max_length=256, null=True, verbose_name='Password')),
                ('private_key', common.fields.model.EncryptTextField(blank=True, null=True, verbose_name='SSH private key')),
                ('public_key', common.fields.model.EncryptTextField(blank=True, null=True, verbose_name='SSH public key')),
                ('password', common.db.fields.EncryptCharField(blank=True, max_length=256, null=True, verbose_name='Password')),
                ('private_key', common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='SSH private key')),
                ('public_key', common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='SSH public key')),
                ('comment', models.TextField(blank=True, verbose_name='Comment')),
                ('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date created')),
                ('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
22 changes: apps/applications/migrations/0021_auto_20220629_1826.py (new file)
@@ -0,0 +1,22 @@
# Generated by Django 3.1.14 on 2022-06-29 10:26

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('applications', '0020_auto_20220316_2028'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='historicalaccount',
            options={'get_latest_by': ('history_date', 'history_id'), 'ordering': ('-history_date', '-history_id'), 'verbose_name': 'historical Application account', 'verbose_name_plural': 'historical Application accounts'},
        ),
        migrations.AlterField(
            model_name='historicalaccount',
            name='history_date',
            field=models.DateTimeField(db_index=True),
        ),
    ]
23 changes: apps/applications/migrations/0022_auto_20220714_1046.py (new file)
@@ -0,0 +1,23 @@
# Generated by Django 3.2.12 on 2022-07-14 02:46

from django.db import migrations


def migrate_db_oracle_version_to_attrs(apps, schema_editor):
    db_alias = schema_editor.connection.alias
    model = apps.get_model("applications", "Application")
    oracles = list(model.objects.using(db_alias).filter(type='oracle'))
    for o in oracles:
        o.attrs['version'] = '12c'
    model.objects.using(db_alias).bulk_update(oracles, ['attrs'])


class Migration(migrations.Migration):

    dependencies = [
        ('applications', '0021_auto_20220629_1826'),
    ]

    operations = [
        migrations.RunPython(migrate_db_oracle_version_to_attrs)
    ]
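A side note on the data migration above, not part of the diff: RunPython operations like this one cannot be migrated backwards unless a reverse function is supplied. A hedged sketch of the same structure with a no-op reverse, which keeps `migrate applications 0021` possible, looks like this:

```python
# Illustrative pattern only; the real 0022 migration above ships no reverse step.
from django.db import migrations


def forwards(apps, schema_editor):
    Application = apps.get_model('applications', 'Application')
    ...  # data change, as in migrate_db_oracle_version_to_attrs above


class Migration(migrations.Migration):
    dependencies = [('applications', '0021_auto_20220629_1826')]
    operations = [
        migrations.RunPython(forwards, reverse_code=migrations.RunPython.noop),
    ]
```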
48 changes: apps/applications/migrations/0023_auto_20220715_1556.py (new file)
@@ -0,0 +1,48 @@
# Generated by Django 3.1.14 on 2022-07-15 07:56
import time
from collections import defaultdict

from django.db import migrations


def migrate_account_dirty_data(apps, schema_editor):
    db_alias = schema_editor.connection.alias
    account_model = apps.get_model('applications', 'Account')

    count = 0
    bulk_size = 1000

    while True:
        accounts = account_model.objects.using(db_alias) \
            .filter(org_id='')[count:count + bulk_size]

        if not accounts:
            break

        accounts = list(accounts)
        start = time.time()
        for i in accounts:
            if i.app:
                org_id = i.app.org_id
            elif i.systemuser:
                org_id = i.systemuser.org_id
            else:
                org_id = ''
            if org_id:
                i.org_id = org_id

        account_model.objects.bulk_update(accounts, ['org_id', ])
        print("Update account org is empty: {}-{} using: {:.2f}s".format(
            count, count + len(accounts), time.time() - start
        ))
        count += len(accounts)


class Migration(migrations.Migration):
    dependencies = [
        ('applications', '0022_auto_20220714_1046'),
    ]

    operations = [
        migrations.RunPython(migrate_account_dirty_data),
    ]
18 changes: apps/applications/migrations/0024_alter_application_type.py (new file)
@@ -0,0 +1,18 @@
# Generated by Django 3.2.14 on 2022-11-04 07:06

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('applications', '0023_auto_20220715_1556'),
    ]

    operations = [
        migrations.AlterField(
            model_name='application',
            name='type',
            field=models.CharField(choices=[('mysql', 'MySQL'), ('mariadb', 'MariaDB'), ('oracle', 'Oracle'), ('postgresql', 'PostgreSQL'), ('sqlserver', 'SQLServer'), ('redis', 'Redis'), ('mongodb', 'MongoDB'), ('clickhouse', 'ClickHouse'), ('chrome', 'Chrome'), ('mysql_workbench', 'MySQL Workbench'), ('vmware_client', 'vSphere Client'), ('custom', 'Custom'), ('k8s', 'Kubernetes')], max_length=16, verbose_name='Type'),
        ),
    ]
@@ -11,7 +11,6 @@ from common.tree import TreeNode
from common.utils import is_uuid
from assets.models import Asset, SystemUser

from ..utils import KubernetesTree
from .. import const


@@ -174,6 +173,7 @@ class ApplicationTreeNodeMixin:
        return pid

    def as_tree_node(self, pid, k8s_as_tree=False):
        from ..utils import KubernetesTree
        if self.type == const.AppType.k8s and k8s_as_tree:
            node = KubernetesTree(pid).as_tree_node(self)
        else:
@@ -214,6 +214,8 @@ class ApplicationTreeNodeMixin:


class Application(CommonModelMixin, OrgModelMixin, ApplicationTreeNodeMixin):
    APP_TYPE = const.AppType

    name = models.CharField(max_length=128, verbose_name=_('Name'))
    category = models.CharField(
        max_length=16, choices=const.AppCategory.choices, verbose_name=_('Category')
@@ -255,6 +257,9 @@ class Application(CommonModelMixin, OrgModelMixin, ApplicationTreeNodeMixin):
    def category_db(self):
        return self.category == const.AppCategory.db.value

    def is_type(self, tp):
        return self.type == tp

    def get_rdp_remote_app_setting(self):
        from applications.serializers.attrs import get_serializer_class_by_application_type
        if not self.category_remote_app:
@@ -5,7 +5,7 @@ from django.utils.translation import ugettext_lazy as _

from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from assets.serializers.base import AuthSerializerMixin
from common.drf.serializers import MethodSerializer
from common.drf.serializers import MethodSerializer, SecretReadableMixin
from .attrs import (
    category_serializer_classes_mapping,
    type_serializer_classes_mapping,
@@ -16,7 +16,7 @@ from .. import const

__all__ = [
    'AppSerializer', 'MiniAppSerializer', 'AppSerializerMixin',
    'AppAccountSerializer', 'AppAccountSecretSerializer'
    'AppAccountSerializer', 'AppAccountSecretSerializer', 'AppAccountBackUpSerializer'
]


@@ -32,21 +32,23 @@ class AppSerializerMixin(serializers.Serializer):
        return instance

    def get_attrs_serializer(self):
        default_serializer = serializers.Serializer(read_only=True)
        instance = self.app
        if instance:
            _type = instance.type
            _category = instance.category
        else:
            _type = self.context['request'].query_params.get('type')
            _category = self.context['request'].query_params.get('category')
        if _type:
            if isinstance(self, AppAccountSecretSerializer):
                serializer_class = type_secret_serializer_classes_mapping.get(_type)
        tp = getattr(self, 'tp', None)
        default_serializer = serializers.Serializer(read_only=True)
        if not tp:
            if instance:
                tp = instance.type
                category = instance.category
            else:
                serializer_class = type_serializer_classes_mapping.get(_type)
        elif _category:
            serializer_class = category_serializer_classes_mapping.get(_category)
                tp = self.context['request'].query_params.get('type')
                category = self.context['request'].query_params.get('category')
        if tp:
            if isinstance(self, AppAccountBackUpSerializer):
                serializer_class = type_secret_serializer_classes_mapping.get(tp)
            else:
                serializer_class = type_serializer_classes_mapping.get(tp)
        elif category:
            serializer_class = category_serializer_classes_mapping.get(category)
        else:
            serializer_class = default_serializer

@@ -152,13 +154,8 @@ class AppAccountSerializer(AppSerializerMixin, AuthSerializerMixin, BulkOrgResou
        return super().to_representation(instance)


class AppAccountSecretSerializer(AppAccountSerializer):
class AppAccountSecretSerializer(SecretReadableMixin, AppAccountSerializer):
    class Meta(AppAccountSerializer.Meta):
        fields_backup = [
            'id', 'app_display', 'attrs', 'username', 'password', 'private_key',
            'public_key', 'date_created', 'date_updated', 'version'
        ]

        extra_kwargs = {
            'password': {'write_only': False},
            'private_key': {'write_only': False},
@@ -166,3 +163,22 @@ class AppAccountSecretSerializer(AppAccountSerializer):
            'app_display': {'label': _('Application display')},
            'systemuser_display': {'label': _('System User')}
        }


class AppAccountBackUpSerializer(AppAccountSecretSerializer):
    class Meta(AppAccountSecretSerializer.Meta):
        fields = [
            'id', 'app_display', 'attrs', 'username', 'password', 'private_key',
            'public_key', 'date_created', 'date_updated', 'version'
        ]

    def __init__(self, *args, **kwargs):
        self.tp = kwargs.pop('tp', None)
        super().__init__(*args, **kwargs)

    @classmethod
    def setup_eager_loading(cls, queryset):
        return queryset

    def to_representation(self, instance):
        return super(AppAccountSerializer, self).to_representation(instance)
@@ -13,3 +13,14 @@ class DBSerializer(serializers.Serializer):
|
||||
database = serializers.CharField(
|
||||
max_length=128, required=True, allow_null=True, label=_('Database')
|
||||
)
|
||||
use_ssl = serializers.BooleanField(default=False, label=_('Use SSL'))
|
||||
ca_cert = serializers.CharField(
|
||||
required=False, allow_null=True, label=_('CA certificate')
|
||||
)
|
||||
client_cert = serializers.CharField(
|
||||
required=False, allow_null=True, label=_('Client certificate file')
|
||||
)
|
||||
cert_key = serializers.CharField(
|
||||
required=False, allow_null=True, label=_('Certificate key file')
|
||||
)
|
||||
allow_invalid_cert = serializers.BooleanField(default=False, label=_('Allow invalid cert'))
|
||||
|
||||
@@ -31,7 +31,7 @@ class ExistAssetPrimaryKeyRelatedField(serializers.PrimaryKeyRelatedField):
|
||||
|
||||
|
||||
class RemoteAppSerializer(serializers.Serializer):
|
||||
asset_info = serializers.SerializerMethodField()
|
||||
asset_info = serializers.SerializerMethodField(label=_('Asset Info'))
|
||||
asset = ExistAssetPrimaryKeyRelatedField(
|
||||
queryset=Asset.objects, required=True, label=_("Asset"), allow_null=True
|
||||
)
|
||||
|
||||
@@ -6,6 +6,7 @@ from .pgsql import *
|
||||
from .sqlserver import *
|
||||
from .redis import *
|
||||
from .mongodb import *
|
||||
from .clickhouse import *
|
||||
|
||||
from .chrome import *
|
||||
from .mysql_workbench import *
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from common.drf.fields import EncryptedField
|
||||
from ..application_category import RemoteAppSerializer
|
||||
|
||||
__all__ = ['ChromeSerializer', 'ChromeSecretSerializer']
|
||||
@@ -13,19 +14,21 @@ class ChromeSerializer(RemoteAppSerializer):
|
||||
max_length=128, label=_('Application path'), default=CHROME_PATH, allow_null=True,
|
||||
)
|
||||
chrome_target = serializers.CharField(
|
||||
max_length=128, allow_blank=True, required=False, label=_('Target URL'), allow_null=True,
|
||||
max_length=128, allow_blank=True, required=False,
|
||||
label=_('Target URL'), allow_null=True,
|
||||
)
|
||||
chrome_username = serializers.CharField(
|
||||
max_length=128, allow_blank=True, required=False, label=_('Chrome username'), allow_null=True,
|
||||
max_length=128, allow_blank=True, required=False,
|
||||
label=_('Chrome username'), allow_null=True,
|
||||
)
|
||||
chrome_password = serializers.CharField(
|
||||
max_length=128, allow_blank=True, required=False, write_only=True, label=_('Chrome password'),
|
||||
allow_null=True
|
||||
chrome_password = EncryptedField(
|
||||
max_length=128, allow_blank=True, required=False,
|
||||
label=_('Chrome password'), allow_null=True, encrypted_key='chrome_password'
|
||||
)
|
||||
|
||||
|
||||
class ChromeSecretSerializer(ChromeSerializer):
|
||||
chrome_password = serializers.CharField(
|
||||
max_length=128, allow_blank=True, required=False, read_only=True, label=_('Chrome password'),
|
||||
allow_null=True
|
||||
chrome_password = EncryptedField(
|
||||
max_length=128, allow_blank=True, required=False,
|
||||
label=_('Chrome password'), allow_null=True, write_only=False
|
||||
)
|
||||
|
||||
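A note on the pattern running through these serializer hunks: the regular serializer keeps secrets write-only (or swaps CharField for the project's EncryptedField), while the *Secret* variant re-exposes the same fields so exports and backups can read them. A minimal, self-contained DRF sketch of that idea, with made-up field names rather than the real ones from the diff:

from rest_framework import serializers

class AccountSketchSerializer(serializers.Serializer):
    username = serializers.CharField()
    # Write-only: accepted on input, never included in API output.
    password = serializers.CharField(write_only=True)

class AccountSecretSketchSerializer(AccountSketchSerializer):
    # The "secret" variant flips write_only so the value becomes readable,
    # mirroring what SecretReadableMixin and the extra_kwargs above achieve.
    password = serializers.CharField(write_only=False)

record = {'username': 'web', 'password': 's3cret'}
print(AccountSketchSerializer(record).data)        # {'username': 'web'}
print(AccountSecretSketchSerializer(record).data)  # also contains 'password'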
@@ -0,0 +1,16 @@
from rest_framework import serializers
from django.utils.translation import ugettext_lazy as _

from ..application_category import DBSerializer

__all__ = ['ClickHouseSerializer']


class ClickHouseSerializer(DBSerializer):
    port = serializers.IntegerField(
        default=9000, label=_('Port'), allow_null=True,
        help_text=_(
            'Typically, the port is 9000, '
            'the HTTP interface and the native interface use different ports'
        ),
    )
@@ -1,6 +1,7 @@
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from common.drf.fields import EncryptedField
|
||||
from ..application_category import RemoteAppSerializer
|
||||
|
||||
__all__ = ['CustomSerializer', 'CustomSecretSerializer']
|
||||
@@ -19,14 +20,14 @@ class CustomSerializer(RemoteAppSerializer):
|
||||
max_length=128, allow_blank=True, required=False, label=_('Custom Username'),
|
||||
allow_null=True,
|
||||
)
|
||||
custom_password = serializers.CharField(
|
||||
max_length=128, allow_blank=True, required=False, write_only=True, label=_('Custom password'),
|
||||
allow_null=True,
|
||||
custom_password = EncryptedField(
|
||||
max_length=128, allow_blank=True, required=False,
|
||||
label=_('Custom password'), allow_null=True,
|
||||
)
|
||||
|
||||
|
||||
class CustomSecretSerializer(RemoteAppSerializer):
|
||||
custom_password = serializers.CharField(
|
||||
max_length=128, allow_blank=True, required=False, read_only=True, label=_('Custom password'),
|
||||
allow_null=True,
|
||||
custom_password = EncryptedField(
|
||||
max_length=128, allow_blank=True, required=False, write_only=False,
|
||||
label=_('Custom password'), allow_null=True,
|
||||
)
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from common.drf.fields import EncryptedField
|
||||
from ..application_category import RemoteAppSerializer
|
||||
|
||||
__all__ = ['MySQLWorkbenchSerializer', 'MySQLWorkbenchSecretSerializer']
|
||||
@@ -29,14 +30,14 @@ class MySQLWorkbenchSerializer(RemoteAppSerializer):
|
||||
max_length=128, allow_blank=True, required=False, label=_('Mysql workbench username'),
|
||||
allow_null=True,
|
||||
)
|
||||
mysql_workbench_password = serializers.CharField(
|
||||
max_length=128, allow_blank=True, required=False, write_only=True, label=_('Mysql workbench password'),
|
||||
allow_null=True,
|
||||
mysql_workbench_password = EncryptedField(
|
||||
max_length=128, allow_blank=True, required=False,
|
||||
label=_('Mysql workbench password'), allow_null=True,
|
||||
)
|
||||
|
||||
|
||||
class MySQLWorkbenchSecretSerializer(RemoteAppSerializer):
|
||||
mysql_workbench_password = serializers.CharField(
|
||||
max_length=128, allow_blank=True, required=False, read_only=True, label=_('Mysql workbench password'),
|
||||
allow_null=True,
|
||||
mysql_workbench_password = EncryptedField(
|
||||
max_length=128, allow_blank=True, required=False, write_only=False,
|
||||
label=_('Mysql workbench password'), allow_null=True,
|
||||
)
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from common.drf.fields import EncryptedField
|
||||
from ..application_category import RemoteAppSerializer
|
||||
|
||||
__all__ = ['VMwareClientSerializer', 'VMwareClientSecretSerializer']
|
||||
@@ -25,14 +26,14 @@ class VMwareClientSerializer(RemoteAppSerializer):
|
||||
max_length=128, allow_blank=True, required=False, label=_('Vmware username'),
|
||||
allow_null=True
|
||||
)
|
||||
vmware_password = serializers.CharField(
|
||||
max_length=128, allow_blank=True, required=False, write_only=True, label=_('Vmware password'),
|
||||
allow_null=True
|
||||
vmware_password = EncryptedField(
|
||||
max_length=128, allow_blank=True, required=False,
|
||||
label=_('Vmware password'), allow_null=True
|
||||
)
|
||||
|
||||
|
||||
class VMwareClientSecretSerializer(RemoteAppSerializer):
|
||||
vmware_password = serializers.CharField(
|
||||
max_length=128, allow_blank=True, required=False, read_only=True, label=_('Vmware password'),
|
||||
allow_null=True
|
||||
vmware_password = EncryptedField(
|
||||
max_length=128, allow_blank=True, required=False, write_only=False,
|
||||
label=_('Vmware password'), allow_null=True
|
||||
)
|
||||
|
||||
@@ -31,6 +31,7 @@ type_serializer_classes_mapping = {
|
||||
const.AppType.sqlserver.value: application_type.SQLServerSerializer,
|
||||
const.AppType.redis.value: application_type.RedisSerializer,
|
||||
const.AppType.mongodb.value: application_type.MongoDBSerializer,
|
||||
const.AppType.clickhouse.value: application_type.ClickHouseSerializer,
|
||||
# cloud
|
||||
const.AppType.k8s.value: application_type.K8SSerializer
|
||||
}
|
||||
|
||||
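The mapping above is what AppSerializerMixin.get_attrs_serializer consults: given an app type it looks up the serializer class that validates that type's attrs, falling back to an empty read-only serializer for unknown types. A stripped-down sketch of the dispatch, with shortened, illustrative names rather than the real module layout:

from rest_framework import serializers

class MySQLSketch(serializers.Serializer):
    port = serializers.IntegerField(default=3306)

class ClickHouseSketch(serializers.Serializer):
    port = serializers.IntegerField(default=9000)

type_serializer_classes = {
    'mysql': MySQLSketch,
    'clickhouse': ClickHouseSketch,
}

def get_attrs_serializer(tp):
    # Unknown types fall back to an empty read-only serializer,
    # like the default_serializer branch in the hunk further up.
    cls = type_serializer_classes.get(tp)
    return cls() if cls else serializers.Serializer(read_only=True)

print(type(get_attrs_serializer('clickhouse')).__name__)  # ClickHouseSketch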
2
apps/applications/signal_handlers.py
Normal file
@@ -0,0 +1,2 @@
# -*- coding: utf-8 -*-
#
@@ -11,3 +11,4 @@ from .cmd_filter import *
|
||||
from .gathered_user import *
|
||||
from .favorite_asset import *
|
||||
from .account_backup import *
|
||||
from .account_history import *
|
||||
|
||||
50
apps/assets/api/account_history.py
Normal file
@@ -0,0 +1,50 @@
from django.db.models import F

from assets.api.accounts import (
    AccountFilterSet, AccountViewSet, AccountSecretsViewSet
)
from common.mixins import RecordViewLogMixin
from .. import serializers
from ..models import AuthBook

__all__ = ['AccountHistoryViewSet', 'AccountHistorySecretsViewSet']


class AccountHistoryFilterSet(AccountFilterSet):
    class Meta:
        model = AuthBook.history.model
        fields = AccountFilterSet.Meta.fields


class AccountHistoryViewSet(AccountViewSet):
    model = AuthBook.history.model
    filterset_class = AccountHistoryFilterSet
    serializer_classes = {
        'default': serializers.AccountHistorySerializer,
    }
    rbac_perms = {
        'list': 'assets.view_assethistoryaccount',
        'retrieve': 'assets.view_assethistoryaccount',
    }

    http_method_names = ['get', 'options']

    def get_queryset(self):
        queryset = self.model.objects.all() \
            .annotate(ip=F('asset__ip')) \
            .annotate(hostname=F('asset__hostname')) \
            .annotate(platform=F('asset__platform__name')) \
            .annotate(protocols=F('asset__protocols'))
        return queryset


class AccountHistorySecretsViewSet(RecordViewLogMixin, AccountHistoryViewSet):
    serializer_classes = {
        'default': serializers.AccountHistorySecretSerializer
    }
    http_method_names = ['get']
    permission_classes = AccountSecretsViewSet.permission_classes
    rbac_perms = {
        'list': 'assets.view_assethistoryaccountsecret',
        'retrieve': 'assets.view_assethistoryaccountsecret',
    }
@@ -1,4 +1,4 @@
|
||||
from django.db.models import F, Q
|
||||
from django.db.models import Q
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django_filters import rest_framework as filters
|
||||
from rest_framework.decorators import action
|
||||
@@ -8,12 +8,14 @@ from rest_framework.generics import CreateAPIView
|
||||
from orgs.mixins.api import OrgBulkModelViewSet
|
||||
from rbac.permissions import RBACPermission
|
||||
from common.drf.filters import BaseFilterSet
|
||||
from common.permissions import NeedMFAVerify
|
||||
from common.mixins import RecordViewLogMixin
|
||||
from common.permissions import UserConfirmation
|
||||
from authentication.const import ConfirmType
|
||||
from ..tasks.account_connectivity import test_accounts_connectivity_manual
|
||||
from ..models import AuthBook, Node
|
||||
from .. import serializers
|
||||
|
||||
__all__ = ['AccountViewSet', 'AccountSecretsViewSet', 'AccountTaskCreateAPI']
|
||||
__all__ = ['AccountFilterSet', 'AccountViewSet', 'AccountSecretsViewSet', 'AccountTaskCreateAPI']
|
||||
|
||||
|
||||
class AccountFilterSet(BaseFilterSet):
|
||||
@@ -79,7 +81,7 @@ class AccountViewSet(OrgBulkModelViewSet):
|
||||
return Response(data={'task': task.id})
|
||||
|
||||
|
||||
class AccountSecretsViewSet(AccountViewSet):
|
||||
class AccountSecretsViewSet(RecordViewLogMixin, AccountViewSet):
|
||||
"""
|
||||
Because all accounts may need to be exported, a separate viewset is created for this
|
||||
"""
|
||||
@@ -87,7 +89,7 @@ class AccountSecretsViewSet(AccountViewSet):
|
||||
'default': serializers.AccountSecretSerializer
|
||||
}
|
||||
http_method_names = ['get']
|
||||
permission_classes = [RBACPermission, NeedMFAVerify]
|
||||
permission_classes = [RBACPermission, UserConfirmation.require(ConfirmType.MFA)]
|
||||
rbac_perms = {
|
||||
'list': 'assets.view_assetaccountsecret',
|
||||
'retrieve': 'assets.view_assetaccountsecret',
|
||||
|
||||
@@ -6,7 +6,7 @@ from django.shortcuts import get_object_or_404
|
||||
from django.db.models import Q
|
||||
|
||||
from common.utils import get_logger, get_object_or_none
|
||||
from common.mixins.api import SuggestionMixin
|
||||
from common.mixins.api import SuggestionMixin, RenderToJsonMixin
|
||||
from users.models import User, UserGroup
|
||||
from users.serializers import UserSerializer, UserGroupSerializer
|
||||
from users.filters import UserFilter
|
||||
@@ -88,7 +88,7 @@ class AssetPlatformRetrieveApi(RetrieveAPIView):
|
||||
return asset.platform
|
||||
|
||||
|
||||
class AssetPlatformViewSet(ModelViewSet):
|
||||
class AssetPlatformViewSet(ModelViewSet, RenderToJsonMixin):
|
||||
queryset = Platform.objects.all()
|
||||
serializer_class = serializers.PlatformSerializer
|
||||
filterset_fields = ['name', 'base']
|
||||
|
||||
@@ -69,7 +69,7 @@ class CommandConfirmAPI(CreateAPIView):
|
||||
external=True, api_to_ui=True
|
||||
)
|
||||
ticket_detail_url = '{url}?type={type}'.format(url=ticket_detail_url, type=ticket.type)
|
||||
ticket_assignees = ticket.current_node.first().ticket_assignees.all()
|
||||
ticket_assignees = ticket.current_step.ticket_assignees.all()
|
||||
return {
|
||||
'check_confirm_status': {'method': 'GET', 'url': confirm_status_url},
|
||||
'close_confirm': {'method': 'DELETE', 'url': confirm_status_url},
|
||||
|
||||
@@ -24,7 +24,7 @@ class SerializeToTreeNodeMixin:
|
||||
'title': _name(node),
|
||||
'pId': node.parent_key,
|
||||
'isParent': True,
|
||||
'open': node.is_org_root(),
|
||||
'open': True,
|
||||
'meta': {
|
||||
'data': {
|
||||
"id": node.id,
|
||||
|
||||
@@ -43,7 +43,7 @@ __all__ = [
|
||||
class NodeViewSet(SuggestionMixin, OrgBulkModelViewSet):
|
||||
model = Node
|
||||
filterset_fields = ('value', 'key', 'id')
|
||||
search_fields = ('value',)
|
||||
search_fields = ('full_value',)
|
||||
serializer_class = serializers.NodeSerializer
|
||||
rbac_perms = {
|
||||
'match': 'assets.match_node',
|
||||
@@ -101,6 +101,8 @@ class NodeListAsTreeApi(generics.ListAPIView):
|
||||
|
||||
class NodeChildrenApi(generics.ListCreateAPIView):
|
||||
serializer_class = serializers.NodeSerializer
|
||||
search_fields = ('value',)
|
||||
|
||||
instance = None
|
||||
is_initial = False
|
||||
|
||||
@@ -179,8 +181,15 @@ class NodeChildrenAsTreeApi(SerializeToTreeNodeMixin, NodeChildrenApi):
|
||||
"""
|
||||
model = Node
|
||||
|
||||
def filter_queryset(self, queryset):
|
||||
if not self.request.GET.get('search'):
|
||||
return queryset
|
||||
queryset = super().filter_queryset(queryset)
|
||||
queryset = self.model.get_ancestor_queryset(queryset)
|
||||
return queryset
|
||||
|
||||
def list(self, request, *args, **kwargs):
|
||||
nodes = self.get_queryset().order_by('value')
|
||||
nodes = self.filter_queryset(self.get_queryset()).order_by('value')
|
||||
nodes = self.serialize_nodes(nodes, with_asset_amount=True)
|
||||
assets = self.get_assets()
|
||||
data = [*nodes, *assets]
|
||||
|
||||
@@ -4,7 +4,6 @@ from rest_framework.response import Response
|
||||
from rest_framework.decorators import action
|
||||
|
||||
from common.utils import get_logger, get_object_or_none
|
||||
from common.utils.crypto import get_aes_crypto
|
||||
from common.permissions import IsValidUser
|
||||
from common.mixins.api import SuggestionMixin
|
||||
from orgs.mixins.api import OrgBulkModelViewSet
|
||||
@@ -102,27 +101,17 @@ class SystemUserTempAuthInfoApi(generics.CreateAPIView):
|
||||
permission_classes = (IsValidUser,)
|
||||
serializer_class = SystemUserTempAuthSerializer
|
||||
|
||||
def decrypt_data_if_need(self, data):
|
||||
csrf_token = self.request.META.get('CSRF_COOKIE')
|
||||
aes = get_aes_crypto(csrf_token, 'ECB')
|
||||
password = data.get('password', '')
|
||||
try:
|
||||
data['password'] = aes.decrypt(password)
|
||||
except:
|
||||
pass
|
||||
return data
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
serializer = super().get_serializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
pk = kwargs.get('pk')
|
||||
data = self.decrypt_data_if_need(serializer.validated_data)
|
||||
instance_id = data.get('instance_id')
|
||||
data = serializer.validated_data
|
||||
asset_or_app_id = data.get('instance_id')
|
||||
|
||||
with tmp_to_root_org():
|
||||
instance = get_object_or_404(SystemUser, pk=pk)
|
||||
instance.set_temp_auth(instance_id, self.request.user.id, data)
|
||||
instance.set_temp_auth(asset_or_app_id, self.request.user.id, data)
|
||||
return Response(serializer.data, status=201)
|
||||
|
||||
|
||||
@@ -219,7 +208,7 @@ class SystemUserTaskApi(generics.CreateAPIView):
|
||||
|
||||
class SystemUserCommandFilterRuleListApi(generics.ListAPIView):
|
||||
rbac_perms = {
|
||||
'list': 'assets.view_commandfilterule'
|
||||
'list': 'assets.view_commandfilterule',
|
||||
}
|
||||
|
||||
def get_serializer_class(self):
|
||||
@@ -234,12 +223,14 @@ class SystemUserCommandFilterRuleListApi(generics.ListAPIView):
|
||||
if not system_user:
|
||||
system_user_id = self.request.query_params.get('system_user_id')
|
||||
asset_id = self.request.query_params.get('asset_id')
|
||||
node_id = self.request.query_params.get('node_id')
|
||||
application_id = self.request.query_params.get('application_id')
|
||||
rules = CommandFilterRule.get_queryset(
|
||||
user_id=user_id,
|
||||
user_group_id=user_group_id,
|
||||
system_user_id=system_user_id,
|
||||
asset_id=asset_id,
|
||||
node_id=node_id,
|
||||
application_id=application_id
|
||||
)
|
||||
return rules
|
||||
|
||||
@@ -21,8 +21,8 @@ class Migration(migrations.Migration):
|
||||
('name', models.CharField(max_length=64, verbose_name='Name')),
|
||||
('is_active', models.BooleanField(default=True, verbose_name='Is active')),
|
||||
('comment', models.TextField(blank=True, default='', verbose_name='Comment')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True)),
|
||||
('date_updated', models.DateTimeField(auto_now=True)),
|
||||
('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date created')),
|
||||
('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
|
||||
('created_by', models.CharField(blank=True, default='', max_length=128, verbose_name='Created by')),
|
||||
],
|
||||
options={
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Generated by Django 2.1.7 on 2019-06-24 13:08
|
||||
|
||||
import assets.models.utils
|
||||
import common.fields.model
|
||||
import common.db.fields
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
@@ -15,61 +15,61 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='adminuser',
|
||||
name='_password',
|
||||
field=common.fields.model.EncryptCharField(blank=True, max_length=256, null=True, verbose_name='Password'),
|
||||
field=common.db.fields.EncryptCharField(blank=True, max_length=256, null=True, verbose_name='Password'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='adminuser',
|
||||
name='_private_key',
|
||||
field=common.fields.model.EncryptTextField(blank=True, null=True, validators=[assets.models.utils.private_key_validator], verbose_name='SSH private key'),
|
||||
field=common.db.fields.EncryptTextField(blank=True, null=True, validators=[assets.models.utils.private_key_validator], verbose_name='SSH private key'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='adminuser',
|
||||
name='_public_key',
|
||||
field=common.fields.model.EncryptTextField(blank=True, null=True, verbose_name='SSH public key'),
|
||||
field=common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='SSH public key'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='authbook',
|
||||
name='_password',
|
||||
field=common.fields.model.EncryptCharField(blank=True, max_length=256, null=True, verbose_name='Password'),
|
||||
field=common.db.fields.EncryptCharField(blank=True, max_length=256, null=True, verbose_name='Password'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='authbook',
|
||||
name='_private_key',
|
||||
field=common.fields.model.EncryptTextField(blank=True, null=True, validators=[assets.models.utils.private_key_validator], verbose_name='SSH private key'),
|
||||
field=common.db.fields.EncryptTextField(blank=True, null=True, validators=[assets.models.utils.private_key_validator], verbose_name='SSH private key'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='authbook',
|
||||
name='_public_key',
|
||||
field=common.fields.model.EncryptTextField(blank=True, null=True, verbose_name='SSH public key'),
|
||||
field=common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='SSH public key'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='gateway',
|
||||
name='_password',
|
||||
field=common.fields.model.EncryptCharField(blank=True, max_length=256, null=True, verbose_name='Password'),
|
||||
field=common.db.fields.EncryptCharField(blank=True, max_length=256, null=True, verbose_name='Password'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='gateway',
|
||||
name='_private_key',
|
||||
field=common.fields.model.EncryptTextField(blank=True, null=True, validators=[assets.models.utils.private_key_validator], verbose_name='SSH private key'),
|
||||
field=common.db.fields.EncryptTextField(blank=True, null=True, validators=[assets.models.utils.private_key_validator], verbose_name='SSH private key'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='gateway',
|
||||
name='_public_key',
|
||||
field=common.fields.model.EncryptTextField(blank=True, null=True, verbose_name='SSH public key'),
|
||||
field=common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='SSH public key'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='systemuser',
|
||||
name='_password',
|
||||
field=common.fields.model.EncryptCharField(blank=True, max_length=256, null=True, verbose_name='Password'),
|
||||
field=common.db.fields.EncryptCharField(blank=True, max_length=256, null=True, verbose_name='Password'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='systemuser',
|
||||
name='_private_key',
|
||||
field=common.fields.model.EncryptTextField(blank=True, null=True, validators=[assets.models.utils.private_key_validator], verbose_name='SSH private key'),
|
||||
field=common.db.fields.EncryptTextField(blank=True, null=True, validators=[assets.models.utils.private_key_validator], verbose_name='SSH private key'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='systemuser',
|
||||
name='_public_key',
|
||||
field=common.fields.model.EncryptTextField(blank=True, null=True, verbose_name='SSH public key'),
|
||||
field=common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='SSH public key'),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Generated by Django 2.1.7 on 2019-07-11 12:18
|
||||
|
||||
import common.fields.model
|
||||
import common.db.fields
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
@@ -14,21 +14,21 @@ class Migration(migrations.Migration):
|
||||
migrations.AlterField(
|
||||
model_name='adminuser',
|
||||
name='private_key',
|
||||
field=common.fields.model.EncryptTextField(blank=True, null=True, verbose_name='SSH private key'),
|
||||
field=common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='SSH private key'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='authbook',
|
||||
name='private_key',
|
||||
field=common.fields.model.EncryptTextField(blank=True, null=True, verbose_name='SSH private key'),
|
||||
field=common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='SSH private key'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='gateway',
|
||||
name='private_key',
|
||||
field=common.fields.model.EncryptTextField(blank=True, null=True, verbose_name='SSH private key'),
|
||||
field=common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='SSH private key'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='systemuser',
|
||||
name='private_key',
|
||||
field=common.fields.model.EncryptTextField(blank=True, null=True, verbose_name='SSH private key'),
|
||||
field=common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='SSH private key'),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Generated by Django 2.2.7 on 2019-12-06 07:26
|
||||
|
||||
import common.fields.model
|
||||
import common.db.fields
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
@@ -36,7 +36,7 @@ class Migration(migrations.Migration):
|
||||
('name', models.SlugField(allow_unicode=True, unique=True, verbose_name='Name')),
|
||||
('base', models.CharField(choices=[('Linux', 'Linux'), ('Unix', 'Unix'), ('MacOS', 'MacOS'), ('BSD', 'BSD'), ('Windows', 'Windows'), ('Other', 'Other')], default='Linux', max_length=16, verbose_name='Base')),
|
||||
('charset', models.CharField(choices=[('utf8', 'UTF-8'), ('gbk', 'GBK')], default='utf8', max_length=8, verbose_name='Charset')),
|
||||
('meta', common.fields.model.JsonDictTextField(blank=True, null=True, verbose_name='Meta')),
|
||||
('meta', common.db.fields.JsonDictTextField(blank=True, null=True, verbose_name='Meta')),
|
||||
('internal', models.BooleanField(default=False, verbose_name='Internal')),
|
||||
('comment', models.TextField(blank=True, null=True, verbose_name='Comment')),
|
||||
],
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Generated by Django 3.1.6 on 2021-06-05 16:10
|
||||
|
||||
import common.fields.model
|
||||
import common.db.fields
|
||||
from django.conf import settings
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
@@ -58,9 +58,9 @@ class Migration(migrations.Migration):
|
||||
('id', models.UUIDField(db_index=True, default=uuid.uuid4)),
|
||||
('name', models.CharField(max_length=128, verbose_name='Name')),
|
||||
('username', models.CharField(blank=True, db_index=True, max_length=128, validators=[django.core.validators.RegexValidator('^[0-9a-zA-Z_@\\-\\.]*$', 'Special char not allowed')], verbose_name='Username')),
|
||||
('password', common.fields.model.EncryptCharField(blank=True, max_length=256, null=True, verbose_name='Password')),
|
||||
('private_key', common.fields.model.EncryptTextField(blank=True, null=True, verbose_name='SSH private key')),
|
||||
('public_key', common.fields.model.EncryptTextField(blank=True, null=True, verbose_name='SSH public key')),
|
||||
('password', common.db.fields.EncryptCharField(blank=True, max_length=256, null=True, verbose_name='Password')),
|
||||
('private_key', common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='SSH private key')),
|
||||
('public_key', common.db.fields.EncryptTextField(blank=True, null=True, verbose_name='SSH public key')),
|
||||
('comment', models.TextField(blank=True, verbose_name='Comment')),
|
||||
('date_created', models.DateTimeField(blank=True, editable=False, verbose_name='Date created')),
|
||||
('date_updated', models.DateTimeField(blank=True, editable=False, verbose_name='Date updated')),
|
||||
|
||||
@@ -20,7 +20,7 @@ class Migration(migrations.Migration):
|
||||
fields=[
|
||||
('org_id', models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
|
||||
('name', models.CharField(max_length=128, verbose_name='Name')),
|
||||
('is_periodic', models.BooleanField(default=False)),
|
||||
('is_periodic', models.BooleanField(default=False, verbose_name='Periodic perform')),
|
||||
('interval', models.IntegerField(blank=True, default=24, null=True, verbose_name='Cycle perform')),
|
||||
('crontab', models.CharField(blank=True, max_length=128, null=True, verbose_name='Regularly perform')),
|
||||
('created_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Created by')),
|
||||
|
||||
@@ -3,6 +3,19 @@
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
def create_internal_platform(apps, schema_editor):
|
||||
model = apps.get_model("assets", "Platform")
|
||||
db_alias = schema_editor.connection.alias
|
||||
type_platforms = (
|
||||
('AIX', 'Unix', None),
|
||||
)
|
||||
for name, base, meta in type_platforms:
|
||||
defaults = {'name': name, 'base': base, 'meta': meta, 'internal': True}
|
||||
model.objects.using(db_alias).update_or_create(
|
||||
name=name, defaults=defaults
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
@@ -15,4 +28,5 @@ class Migration(migrations.Migration):
|
||||
name='number',
|
||||
field=models.CharField(blank=True, max_length=128, null=True, verbose_name='Asset number'),
|
||||
),
|
||||
migrations.RunPython(create_internal_platform)
|
||||
]
|
||||
|
||||
26
apps/assets/migrations/0091_auto_20220629_1826.py
Normal file
@@ -0,0 +1,26 @@
# Generated by Django 3.1.14 on 2022-06-29 10:26

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('assets', '0090_auto_20220412_1145'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='authbook',
            options={'permissions': [('test_authbook', 'Can test asset account connectivity'), ('view_assetaccountsecret', 'Can view asset account secret'), ('change_assetaccountsecret', 'Can change asset account secret'), ('view_assethistoryaccount', 'Can view asset history account'), ('view_assethistoryaccountsecret', 'Can view asset history account secret')], 'verbose_name': 'AuthBook'},
        ),
        migrations.AlterModelOptions(
            name='historicalauthbook',
            options={'get_latest_by': ('history_date', 'history_id'), 'ordering': ('-history_date', '-history_id'), 'verbose_name': 'historical AuthBook', 'verbose_name_plural': 'historical AuthBooks'},
        ),
        migrations.AlterField(
            model_name='historicalauthbook',
            name='history_date',
            field=models.DateTimeField(db_index=True),
        ),
    ]
18
apps/assets/migrations/0092_commandfilter_nodes.py
Normal file
@@ -0,0 +1,18 @@
# Generated by Django 3.2.15 on 2022-10-09 09:55

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('assets', '0091_auto_20220629_1826'),
    ]

    operations = [
        migrations.AddField(
            model_name='commandfilter',
            name='nodes',
            field=models.ManyToManyField(blank=True, related_name='cmd_filters', to='assets.Node', verbose_name='Nodes'),
        ),
    ]
18
apps/assets/migrations/0093_alter_systemuser_protocol.py
Normal file
@@ -0,0 +1,18 @@
# Generated by Django 3.2.14 on 2022-11-04 07:06

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('assets', '0092_commandfilter_nodes'),
    ]

    operations = [
        migrations.AlterField(
            model_name='systemuser',
            name='protocol',
            field=models.CharField(choices=[('ssh', 'SSH'), ('rdp', 'RDP'), ('telnet', 'Telnet'), ('vnc', 'VNC'), ('mysql', 'MySQL'), ('oracle', 'Oracle'), ('mariadb', 'MariaDB'), ('postgresql', 'PostgreSQL'), ('sqlserver', 'SQLServer'), ('redis', 'Redis'), ('mongodb', 'MongoDB'), ('clickhouse', 'ClickHouse'), ('k8s', 'K8S')], default='ssh', max_length=16, verbose_name='Protocol'),
        ),
    ]
@@ -11,7 +11,7 @@ from django.db import models
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework.exceptions import ValidationError
|
||||
|
||||
from common.fields.model import JsonDictTextField
|
||||
from common.db.fields import JsonDictTextField
|
||||
from common.utils import lazyproperty
|
||||
from orgs.mixins.models import OrgModelMixin, OrgManager
|
||||
|
||||
@@ -116,9 +116,9 @@ class NodesRelationMixin:
|
||||
nodes = []
|
||||
for node in self.get_nodes():
|
||||
_nodes = node.get_ancestors(with_self=True)
|
||||
nodes.append(_nodes)
|
||||
nodes.extend(list(_nodes))
|
||||
if flat:
|
||||
nodes = list(reduce(lambda x, y: set(x) | set(y), nodes))
|
||||
nodes = list(set([node.id for node in nodes]))
|
||||
return nodes
|
||||
|
||||
|
||||
@@ -301,7 +301,7 @@ class Asset(AbsConnectivity, AbsHardwareInfo, ProtocolsMixin, NodesRelationMixin
|
||||
'private_key': auth_user.private_key_file
|
||||
}
|
||||
|
||||
if not with_become:
|
||||
if not with_become or self.is_windows():
|
||||
return info
|
||||
|
||||
if become_user:
|
||||
|
||||
@@ -29,7 +29,9 @@ class AuthBook(BaseUser, AbsConnectivity):
|
||||
permissions = [
|
||||
('test_authbook', _('Can test asset account connectivity')),
|
||||
('view_assetaccountsecret', _('Can view asset account secret')),
|
||||
('change_assetaccountsecret', _('Can change asset account secret'))
|
||||
('change_assetaccountsecret', _('Can change asset account secret')),
|
||||
('view_assethistoryaccount', _('Can view asset history account')),
|
||||
('view_assethistoryaccountsecret', _('Can view asset history account secret')),
|
||||
]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
|
||||
@@ -1,28 +1,27 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import io
|
||||
import os
|
||||
import uuid
|
||||
from hashlib import md5
|
||||
|
||||
import sshpubkeys
|
||||
from django.conf import settings
|
||||
from django.core.cache import cache
|
||||
from django.db import models
|
||||
from django.db.models import QuerySet
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.conf import settings
|
||||
from django.db.models import QuerySet
|
||||
|
||||
from common.utils import random_string, signer
|
||||
from common.db import fields
|
||||
from common.utils import random_string
|
||||
from common.utils import (
|
||||
ssh_key_string_to_obj, ssh_key_gen, get_logger, lazyproperty
|
||||
)
|
||||
from common.utils.encode import ssh_pubkey_gen
|
||||
from common.validators import alphanumeric
|
||||
from common import fields
|
||||
from common.utils.encode import (
|
||||
parse_ssh_public_key_str, parse_ssh_private_key_str
|
||||
)
|
||||
from orgs.mixins.models import OrgModelMixin
|
||||
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
@@ -64,16 +63,16 @@ class AuthMixin:
|
||||
|
||||
@property
|
||||
def ssh_key_fingerprint(self):
|
||||
public_key = None
|
||||
if self.public_key:
|
||||
public_key = self.public_key
|
||||
elif self.private_key:
|
||||
try:
|
||||
public_key = ssh_pubkey_gen(private_key=self.private_key, password=self.password)
|
||||
public_key = parse_ssh_public_key_str(self.private_key, password=self.password)
|
||||
except IOError as e:
|
||||
return str(e)
|
||||
else:
|
||||
if not public_key:
|
||||
return ''
|
||||
|
||||
public_key_obj = sshpubkeys.SSHKey(public_key)
|
||||
fingerprint = public_key_obj.hash_md5()
|
||||
return fingerprint
|
||||
@@ -88,24 +87,27 @@ class AuthMixin:
|
||||
|
||||
@property
|
||||
def private_key_file(self):
|
||||
if not self.private_key_obj:
|
||||
if not self.private_key:
|
||||
return None
|
||||
private_key_str = parse_ssh_private_key_str(self.private_key,
|
||||
password=self.password)
|
||||
if not private_key_str:
|
||||
return None
|
||||
project_dir = settings.PROJECT_DIR
|
||||
tmp_dir = os.path.join(project_dir, 'tmp')
|
||||
key_name = '.' + md5(self.private_key.encode('utf-8')).hexdigest()
|
||||
key_path = os.path.join(tmp_dir, key_name)
|
||||
if not os.path.exists(key_path):
|
||||
self.private_key_obj.write_private_key_file(key_path)
|
||||
with open(key_path, 'w') as f:
|
||||
f.write(private_key_str)
|
||||
os.chmod(key_path, 0o400)
|
||||
return key_path
|
||||
|
||||
def get_private_key(self):
|
||||
if not self.private_key_obj:
|
||||
if not self.private_key:
|
||||
return None
|
||||
string_io = io.StringIO()
|
||||
self.private_key_obj.write_private_key(string_io)
|
||||
private_key = string_io.getvalue()
|
||||
return private_key
|
||||
return parse_ssh_private_key_str(self.private_key,
|
||||
password=self.password)
|
||||
|
||||
@property
|
||||
def public_key_obj(self):
|
||||
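The rewritten private_key_file parses the stored key and caches it on disk under a name derived from the key text, so repeated connections reuse one file, and 0o400 keeps it readable by the owner only. A standalone sketch of that naming and permission scheme (the directory and sample key are illustrative):

import os
from hashlib import md5

def key_path_for(private_key_str, tmp_dir='/tmp/jms-keys'):
    os.makedirs(tmp_dir, exist_ok=True)
    # Same key text -> same file name, so the key is written only once.
    name = '.' + md5(private_key_str.encode('utf-8')).hexdigest()
    path = os.path.join(tmp_dir, name)
    if not os.path.exists(path):
        with open(path, 'w') as f:
            f.write(private_key_str)
        os.chmod(path, 0o400)  # owner read-only
    return path

print(key_path_for('-----BEGIN OPENSSH PRIVATE KEY-----\n...'))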
@@ -234,4 +236,3 @@ class BaseUser(OrgModelMixin, AuthMixin):
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from users.models import User, UserGroup
|
||||
from applications.models import Application
|
||||
from ..models import SystemUser, Asset
|
||||
from ..models import SystemUser, Asset, Node
|
||||
|
||||
from common.utils import lazyproperty, get_logger, get_object_or_none
|
||||
from orgs.mixins.models import OrgModelMixin
|
||||
@@ -33,6 +33,10 @@ class CommandFilter(OrgModelMixin):
|
||||
'users.UserGroup', related_name='cmd_filters', blank=True,
|
||||
verbose_name=_("User group"),
|
||||
)
|
||||
nodes = models.ManyToManyField(
|
||||
'assets.Node', related_name='cmd_filters', blank=True,
|
||||
verbose_name=_("Nodes")
|
||||
)
|
||||
assets = models.ManyToManyField(
|
||||
'assets.Asset', related_name='cmd_filters', blank=True,
|
||||
verbose_name=_("Asset")
|
||||
@@ -46,8 +50,8 @@ class CommandFilter(OrgModelMixin):
|
||||
)
|
||||
is_active = models.BooleanField(default=True, verbose_name=_('Is active'))
|
||||
comment = models.TextField(blank=True, default='', verbose_name=_("Comment"))
|
||||
date_created = models.DateTimeField(auto_now_add=True)
|
||||
date_updated = models.DateTimeField(auto_now=True)
|
||||
date_created = models.DateTimeField(auto_now_add=True, verbose_name=_('Date created'))
|
||||
date_updated = models.DateTimeField(auto_now=True, verbose_name=_('Date updated'))
|
||||
created_by = models.CharField(
|
||||
max_length=128, blank=True, default='', verbose_name=_('Created by')
|
||||
)
|
||||
@@ -125,6 +129,9 @@ class CommandFilterRule(OrgModelMixin):
regex.append(cmd)
continue

if not cmd:
continue

# If it is a single character
if cmd[-1].isalpha():
regex.append(r'\b{0}\b'.format(cmd))
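The rule builder wraps a command that ends in a letter with \b word boundaries, so a rule for 'rm' matches 'rm -rf /tmp' but not 'rmdir'. A quick standalone check of that behaviour:

import re

pattern = re.compile(r'\brm\b')
print(bool(pattern.search('rm -rf /tmp')))   # True
print(bool(pattern.search('rmdir /tmp/x')))  # False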
@@ -165,29 +172,29 @@ class CommandFilterRule(OrgModelMixin):
|
||||
|
||||
def create_command_confirm_ticket(self, run_command, session, cmd_filter_rule, org_id):
|
||||
from tickets.const import TicketType
|
||||
from tickets.models import Ticket
|
||||
from tickets.models import ApplyCommandTicket
|
||||
data = {
|
||||
'title': _('Command confirm') + ' ({})'.format(session.user),
|
||||
'type': TicketType.command_confirm,
|
||||
'meta': {
|
||||
'apply_run_user': session.user,
|
||||
'apply_run_asset': session.asset,
|
||||
'apply_run_system_user': session.system_user,
|
||||
'apply_run_command': run_command,
|
||||
'apply_from_session_id': str(session.id),
|
||||
'apply_from_cmd_filter_rule_id': str(cmd_filter_rule.id),
|
||||
'apply_from_cmd_filter_id': str(cmd_filter_rule.filter.id)
|
||||
},
|
||||
'applicant': session.user_obj,
|
||||
'apply_run_user_id': session.user_id,
|
||||
'apply_run_asset': str(session.asset),
|
||||
'apply_run_system_user_id': session.system_user_id,
|
||||
'apply_run_command': run_command[:4090],
|
||||
'apply_from_session_id': str(session.id),
|
||||
'apply_from_cmd_filter_rule_id': str(cmd_filter_rule.id),
|
||||
'apply_from_cmd_filter_id': str(cmd_filter_rule.filter.id),
|
||||
'org_id': org_id,
|
||||
}
|
||||
ticket = Ticket.objects.create(**data)
|
||||
ticket.create_process_map_and_node(self.reviewers.all())
|
||||
ticket.open(applicant=session.user_obj)
|
||||
ticket = ApplyCommandTicket.objects.create(**data)
|
||||
assignees = self.reviewers.all()
|
||||
ticket.open_by_system(assignees)
|
||||
return ticket
|
||||
|
||||
@classmethod
|
||||
def get_queryset(cls, user_id=None, user_group_id=None, system_user_id=None,
|
||||
asset_id=None, application_id=None, org_id=None):
|
||||
asset_id=None, node_id=None, application_id=None, org_id=None):
|
||||
# user & user_group
|
||||
user_groups = []
|
||||
user = get_object_or_none(User, pk=user_id)
|
||||
if user:
|
||||
@@ -196,8 +203,18 @@ class CommandFilterRule(OrgModelMixin):
|
||||
if user_group:
|
||||
org_id = user_group.org_id
|
||||
user_groups.append(user_group)
|
||||
system_user = get_object_or_none(SystemUser, pk=system_user_id)
|
||||
|
||||
# asset & node
|
||||
nodes = []
|
||||
asset = get_object_or_none(Asset, pk=asset_id)
|
||||
if asset:
|
||||
nodes.extend(asset.get_all_nodes())
|
||||
node = get_object_or_none(Node, pk=node_id)
|
||||
if node:
|
||||
org_id = node.org_id
|
||||
nodes.extend(list(node.get_ancestors(with_self=True)))
|
||||
|
||||
system_user = get_object_or_none(SystemUser, pk=system_user_id)
|
||||
application = get_object_or_none(Application, pk=application_id)
|
||||
q = Q()
|
||||
if user:
|
||||
@@ -210,6 +227,8 @@ class CommandFilterRule(OrgModelMixin):
|
||||
if asset:
|
||||
org_id = asset.org_id
|
||||
q |= Q(assets=asset)
|
||||
if nodes:
|
||||
q |= Q(nodes__in=set(nodes))
|
||||
if application:
|
||||
org_id = application.org_id
|
||||
q |= Q(applications=application)
|
||||
|
||||
@@ -9,7 +9,7 @@ import paramiko
|
||||
from django.db import models
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from common.utils import get_logger
|
||||
from common.utils import get_logger, lazyproperty
|
||||
from orgs.mixins.models import OrgModelMixin
|
||||
from .base import BaseUser
|
||||
|
||||
@@ -36,7 +36,7 @@ class Domain(OrgModelMixin):
|
||||
def has_gateway(self):
|
||||
return self.gateway_set.filter(is_active=True).exists()
|
||||
|
||||
@property
|
||||
@lazyproperty
|
||||
def gateways(self):
|
||||
return self.gateway_set.filter(is_active=True)
|
||||
|
||||
@@ -44,8 +44,9 @@ class Domain(OrgModelMixin):
|
||||
gateways = [gw for gw in self.gateways if gw.is_connective]
|
||||
if gateways:
|
||||
return random.choice(gateways)
|
||||
else:
|
||||
logger.warn(f'Gateway all bad. domain={self}, gateway_num={len(self.gateways)}.')
|
||||
|
||||
logger.warn(f'Gateway all bad. domain={self}, gateway_num={len(self.gateways)}.')
|
||||
if self.gateways:
|
||||
return random.choice(self.gateways)
|
||||
|
||||
|
||||
|
||||
@@ -25,7 +25,6 @@ from orgs.mixins.models import OrgModelMixin, OrgManager
|
||||
from orgs.utils import get_current_org, tmp_to_org, tmp_to_root_org
|
||||
from orgs.models import Organization
|
||||
|
||||
|
||||
__all__ = ['Node', 'FamilyMixin', 'compute_parent_key', 'NodeQuerySet']
|
||||
logger = get_logger(__name__)
|
||||
|
||||
@@ -98,6 +97,14 @@ class FamilyMixin:
|
||||
q |= Q(key=self.key)
|
||||
return Node.objects.filter(q)
|
||||
|
||||
@classmethod
|
||||
def get_ancestor_queryset(cls, queryset, with_self=True):
|
||||
parent_keys = set()
|
||||
for i in queryset:
|
||||
parent_keys.update(set(i.get_ancestor_keys(with_self=with_self)))
|
||||
queryset = queryset.model.objects.filter(key__in=list(parent_keys)).distinct()
|
||||
return queryset
|
||||
|
||||
@property
|
||||
def children(self):
|
||||
return self.get_children(with_self=False)
|
||||
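get_ancestor_queryset pulls the ancestors of every node in a queryset back in, which is what lets the tree API above return search hits together with their full path. JumpServer node keys encode that path ('1:2:5' is a child of '1:2'), so the key expansion it relies on looks roughly like this standalone sketch:

def ancestor_keys(key, with_self=True):
    # '1:2:5' -> ['1', '1:2'] (+ '1:2:5' itself when with_self is True)
    parts = key.split(':')
    keys = [':'.join(parts[:i]) for i in range(1, len(parts))]
    if with_self:
        keys.append(key)
    return keys

matched = ['1:2:5', '1:7']
wanted = set()
for key in matched:
    wanted.update(ancestor_keys(key))
print(sorted(wanted))  # ['1', '1:2', '1:2:5', '1:7']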
@@ -396,7 +403,7 @@ class NodeAllAssetsMappingMixin:
|
||||
mapping[ancestor_key].update(asset_ids)
|
||||
|
||||
t3 = time.time()
|
||||
logger.info('t1-t2(DB Query): {} s, t3-t2(Generate mapping): {} s'.format(t2-t1, t3-t2))
|
||||
logger.info('t1-t2(DB Query): {} s, t3-t2(Generate mapping): {} s'.format(t2 - t1, t3 - t2))
|
||||
return mapping
|
||||
|
||||
|
||||
|
||||
@@ -34,6 +34,7 @@ class ProtocolMixin:
|
||||
sqlserver = 'sqlserver', 'SQLServer'
|
||||
redis = 'redis', 'Redis'
|
||||
mongodb = 'mongodb', 'MongoDB'
|
||||
clickhouse = 'clickhouse', 'ClickHouse'
|
||||
k8s = 'k8s', 'K8S'
|
||||
|
||||
SUPPORT_PUSH_PROTOCOLS = [Protocol.ssh, Protocol.rdp]
|
||||
@@ -46,7 +47,7 @@ class ProtocolMixin:
|
||||
]
|
||||
APPLICATION_CATEGORY_DB_PROTOCOLS = [
|
||||
Protocol.mysql, Protocol.mariadb, Protocol.oracle,
|
||||
Protocol.postgresql, Protocol.sqlserver,
|
||||
Protocol.postgresql, Protocol.sqlserver, Protocol.clickhouse,
|
||||
Protocol.redis, Protocol.mongodb
|
||||
]
|
||||
APPLICATION_CATEGORY_CLOUD_PROTOCOLS = [
|
||||
@@ -133,6 +134,15 @@ class AuthMixin:
|
||||
self.password = password
|
||||
|
||||
def load_app_more_auth(self, app_id=None, username=None, user_id=None):
|
||||
# Clear the authentication info
|
||||
self._clean_auth_info_if_manual_login_mode()
|
||||
|
||||
# Load temporary authentication info first
|
||||
if self.login_mode == self.LOGIN_MANUAL:
|
||||
self._load_tmp_auth_if_has(app_id, user_id)
|
||||
return
|
||||
|
||||
# Remote app
|
||||
from applications.models import Application
|
||||
app = get_object_or_none(Application, pk=app_id)
|
||||
if app and app.category_remote_app:
|
||||
@@ -141,11 +151,6 @@ class AuthMixin:
|
||||
return
|
||||
|
||||
# Other app
|
||||
self._clean_auth_info_if_manual_login_mode()
|
||||
# Load temporary authentication info
|
||||
if self.login_mode == self.LOGIN_MANUAL:
|
||||
self._load_tmp_auth_if_has(app_id, user_id)
|
||||
return
|
||||
# Update the username
|
||||
from users.models import User
|
||||
user = get_object_or_none(User, pk=user_id) if user_id else None
|
||||
|
||||
@@ -20,6 +20,6 @@ class AccountBackupExecutionTaskMsg(object):
|
||||
"please go to personal information -> file encryption password to set the encryption password").format(name)
|
||||
|
||||
def publish(self, attachment_list=None):
|
||||
send_mail_attachment_async.delay(
|
||||
send_mail_attachment_async(
|
||||
self.subject, self.message, [self.user.email], attachment_list
|
||||
)
|
||||
|
||||
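The publish() change above drops Celery's .delay(), so the backup mail is now sent in the calling process (presumably already inside a background task) rather than being queued a second time. For reference, the general difference, shown with a made-up task:

from celery import shared_task

@shared_task
def send_report(to):
    print('sending to', to)

# send_report.delay('a@b.c')   # enqueue for a worker (needs a broker configured)
send_report('a@b.c')           # plain call: runs synchronously, right here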
@@ -11,4 +11,5 @@ from .cmd_filter import *
|
||||
from .gathered_user import *
|
||||
from .favorite_asset import *
|
||||
from .account import *
|
||||
from .account_history import *
|
||||
from .backup import *
|
||||
|
||||
@@ -5,8 +5,8 @@ from assets.models import AuthBook
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
|
||||
from .base import AuthSerializerMixin
|
||||
from .utils import validate_password_contains_left_double_curly_bracket
|
||||
from common.utils.encode import ssh_pubkey_gen
|
||||
from common.drf.serializers import SecretReadableMixin
|
||||
|
||||
|
||||
class AccountSerializer(AuthSerializerMixin, BulkOrgResourceModelSerializer):
|
||||
@@ -31,10 +31,6 @@ class AccountSerializer(AuthSerializerMixin, BulkOrgResourceModelSerializer):
|
||||
fields = fields_small + fields_fk
|
||||
extra_kwargs = {
|
||||
'username': {'required': True},
|
||||
'password': {
|
||||
'write_only': True,
|
||||
"validators": [validate_password_contains_left_double_curly_bracket]
|
||||
},
|
||||
'private_key': {'write_only': True},
|
||||
'public_key': {'write_only': True},
|
||||
'systemuser_display': {'label': _('System user display')}
|
||||
@@ -57,7 +53,15 @@ class AccountSerializer(AuthSerializerMixin, BulkOrgResourceModelSerializer):
|
||||
return attrs
|
||||
|
||||
def get_protocols(self, v):
|
||||
return v.protocols.replace(' ', ', ')
|
||||
""" protocols 是 queryset 中返回的,Post 创建成功后返回序列化时没有这个字段 """
|
||||
if hasattr(v, 'protocols'):
|
||||
protocols = v.protocols
|
||||
elif hasattr(v, 'asset') and v.asset:
|
||||
protocols = v.asset.protocols
|
||||
else:
|
||||
protocols = ''
|
||||
protocols = protocols.replace(' ', ', ')
|
||||
return protocols
|
||||
|
||||
@classmethod
|
||||
def setup_eager_loading(cls, queryset):
|
||||
@@ -70,12 +74,8 @@ class AccountSerializer(AuthSerializerMixin, BulkOrgResourceModelSerializer):
|
||||
return super().to_representation(instance)
|
||||
|
||||
|
||||
class AccountSecretSerializer(AccountSerializer):
|
||||
class AccountSecretSerializer(SecretReadableMixin, AccountSerializer):
|
||||
class Meta(AccountSerializer.Meta):
|
||||
fields_backup = [
|
||||
'hostname', 'ip', 'platform', 'protocols', 'username', 'password',
|
||||
'private_key', 'public_key', 'date_created', 'date_updated', 'version'
|
||||
]
|
||||
extra_kwargs = {
|
||||
'password': {'write_only': False},
|
||||
'private_key': {'write_only': False},
|
||||
@@ -84,6 +84,22 @@ class AccountSecretSerializer(AccountSerializer):
|
||||
}
|
||||
|
||||
|
||||
class AccountBackUpSerializer(AccountSecretSerializer):
|
||||
class Meta(AccountSecretSerializer.Meta):
|
||||
fields = [
|
||||
'id', 'hostname', 'ip', 'username', 'password',
|
||||
'private_key', 'public_key', 'date_created',
|
||||
'date_updated', 'version'
|
||||
]
|
||||
|
||||
@classmethod
|
||||
def setup_eager_loading(cls, queryset):
|
||||
return queryset
|
||||
|
||||
def to_representation(self, instance):
|
||||
return super(AccountSerializer, self).to_representation(instance)
|
||||
|
||||
|
||||
class AccountTaskSerializer(serializers.Serializer):
|
||||
ACTION_CHOICES = (
|
||||
('test', 'test'),
|
||||
|
||||
38
apps/assets/serializers/account_history.py
Normal file
@@ -0,0 +1,38 @@
from rest_framework import serializers
from django.utils.translation import ugettext_lazy as _

from assets.models import AuthBook
from common.drf.serializers import SecretReadableMixin
from .account import AccountSerializer, AccountSecretSerializer


class AccountHistorySerializer(AccountSerializer):
    systemuser_display = serializers.SerializerMethodField(label=_('System user display'))

    class Meta:
        model = AuthBook.history.model
        fields = AccountSerializer.Meta.fields_mini + \
                 AccountSerializer.Meta.fields_write_only + \
                 AccountSerializer.Meta.fields_fk + \
                 ['history_id', 'date_created', 'date_updated']
        read_only_fields = fields
        ref_name = 'AccountHistorySerializer'

    @staticmethod
    def get_systemuser_display(instance):
        if not instance.systemuser:
            return ''
        return str(instance.systemuser)

    def get_field_names(self, declared_fields, info):
        fields = super().get_field_names(declared_fields, info)
        fields = list(set(fields) - {'org_name'})
        return fields

    def to_representation(self, instance):
        return super(AccountSerializer, self).to_representation(instance)


class AccountHistorySecretSerializer(SecretReadableMixin, AccountHistorySerializer):
    class Meta(AccountHistorySerializer.Meta):
        extra_kwargs = AccountSecretSerializer.Meta.extra_kwargs
@@ -189,6 +189,9 @@ class PlatformSerializer(serializers.ModelSerializer):
|
||||
'id', 'name', 'base', 'charset',
|
||||
'internal', 'meta', 'comment'
|
||||
]
|
||||
extra_kwargs = {
|
||||
'internal': {'read_only': True},
|
||||
}
|
||||
|
||||
|
||||
class AssetSimpleSerializer(serializers.ModelSerializer):
|
||||
|
||||
@@ -1,22 +1,24 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from io import StringIO
|
||||
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from common.utils import ssh_pubkey_gen, ssh_private_key_gen, validate_ssh_private_key
|
||||
from assets.models import Type
|
||||
from common.drf.fields import EncryptedField
|
||||
from common.utils import validate_ssh_private_key, parse_ssh_private_key_str, parse_ssh_public_key_str
|
||||
from .utils import validate_password_for_ansible
|
||||
|
||||
|
||||
class AuthSerializer(serializers.ModelSerializer):
|
||||
password = serializers.CharField(required=False, allow_blank=True, allow_null=True, max_length=1024)
|
||||
private_key = serializers.CharField(required=False, allow_blank=True, allow_null=True, max_length=4096)
|
||||
password = EncryptedField(required=False, allow_blank=True, allow_null=True, max_length=1024, label=_('Password'))
|
||||
private_key = EncryptedField(required=False, allow_blank=True, allow_null=True, max_length=16384,
|
||||
label=_('Private key'))
|
||||
|
||||
def gen_keys(self, private_key=None, password=None):
|
||||
if private_key is None:
|
||||
return None, None
|
||||
public_key = ssh_pubkey_gen(private_key=private_key, password=password)
|
||||
public_key = parse_ssh_public_key_str(text=private_key, password=password)
|
||||
return private_key, public_key
|
||||
|
||||
def save(self, **kwargs):
|
||||
@@ -31,6 +33,13 @@ class AuthSerializer(serializers.ModelSerializer):
|
||||
|
||||
|
||||
class AuthSerializerMixin(serializers.ModelSerializer):
|
||||
password = EncryptedField(
|
||||
label=_('Password'), required=False, allow_blank=True, allow_null=True, max_length=1024,
|
||||
validators=[validate_password_for_ansible]
|
||||
)
|
||||
private_key = EncryptedField(
|
||||
label=_('SSH private key'), required=False, allow_blank=True, allow_null=True, max_length=16384
|
||||
)
|
||||
passphrase = serializers.CharField(
|
||||
allow_blank=True, allow_null=True, required=False, max_length=512,
|
||||
write_only=True, label=_('Key password')
|
||||
@@ -48,10 +57,7 @@ class AuthSerializerMixin(serializers.ModelSerializer):
|
||||
if not valid:
|
||||
raise serializers.ValidationError(_("private key invalid or passphrase error"))
|
||||
|
||||
private_key = ssh_private_key_gen(private_key, password=passphrase)
|
||||
string_io = StringIO()
|
||||
private_key.write_private_key(string_io)
|
||||
private_key = string_io.getvalue()
|
||||
private_key = parse_ssh_private_key_str(private_key, password=passphrase)
|
||||
return private_key
|
||||
|
||||
def validate_public_key(self, public_key):
|
||||
|
||||
@@ -21,7 +21,7 @@ class CommandFilterSerializer(BulkOrgResourceModelSerializer):
|
||||
'comment', 'created_by',
|
||||
]
|
||||
fields_fk = ['rules']
|
||||
fields_m2m = ['users', 'user_groups', 'system_users', 'assets', 'applications']
|
||||
fields_m2m = ['users', 'user_groups', 'system_users', 'nodes', 'assets', 'applications']
|
||||
fields = fields_small + fields_fk + fields_m2m
|
||||
extra_kwargs = {
|
||||
'rules': {'read_only': True},
|
||||
|
||||
@@ -5,6 +5,7 @@ from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from common.validators import alphanumeric
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
from common.drf.serializers import SecretReadableMixin
|
||||
from ..models import Domain, Gateway
|
||||
from .base import AuthSerializerMixin
|
||||
|
||||
@@ -67,7 +68,7 @@ class GatewaySerializer(AuthSerializerMixin, BulkOrgResourceModelSerializer):
|
||||
}
|
||||
|
||||
|
||||
class GatewayWithAuthSerializer(GatewaySerializer):
|
||||
class GatewayWithAuthSerializer(SecretReadableMixin, GatewaySerializer):
|
||||
class Meta(GatewaySerializer.Meta):
|
||||
extra_kwargs = {
|
||||
'password': {'write_only': False},
|
||||
|
||||
@@ -1,14 +1,16 @@
|
||||
from rest_framework import serializers
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.db.models import Count
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from common.drf.fields import EncryptedField
|
||||
from common.drf.serializers import SecretReadableMixin
|
||||
from common.mixins.serializers import BulkSerializerMixin
|
||||
from common.utils import ssh_pubkey_gen
|
||||
from common.utils import parse_ssh_public_key_str
|
||||
from common.validators import alphanumeric_re, alphanumeric_cn_re, alphanumeric_win_re
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
from ..models import SystemUser, Asset
|
||||
from .utils import validate_password_contains_left_double_curly_bracket
|
||||
from .base import AuthSerializerMixin
|
||||
from .utils import validate_password_for_ansible
|
||||
from ..models import SystemUser, Asset
|
||||
|
||||
__all__ = [
|
||||
'SystemUserSerializer', 'MiniSystemUserSerializer',
|
||||
@@ -23,9 +25,17 @@ class SystemUserSerializer(AuthSerializerMixin, BulkOrgResourceModelSerializer):
|
||||
"""
|
||||
系统用户
|
||||
"""
|
||||
password = EncryptedField(
|
||||
label=_('Password'), required=False, allow_blank=True, allow_null=True, max_length=1024,
|
||||
trim_whitespace=False, validators=[validate_password_for_ansible],
|
||||
write_only=True
|
||||
)
|
||||
auto_generate_key = serializers.BooleanField(initial=True, required=False, write_only=True)
|
||||
type_display = serializers.ReadOnlyField(source='get_type_display', label=_('Type display'))
|
||||
ssh_key_fingerprint = serializers.ReadOnlyField(label=_('SSH key fingerprint'))
|
||||
token = EncryptedField(
|
||||
label=_('Token'), required=False, write_only=True, style={'base_template': 'textarea.html'}
|
||||
)
|
||||
applications_amount = serializers.IntegerField(
|
||||
source='apps_amount', read_only=True, label=_('Apps amount')
|
||||
)
|
||||
@@ -46,15 +56,9 @@ class SystemUserSerializer(AuthSerializerMixin, BulkOrgResourceModelSerializer):
|
||||
fields_m2m = ['cmd_filters', 'assets_amount', 'applications_amount', 'nodes']
|
||||
fields = fields_small + fields_m2m
|
||||
extra_kwargs = {
|
||||
'password': {
|
||||
"write_only": True,
|
||||
'trim_whitespace': False,
|
||||
"validators": [validate_password_contains_left_double_curly_bracket]
|
||||
},
|
||||
'cmd_filters': {"required": False, 'label': _('Command filter')},
|
||||
'public_key': {"write_only": True},
|
||||
'private_key': {"write_only": True},
|
||||
'token': {"write_only": True},
|
||||
'nodes_amount': {'label': _('Nodes amount')},
|
||||
'assets_amount': {'label': _('Assets amount')},
|
||||
'login_mode_display': {'label': _('Login mode display')},
|
||||
@@ -210,7 +214,7 @@ class SystemUserSerializer(AuthSerializerMixin, BulkOrgResourceModelSerializer):
|
||||
elif attrs.get('private_key'):
|
||||
private_key = attrs['private_key']
|
||||
password = attrs.get('password')
|
||||
public_key = ssh_pubkey_gen(private_key, password=password, username=username)
|
||||
public_key = parse_ssh_public_key_str(private_key, password=password)
|
||||
attrs['public_key'] = public_key
|
||||
return attrs
|
||||
|
||||
@@ -248,7 +252,7 @@ class MiniSystemUserSerializer(serializers.ModelSerializer):
|
||||
fields = SystemUserSerializer.Meta.fields_mini
|
||||
|
||||
|
||||
class SystemUserWithAuthInfoSerializer(SystemUserSerializer):
|
||||
class SystemUserWithAuthInfoSerializer(SecretReadableMixin, SystemUserSerializer):
|
||||
class Meta(SystemUserSerializer.Meta):
|
||||
fields_mini = ['id', 'name', 'username']
|
||||
fields_write_only = ['password', 'public_key', 'private_key']
|
||||
@@ -264,6 +268,9 @@ class SystemUserWithAuthInfoSerializer(SystemUserSerializer):
|
||||
'assets_amount': {'label': _('Asset')},
|
||||
'login_mode_display': {'label': _('Login mode display')},
|
||||
'created_by': {'read_only': True},
|
||||
'password': {'write_only': False},
|
||||
'private_key': {'write_only': False},
|
||||
'token': {'write_only': False}
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -1,9 +1,25 @@
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from common.utils import validate_ssh_private_key, parse_ssh_private_key_str
|
||||
|
||||
def validate_password_contains_left_double_curly_bracket(password):
|
||||
|
||||
def validate_password_for_ansible(password):
|
||||
""" 校验 Ansible 不支持的特殊字符 """
|
||||
# validate password contains left double curly bracket
|
||||
# check password not contains `{{`
|
||||
# Not supported when pushing with Ansible
|
||||
if '{{' in password:
|
||||
raise serializers.ValidationError(_('Password can not contains `{{` '))
|
||||
# Not supported when pushing to Windows with Ansible
|
||||
if "'" in password:
|
||||
raise serializers.ValidationError(_("Password can not contains `'` "))
|
||||
if '"' in password:
|
||||
raise serializers.ValidationError(_('Password can not contains `"` '))
|
||||
|
||||
|
||||
def validate_ssh_key(ssh_key, passphrase=None):
|
||||
valid = validate_ssh_private_key(ssh_key, password=passphrase)
|
||||
if not valid:
|
||||
raise serializers.ValidationError(_("private key invalid or passphrase error"))
|
||||
return parse_ssh_private_key_str(ssh_key, passphrase)
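For context, both the old path (`ssh_private_key_gen` written back through a `StringIO`) and the new `parse_ssh_private_key_str` call normalize whatever key text the user pasted into a clean serialized key. Those helpers live in `common.utils` and are not part of this diff; the sketch below only illustrates the same idea directly with paramiko, and every name in it is hypothetical.

```python
# Illustration only -- not the repository's implementation.
from io import StringIO

import paramiko


def normalize_private_key(key_str, passphrase=None):
    """Try to load the pasted key and re-serialize it, or return None if invalid."""
    for key_cls in (paramiko.RSAKey, paramiko.ECDSAKey, paramiko.DSSKey):
        try:
            key = key_cls.from_private_key(StringIO(key_str), password=passphrase)
        except (paramiko.SSHException, ValueError):
            continue
        out = StringIO()
        key.write_private_key(out)  # same re-serialization step the old code performed
        return out.getvalue()
    return None
```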
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import os
|
||||
import threading
|
||||
|
||||
from django.db.models.signals import (
|
||||
m2m_changed, post_save, post_delete
|
||||
@@ -9,15 +7,15 @@ from django.db.models.signals import (
|
||||
from django.dispatch import receiver
|
||||
from django.utils.functional import LazyObject
|
||||
|
||||
from common.signals import django_ready
|
||||
from common.utils.connection import RedisPubSub
|
||||
from common.utils import get_logger
|
||||
from assets.models import Asset, Node
|
||||
from common.signals import django_ready
|
||||
from common.utils import get_logger
|
||||
from common.utils.connection import RedisPubSub
|
||||
from orgs.models import Organization
|
||||
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
# clear node assets mapping for memory
|
||||
# ------------------------------------
|
||||
|
||||
@@ -78,9 +76,4 @@ def subscribe_node_assets_mapping_expire(sender, **kwargs):
|
||||
Node.expire_node_all_asset_ids_mapping_from_memory(org_id)
|
||||
Node.expire_node_all_asset_ids_mapping_from_memory(root_org_id)
|
||||
|
||||
def keep_subscribe_node_assets_relation():
|
||||
node_assets_mapping_for_memory_pub_sub.subscribe(handle_node_relation_change)
|
||||
|
||||
t = threading.Thread(target=keep_subscribe_node_assets_relation)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
node_assets_mapping_for_memory_pub_sub.subscribe(handle_node_relation_change)
|
||||
|
||||
@@ -1,18 +1,19 @@
|
||||
import os
|
||||
import time
|
||||
import pandas as pd
|
||||
from openpyxl import Workbook
|
||||
from collections import defaultdict, OrderedDict
|
||||
|
||||
from django.conf import settings
|
||||
from django.db.models import F
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from assets.models import AuthBook
|
||||
from assets.serializers import AccountSecretSerializer
|
||||
from assets.models import AuthBook, SystemUser, Asset
|
||||
from assets.serializers import AccountBackUpSerializer
|
||||
from assets.notifications import AccountBackupExecutionTaskMsg
|
||||
from applications.models import Account
|
||||
from applications.models import Account, Application
|
||||
from applications.const import AppType
|
||||
from applications.serializers import AppAccountSecretSerializer
|
||||
from applications.serializers import AppAccountBackUpSerializer
|
||||
from users.models import User
|
||||
from common.utils import get_logger
|
||||
from common.utils.timezone import local_now_display
|
||||
@@ -38,7 +39,7 @@ class BaseAccountHandler:
|
||||
@classmethod
|
||||
def get_header_fields(cls, serializer: serializers.Serializer):
|
||||
try:
|
||||
backup_fields = getattr(serializer, 'Meta').fields_backup
|
||||
backup_fields = getattr(serializer, 'Meta').fields
|
||||
except AttributeError:
|
||||
backup_fields = serializer.fields.keys()
|
||||
header_fields = {}
|
||||
@@ -48,20 +49,44 @@ class BaseAccountHandler:
|
||||
_fields = cls.get_header_fields(v)
|
||||
header_fields.update(_fields)
|
||||
else:
|
||||
header_fields[field] = v.label
|
||||
header_fields[field] = str(v.label)
|
||||
return header_fields
|
||||
|
||||
@staticmethod
|
||||
def load_auth(tp, value, system_user):
|
||||
if value:
|
||||
return value
|
||||
if system_user:
|
||||
return getattr(system_user, tp, '')
|
||||
return ''
|
||||
|
||||
@classmethod
|
||||
def create_row(cls, account, serializer_cls, header_fields=None):
|
||||
serializer = serializer_cls(account)
|
||||
if not header_fields:
|
||||
header_fields = cls.get_header_fields(serializer)
|
||||
data = cls.unpack_data(serializer.data)
|
||||
def replace_auth(cls, account, system_user_dict):
|
||||
system_user = system_user_dict.get(account.systemuser_id)
|
||||
account.username = cls.load_auth('username', account.username, system_user)
|
||||
account.password = cls.load_auth('password', account.password, system_user)
|
||||
account.private_key = cls.load_auth('private_key', account.private_key, system_user)
|
||||
account.public_key = cls.load_auth('public_key', account.public_key, system_user)
|
||||
return account
|
||||
|
||||
@classmethod
|
||||
def create_row(cls, data, header_fields):
|
||||
data = cls.unpack_data(data)
|
||||
row_dict = {}
|
||||
for field, header_name in header_fields.items():
|
||||
row_dict[header_name] = data[field]
|
||||
row_dict[header_name] = str(data.get(field, field))
|
||||
return row_dict
|
||||
|
||||
@classmethod
|
||||
def add_rows(cls, data, header_fields, sheet):
|
||||
data_map = defaultdict(list)
|
||||
for i in data:
|
||||
row = cls.create_row(i, header_fields)
|
||||
if sheet not in data_map:
|
||||
data_map[sheet].append(list(row.keys()))
|
||||
data_map[sheet].append(list(row.values()))
|
||||
return data_map
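The structure `add_rows` returns maps each sheet name to its rows, with the header row first. Roughly (keys and values below are hypothetical):

```python
# Illustration of the data_map shape; the sheet name and cell values are made up.
data_map = {
    'AuthBook': [
        ['Hostname', 'IP', 'Username', 'Password'],   # header row built from header_fields
        ['web01', '10.0.0.1', 'root', '******'],      # one row per serialized account
    ],
}
```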
|
||||
|
||||
|
||||
class AssetAccountHandler(BaseAccountHandler):
|
||||
@staticmethod
|
||||
@@ -72,24 +97,29 @@ class AssetAccountHandler(BaseAccountHandler):
|
||||
return filename
|
||||
|
||||
@classmethod
|
||||
def create_df(cls):
|
||||
df_dict = defaultdict(list)
|
||||
def replace_account_info(cls, account, asset_dict, system_user_dict):
|
||||
asset = asset_dict.get(account.asset_id)
|
||||
account.ip = asset.ip if asset else ''
|
||||
account.hostname = asset.hostname if asset else ''
|
||||
account = cls.replace_auth(account, system_user_dict)
|
||||
return account
|
||||
|
||||
@classmethod
|
||||
def create_data_map(cls, system_user_dict):
|
||||
sheet_name = AuthBook._meta.verbose_name
|
||||
assets = Asset.objects.only('id', 'hostname', 'ip')
|
||||
asset_dict = {asset.id: asset for asset in assets}
|
||||
accounts = AuthBook.objects.all()
|
||||
if not accounts.exists():
|
||||
return
|
||||
|
||||
accounts = AuthBook.get_queryset().select_related('systemuser')
|
||||
if not accounts.first():
|
||||
return df_dict
|
||||
|
||||
header_fields = cls.get_header_fields(AccountSecretSerializer(accounts.first()))
|
||||
header_fields = cls.get_header_fields(AccountBackUpSerializer(accounts.first()))
|
||||
for account in accounts:
|
||||
account.load_auth()
|
||||
row = cls.create_row(account, AccountSecretSerializer, header_fields)
|
||||
df_dict[sheet_name].append(row)
|
||||
for k, v in df_dict.items():
|
||||
df_dict[k] = pd.DataFrame(v)
|
||||
|
||||
cls.replace_account_info(account, asset_dict, system_user_dict)
|
||||
data = AccountBackUpSerializer(accounts, many=True).data
|
||||
data_map = cls.add_rows(data, header_fields, sheet_name)
|
||||
logger.info('\n\033[33m- Collected {} asset accounts in total\033[0m'.format(accounts.count()))
|
||||
return df_dict
|
||||
return data_map
|
||||
|
||||
|
||||
class AppAccountHandler(BaseAccountHandler):
|
||||
@@ -101,19 +131,37 @@ class AppAccountHandler(BaseAccountHandler):
|
||||
return filename
|
||||
|
||||
@classmethod
|
||||
def create_df(cls):
|
||||
df_dict = defaultdict(list)
|
||||
accounts = Account.get_queryset().select_related('systemuser')
|
||||
for account in accounts:
|
||||
account.load_auth()
|
||||
app_type = account.type
|
||||
def replace_account_info(cls, account, app_dict, system_user_dict):
|
||||
app = app_dict.get(account.app_id)
|
||||
account.type = app.type if app else ''
|
||||
account.app_display = app.name if app else ''
|
||||
account.category = app.category if app else ''
|
||||
account = cls.replace_auth(account, system_user_dict)
|
||||
return account
|
||||
|
||||
@classmethod
|
||||
def create_data_map(cls, system_user_dict):
|
||||
apps = Application.objects.only('id', 'type', 'name', 'category')
|
||||
app_dict = {app.id: app for app in apps}
|
||||
qs = Account.objects.all().annotate(app_type=F('app__type'))
|
||||
if not qs.exists():
|
||||
return
|
||||
|
||||
account_type_map = defaultdict(list)
|
||||
for i in qs:
|
||||
account_type_map[i.app_type].append(i)
|
||||
data_map = {}
|
||||
for app_type, accounts in account_type_map.items():
|
||||
sheet_name = AppType.get_label(app_type)
|
||||
row = cls.create_row(account, AppAccountSecretSerializer)
|
||||
df_dict[sheet_name].append(row)
|
||||
for k, v in df_dict.items():
|
||||
df_dict[k] = pd.DataFrame(v)
|
||||
logger.info('\n\033[33m- Collected {} application accounts in total\033[0m'.format(accounts.count()))
|
||||
return df_dict
|
||||
header_fields = cls.get_header_fields(AppAccountBackUpSerializer(tp=app_type))
|
||||
if not accounts:
|
||||
continue
|
||||
for account in accounts:
|
||||
cls.replace_account_info(account, app_dict, system_user_dict)
|
||||
data = AppAccountBackUpSerializer(accounts, many=True, tp=app_type).data
|
||||
data_map.update(cls.add_rows(data, header_fields, sheet_name))
|
||||
logger.info('\n\033[33m- Collected {} application accounts in total\033[0m'.format(qs.count()))
|
||||
return data_map
|
||||
|
||||
|
||||
handler_map = {
|
||||
@@ -137,20 +185,27 @@ class AccountBackupHandler:
|
||||
# Print task start date
|
||||
time_start = time.time()
|
||||
files = []
|
||||
system_user_qs = SystemUser.objects.only(
|
||||
'id', 'username', 'password', 'private_key', 'public_key'
|
||||
)
|
||||
system_user_dict = {i.id: i for i in system_user_qs}
|
||||
for account_type in self.execution.types:
|
||||
handler = handler_map.get(account_type)
|
||||
if not handler:
|
||||
continue
|
||||
|
||||
df_dict = handler.create_df()
|
||||
if not df_dict:
|
||||
data_map = handler.create_data_map(system_user_dict)
|
||||
if not data_map:
|
||||
continue
|
||||
|
||||
filename = handler.get_filename(self.plan_name)
|
||||
with pd.ExcelWriter(filename) as w:
|
||||
for sheet, df in df_dict.items():
|
||||
sheet = sheet.replace(' ', '-')
|
||||
getattr(df, 'to_excel')(w, sheet_name=sheet, index=False)
|
||||
|
||||
wb = Workbook(filename)
|
||||
for sheet, data in data_map.items():
|
||||
ws = wb.create_sheet(str(sheet))
|
||||
for row in data:
|
||||
ws.append(row)
|
||||
wb.save(filename)
|
||||
files.append(filename)
|
||||
timedelta = round((time.time() - time_start), 2)
|
||||
logger.info('Step finished: took {}s'.format(timedelta))
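For reference, the usual openpyxl pattern for dumping such a sheet-to-rows mapping looks like the sketch below; note that `Workbook()` itself takes no filename, the path goes to `save()`. This is an illustration under those assumptions, not the repository's code.

```python
from openpyxl import Workbook


def write_sheets(data_map, filename):
    wb = Workbook(write_only=True)  # streaming mode, avoids the default empty sheet
    for sheet_name, rows in data_map.items():
        ws = wb.create_sheet(str(sheet_name).replace(' ', '-'))
        for row in rows:
            ws.append(row)
    wb.save(filename)
```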
|
||||
|
||||
@@ -32,17 +32,18 @@ def _dump_args(args: dict):
|
||||
return ' '.join([f'{k}={v}' for k, v in args.items() if v is not Empty])
|
||||
|
||||
|
||||
def get_push_unixlike_system_user_tasks(system_user, username=None):
|
||||
comment = system_user.name
|
||||
|
||||
def get_push_unixlike_system_user_tasks(system_user, username=None, **kwargs):
|
||||
algorithm = kwargs.get('algorithm')
|
||||
if username is None:
|
||||
username = system_user.username
|
||||
|
||||
comment = system_user.name
|
||||
if system_user.username_same_with_user:
|
||||
from users.models import User
|
||||
user = User.objects.filter(username=username).only('name', 'username').first()
|
||||
if user:
|
||||
comment = f'{system_user.name}[{str(user)}]'
|
||||
comment = comment.replace(' ', '')
|
||||
|
||||
password = system_user.password
|
||||
public_key = system_user.public_key
|
||||
@@ -104,7 +105,7 @@ def get_push_unixlike_system_user_tasks(system_user, username=None):
|
||||
'module': 'user',
|
||||
'args': 'name={} shell={} state=present password={}'.format(
|
||||
username, system_user.shell,
|
||||
encrypt_password(password, salt="K3mIlKK"),
|
||||
encrypt_password(password, salt="K3mIlKK", algorithm=algorithm),
|
||||
),
|
||||
}
|
||||
})
|
||||
@@ -138,7 +139,7 @@ def get_push_unixlike_system_user_tasks(system_user, username=None):
|
||||
return tasks
|
||||
|
||||
|
||||
def get_push_windows_system_user_tasks(system_user: SystemUser, username=None):
|
||||
def get_push_windows_system_user_tasks(system_user: SystemUser, username=None, **kwargs):
|
||||
if username is None:
|
||||
username = system_user.username
|
||||
password = system_user.password
|
||||
@@ -176,7 +177,7 @@ def get_push_windows_system_user_tasks(system_user: SystemUser, username=None):
|
||||
return tasks
|
||||
|
||||
|
||||
def get_push_system_user_tasks(system_user, platform="unixlike", username=None):
|
||||
def get_push_system_user_tasks(system_user, platform="unixlike", username=None, algorithm=None):
|
||||
"""
|
||||
Get the ansible tasks for pushing a system user, independent of any asset
|
||||
:param system_user:
|
||||
@@ -190,16 +191,16 @@ def get_push_system_user_tasks(system_user, platform="unixlike", username=None):
|
||||
}
|
||||
get_tasks = get_task_map.get(platform, get_push_unixlike_system_user_tasks)
|
||||
if not system_user.username_same_with_user:
|
||||
return get_tasks(system_user)
|
||||
return get_tasks(system_user, algorithm=algorithm)
|
||||
tasks = []
|
||||
# Only push this username
|
||||
if username is not None:
|
||||
tasks.extend(get_tasks(system_user, username))
|
||||
tasks.extend(get_tasks(system_user, username, algorithm=algorithm))
|
||||
return tasks
|
||||
users = system_user.users.all().values_list('username', flat=True)
|
||||
print(_("System user is dynamic: {}").format(list(users)))
|
||||
for _username in users:
|
||||
tasks.extend(get_tasks(system_user, _username))
|
||||
tasks.extend(get_tasks(system_user, _username, algorithm=algorithm))
|
||||
return tasks
|
||||
|
||||
|
||||
@@ -244,7 +245,11 @@ def push_system_user_util(system_user, assets, task_name, username=None):
|
||||
for u in usernames:
|
||||
for a in _assets:
|
||||
system_user.load_asset_special_auth(a, u)
|
||||
tasks = get_push_system_user_tasks(system_user, platform, username=u)
|
||||
algorithm = 'des' if a.platform.name == 'AIX' else 'sha512'
|
||||
tasks = get_push_system_user_tasks(
|
||||
system_user, platform, username=u,
|
||||
algorithm=algorithm
|
||||
)
|
||||
run_task(tasks, [a])
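The `algorithm` value chosen here feeds `encrypt_password`, so AIX assets (whose crypt lacks SHA-512) get a DES hash while everything else gets SHA-512. The repository's `encrypt_password` helper is not shown in this diff; a minimal sketch of that kind of switch using passlib (an assumption, not the actual helper) would be:

```python
from passlib.hash import des_crypt, sha512_crypt


def encrypt_password_demo(password, algorithm='sha512'):
    # AIX only understands the legacy DES crypt format for pushed passwords
    if algorithm == 'des':
        return des_crypt.hash(password)
    return sha512_crypt.hash(password)
```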
|
||||
|
||||
|
||||
@@ -269,7 +274,7 @@ def push_system_user_a_asset_manual(system_user, asset, username=None):
|
||||
# if username is None:
|
||||
# username = system_user.username
|
||||
task_name = gettext_noop("Push system users to asset: ") + "{}({}) => {}".format(
|
||||
system_user.name, username, asset
|
||||
system_user.name, username or system_user.username, asset
|
||||
)
|
||||
return push_system_user_util(system_user, [asset], task_name=task_name, username=username)
|
||||
|
||||
|
||||
@@ -13,6 +13,8 @@ router = BulkRouter()
|
||||
router.register(r'assets', api.AssetViewSet, 'asset')
|
||||
router.register(r'accounts', api.AccountViewSet, 'account')
|
||||
router.register(r'account-secrets', api.AccountSecretsViewSet, 'account-secret')
|
||||
router.register(r'accounts-history', api.AccountHistoryViewSet, 'account-history')
|
||||
router.register(r'account-history-secrets', api.AccountHistorySecretsViewSet, 'account-history-secret')
|
||||
router.register(r'platforms', api.AssetPlatformViewSet, 'platform')
|
||||
router.register(r'system-users', api.SystemUserViewSet, 'system-user')
|
||||
router.register(r'admin-users', api.AdminUserViewSet, 'admin-user')
|
||||
|
||||
@@ -1,20 +1,29 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from rest_framework.mixins import ListModelMixin, CreateModelMixin
|
||||
from importlib import import_module
|
||||
|
||||
from rest_framework.mixins import ListModelMixin, CreateModelMixin, RetrieveModelMixin
|
||||
from django.db.models import F, Value
|
||||
from django.db.models.functions import Concat
|
||||
from django.conf import settings
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework import generics
|
||||
|
||||
from common.drf.api import JMSReadOnlyModelViewSet
|
||||
from common.plugins.es import QuerySet as ESQuerySet
|
||||
from common.drf.filters import DatetimeRangeFilter
|
||||
from common.api import CommonGenericViewSet
|
||||
from orgs.mixins.api import OrgGenericViewSet, OrgBulkModelViewSet, OrgRelationMixin
|
||||
from orgs.utils import current_org
|
||||
from ops.models import CommandExecution
|
||||
from . import filters
|
||||
from .backends import TYPE_ENGINE_MAPPING
|
||||
from .models import FTPLog, UserLoginLog, OperateLog, PasswordChangeLog
|
||||
from .serializers import FTPLogSerializer, UserLoginLogSerializer, CommandExecutionSerializer
|
||||
from .serializers import OperateLogSerializer, PasswordChangeLogSerializer, CommandExecutionHostsRelationSerializer
|
||||
from .serializers import (
|
||||
OperateLogSerializer, OperateLogActionDetailSerializer,
|
||||
PasswordChangeLogSerializer, CommandExecutionHostsRelationSerializer
|
||||
)
|
||||
|
||||
|
||||
class FTPLogViewSet(CreateModelMixin,
|
||||
@@ -67,7 +76,7 @@ class MyLoginLogAPIView(UserLoginCommonMixin, generics.ListAPIView):
|
||||
return qs
|
||||
|
||||
|
||||
class OperateLogViewSet(ListModelMixin, OrgGenericViewSet):
|
||||
class OperateLogViewSet(RetrieveModelMixin, ListModelMixin, OrgGenericViewSet):
|
||||
model = OperateLog
|
||||
serializer_class = OperateLogSerializer
|
||||
extra_filter_backends = [DatetimeRangeFilter]
|
||||
@@ -78,6 +87,22 @@ class OperateLogViewSet(ListModelMixin, OrgGenericViewSet):
|
||||
search_fields = ['resource']
|
||||
ordering = ['-datetime']
|
||||
|
||||
def get_serializer_class(self):
|
||||
if self.request.query_params.get('type') == 'action_detail':
|
||||
return OperateLogActionDetailSerializer
|
||||
return super().get_serializer_class()
|
||||
|
||||
def get_queryset(self):
|
||||
qs = OperateLog.objects.all()
|
||||
es_config = settings.OPERATE_LOG_ELASTICSEARCH_CONFIG
|
||||
if es_config:
|
||||
engine_mod = import_module(TYPE_ENGINE_MAPPING['es'])
|
||||
store = engine_mod.OperateLogStore(es_config)
|
||||
if store.ping(timeout=2):
|
||||
qs = ESQuerySet(store)
|
||||
qs.model = OperateLog
|
||||
return qs
|
||||
|
||||
|
||||
class PasswordChangeLogViewSet(ListModelMixin, CommonGenericViewSet):
|
||||
queryset = PasswordChangeLog.objects.all()
|
||||
@@ -126,9 +151,7 @@ class CommandExecutionViewSet(ListModelMixin, OrgGenericViewSet):
|
||||
class CommandExecutionHostRelationViewSet(OrgRelationMixin, OrgBulkModelViewSet):
|
||||
serializer_class = CommandExecutionHostsRelationSerializer
|
||||
m2m_field = CommandExecution.hosts.field
|
||||
filterset_fields = [
|
||||
'id', 'asset', 'commandexecution'
|
||||
]
|
||||
filterset_class = filters.CommandExecutionFilter
|
||||
search_fields = ('asset__hostname', )
|
||||
http_method_names = ['options', 'get']
|
||||
rbac_perms = {
|
||||
|
||||
apps/audits/backends/__init__.py (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
from importlib import import_module
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
|
||||
TYPE_ENGINE_MAPPING = {
|
||||
'db': 'audits.backends.db',
|
||||
'es': 'audits.backends.es',
|
||||
}
|
||||
|
||||
|
||||
def get_operate_log_storage(default=False):
|
||||
engine_mod = import_module(TYPE_ENGINE_MAPPING['db'])
|
||||
es_config = settings.OPERATE_LOG_ELASTICSEARCH_CONFIG
|
||||
if not default and es_config:
|
||||
engine_mod = import_module(TYPE_ENGINE_MAPPING['es'])
|
||||
storage = engine_mod.OperateLogStore(es_config)
|
||||
return storage
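Switching the operate-log store is therefore just a settings change: with `OPERATE_LOG_ELASTICSEARCH_CONFIG` unset the DB backend is returned, otherwise the Elasticsearch one. A hedged usage sketch (the config keys shown are assumptions):

```python
# settings.py -- illustrative values only
OPERATE_LOG_ELASTICSEARCH_CONFIG = {
    'HOSTS': ['http://127.0.0.1:9200'],
    'INDEX': 'jumpserver_operate_log',
}

# elsewhere in the audits app
from audits.backends import get_operate_log_storage

storage = get_operate_log_storage()                   # ES store when configured
db_fallback = get_operate_log_storage(default=True)   # always the DB store
```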
|
||||
apps/audits/backends/db.py (new file, 38 lines)
@@ -0,0 +1,38 @@
|
||||
# ~*~ coding: utf-8 ~*~
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from audits.models import OperateLog
|
||||
|
||||
|
||||
class OperateLogStore(object):
|
||||
def __init__(self, config):
|
||||
self.model = OperateLog
|
||||
self.max_length = 1024
|
||||
self.max_length_tip_msg = _(
|
||||
'The text content is too long. Use Elasticsearch to store operation logs'
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def ping(timeout=None):
|
||||
return True
|
||||
|
||||
def save(self, **kwargs):
|
||||
log_id = kwargs.get('id', '')
|
||||
before = kwargs.get('before') or {}
|
||||
after = kwargs.get('after') or {}
|
||||
if len(str(before)) > self.max_length:
|
||||
before = {_('Tips'): self.max_length_tip_msg}
|
||||
if len(str(after)) > self.max_length:
|
||||
after = {_('Tips'): self.max_length_tip_msg}
|
||||
|
||||
op_log = self.model.objects.filter(pk=log_id).first()
|
||||
if op_log is not None:
|
||||
raw_after = op_log.after or {}
|
||||
raw_before = op_log.before or {}
|
||||
raw_before.update(before)
|
||||
raw_after.update(after)
|
||||
op_log.before = raw_before
|
||||
op_log.after = raw_after
|
||||
op_log.save()
|
||||
else:
|
||||
self.model.objects.create(**kwargs)
|
||||
apps/audits/backends/es.py (new file, 85 lines)
@@ -0,0 +1,85 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import uuid
|
||||
|
||||
from common.utils.timezone import local_now_display
|
||||
from common.utils import get_logger
|
||||
from common.utils.encode import Singleton
|
||||
from common.plugins.es import ES
|
||||
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
class OperateLogStore(ES, metaclass=Singleton):
|
||||
def __init__(self, config):
|
||||
properties = {
|
||||
"id": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"user": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"action": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"resource_type": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"org_id": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"datetime": {
|
||||
"type": "date",
|
||||
"format": "yyyy-MM-dd HH:mm:ss"
|
||||
}
|
||||
}
|
||||
exact_fields = {}
|
||||
match_fields = {
|
||||
'id', 'user', 'action', 'resource_type',
|
||||
'resource', 'remote_addr', 'org_id'
|
||||
}
|
||||
keyword_fields = {
|
||||
'id', 'user', 'action', 'resource_type', 'org_id'
|
||||
}
|
||||
if not config.get('INDEX'):
|
||||
config['INDEX'] = 'jumpserver_operate_log'
|
||||
super().__init__(config, properties, keyword_fields, exact_fields, match_fields)
|
||||
self.pre_use_check()
|
||||
|
||||
@staticmethod
|
||||
def make_data(data):
|
||||
op_id = data.get('id', str(uuid.uuid4()))
|
||||
datetime_param = data.get('datetime', local_now_display())
|
||||
data = {
|
||||
'id': op_id, 'user': data['user'], 'action': data['action'],
|
||||
'resource_type': data['resource_type'], 'resource': data['resource'],
|
||||
'remote_addr': data['remote_addr'], 'datetime': datetime_param,
|
||||
'before': data['before'], 'after': data['after'], 'org_id': data['org_id']
|
||||
}
|
||||
return data
|
||||
|
||||
def save(self, **kwargs):
|
||||
log_id = kwargs.get('id', '')
|
||||
before = kwargs.get('before') or {}
|
||||
after = kwargs.get('after') or {}
|
||||
|
||||
op_log = self.get({'id': log_id})
|
||||
if op_log is not None:
|
||||
data = {'doc': {}}
|
||||
raw_after = op_log.get('after') or {}
|
||||
raw_before = op_log.get('before') or {}
|
||||
raw_before.update(before)
|
||||
raw_after.update(after)
|
||||
data['doc']['before'] = raw_before
|
||||
data['doc']['after'] = raw_after
|
||||
self.es.update(
|
||||
index=self.index, doc_type=self.doc_type,
|
||||
id=op_log.get('es_id'), body=data, refresh=True
|
||||
)
|
||||
else:
|
||||
data = self.make_data(kwargs)
|
||||
self.es.index(
|
||||
index=self.index, doc_type=self.doc_type, body=data,
|
||||
refresh=True
|
||||
)
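The `properties` dict above is an ordinary Elasticsearch field mapping; presumably `pre_use_check()` makes sure an index with that mapping exists. For orientation, creating such an index with the official client looks roughly like this (an illustration, not code from the diff):

```python
from elasticsearch import Elasticsearch

es = Elasticsearch(['http://127.0.0.1:9200'])
if not es.indices.exists(index='jumpserver_operate_log'):
    es.indices.create(
        index='jumpserver_operate_log',
        body={'mappings': {'properties': {
            'id': {'type': 'keyword'},
            'user': {'type': 'keyword'},
            'datetime': {'type': 'date', 'format': 'yyyy-MM-dd HH:mm:ss'},
        }}},
    )
```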
|
||||
@@ -3,3 +3,34 @@
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
DEFAULT_CITY = _("Unknown")
|
||||
|
||||
MODELS_NEED_RECORD = (
|
||||
# users
|
||||
'User', 'UserGroup',
|
||||
# authentication
|
||||
'AccessKey', 'TempToken',
|
||||
# acls
|
||||
'LoginACL', 'LoginAssetACL', 'LoginConfirmSetting',
|
||||
# assets
|
||||
'Asset', 'Node', 'AdminUser', 'SystemUser', 'Domain', 'Gateway', 'CommandFilterRule',
|
||||
'CommandFilter', 'Platform', 'Label',
|
||||
# applications
|
||||
'Application',
|
||||
# account
|
||||
'AuthBook',
|
||||
# orgs
|
||||
'Organization',
|
||||
# settings
|
||||
'Setting',
|
||||
# perms
|
||||
'AssetPermission', 'ApplicationPermission',
|
||||
# notifications
|
||||
'SystemMsgSubscription', 'UserMsgSubscription',
|
||||
# Terminal
|
||||
'Terminal', 'Endpoint', 'EndpointRule', 'CommandStorage', 'ReplayStorage',
|
||||
# rbac
|
||||
'Role', 'SystemRole', 'OrgRole', 'RoleBinding', 'OrgRoleBinding', 'SystemRoleBinding',
|
||||
# xpack
|
||||
'License', 'Account', 'SyncInstanceTask', 'ChangeAuthPlan', 'ApplicationChangeAuthPlan',
|
||||
'GatherUserTask', 'Interface',
|
||||
)
|
||||
|
||||
@@ -1,10 +1,14 @@
|
||||
from django.db.models import F, Value
|
||||
from django.db.models.functions import Concat
|
||||
from django_filters.rest_framework import CharFilter
|
||||
from rest_framework import filters
|
||||
from rest_framework.compat import coreapi, coreschema
|
||||
|
||||
from orgs.utils import current_org
|
||||
from ops.models import CommandExecution
|
||||
from common.drf.filters import BaseFilterSet
|
||||
|
||||
|
||||
__all__ = ['CurrentOrgMembersFilter']
|
||||
__all__ = ['CurrentOrgMembersFilter', 'CommandExecutionFilter']
|
||||
|
||||
|
||||
class CurrentOrgMembersFilter(filters.BaseFilterBackend):
|
||||
@@ -30,3 +34,22 @@ class CurrentOrgMembersFilter(filters.BaseFilterBackend):
|
||||
else:
|
||||
queryset = queryset.filter(user__in=self._get_user_list())
|
||||
return queryset
|
||||
|
||||
|
||||
class CommandExecutionFilter(BaseFilterSet):
|
||||
hostname_ip = CharFilter(method='filter_hostname_ip')
|
||||
|
||||
class Meta:
|
||||
model = CommandExecution.hosts.through
|
||||
fields = (
|
||||
'id', 'asset', 'commandexecution', 'hostname_ip'
|
||||
)
|
||||
|
||||
def filter_hostname_ip(self, queryset, name, value):
|
||||
queryset = queryset.annotate(
|
||||
hostname_ip=Concat(
|
||||
F('asset__hostname'), Value('('),
|
||||
F('asset__ip'), Value(')')
|
||||
)
|
||||
).filter(hostname_ip__icontains=value)
|
||||
return queryset
|
||||
|
||||
apps/audits/handler.py (new file, 191 lines)
@@ -0,0 +1,191 @@
|
||||
from datetime import datetime
|
||||
|
||||
from django.db import transaction
|
||||
from django.core.cache import cache
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from common.utils import get_request_ip, get_logger
|
||||
from common.utils.timezone import as_current_tz
|
||||
from common.utils.encode import Singleton
|
||||
from common.local import encrypted_field_set
|
||||
from settings.serializers import SettingsSerializer
|
||||
from jumpserver.utils import current_request
|
||||
from audits.models import OperateLog
|
||||
from orgs.utils import get_current_org_id
|
||||
|
||||
from .backends import get_operate_log_storage
|
||||
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
class ModelClient:
|
||||
@staticmethod
|
||||
def save(**kwargs):
|
||||
log_id = kwargs.get('id', '')
|
||||
op_log = OperateLog.objects.filter(pk=log_id).first()
|
||||
if op_log is not None:
|
||||
raw_after = op_log.after or {}
|
||||
raw_before = op_log.before or {}
|
||||
cur_before = kwargs.get('before') or {}
|
||||
cur_after = kwargs.get('after') or {}
|
||||
raw_before.update(cur_before)
|
||||
raw_after.update(cur_after)
|
||||
op_log.before = raw_before
|
||||
op_log.after = raw_after
|
||||
op_log.save()
|
||||
else:
|
||||
OperateLog.objects.create(**kwargs)
|
||||
|
||||
|
||||
class OperatorLogHandler(metaclass=Singleton):
|
||||
CACHE_KEY = 'OPERATOR_LOG_CACHE_KEY'
|
||||
|
||||
def __init__(self):
|
||||
self.log_client = self.get_storage_client()
|
||||
|
||||
@staticmethod
|
||||
def get_storage_client():
|
||||
client = get_operate_log_storage()
|
||||
return client
|
||||
|
||||
@staticmethod
|
||||
def _consistent_type_to_str(value1, value2):
|
||||
if isinstance(value1, datetime):
|
||||
value1 = as_current_tz(value1).strftime('%Y-%m-%d %H:%M:%S')
|
||||
if isinstance(value2, datetime):
|
||||
value2 = as_current_tz(value2).strftime('%Y-%m-%d %H:%M:%S')
|
||||
return value1, value2
|
||||
|
||||
def _look_for_two_dict_change(self, left_dict, right_dict):
|
||||
# Use the right-hand dict as the baseline
|
||||
before, after = {}, {}
|
||||
for key, value in right_dict.items():
|
||||
pre_value = left_dict.get(key, '')
|
||||
pre_value, value = self._consistent_type_to_str(pre_value, value)
|
||||
if sorted(str(value)) == sorted(str(pre_value)):
|
||||
continue
|
||||
if pre_value:
|
||||
before[key] = pre_value
|
||||
if value:
|
||||
after[key] = value
|
||||
return before, after
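So for each key only values that actually differ survive, with datetimes coerced to strings first (the comparison uses sorted character sets, so a pure reordering of the same characters counts as no change). A small worked example with made-up data:

```python
left = {'name': 'web01', 'port': 22, 'comment': ''}
right = {'name': 'web01', 'port': 2222, 'comment': 'jump host'}

# _look_for_two_dict_change(left, right) would yield:
before = {'port': 22}                              # empty old values are dropped
after = {'port': 2222, 'comment': 'jump host'}
```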
|
||||
|
||||
def cache_instance_before_data(self, instance_dict):
|
||||
instance_id = instance_dict.get('id')
|
||||
if instance_id is None:
|
||||
return
|
||||
|
||||
key = '%s_%s' % (self.CACHE_KEY, instance_id)
|
||||
cache.set(key, instance_dict, 3 * 60)
|
||||
|
||||
def get_instance_dict_from_cache(self, instance_id):
|
||||
if instance_id is None:
|
||||
return None
|
||||
|
||||
key = '%s_%s' % (self.CACHE_KEY, instance_id)
|
||||
cache_instance = cache.get(key, {})
|
||||
log_id = cache_instance.get('operate_log_id')
|
||||
return log_id, cache_instance
|
||||
|
||||
def get_instance_current_with_cache_diff(self, current_instance):
|
||||
log_id, before, after = None, None, None
|
||||
instance_id = current_instance.get('id')
|
||||
if instance_id is None:
|
||||
return log_id, before, after
|
||||
|
||||
log_id, cache_instance = self.get_instance_dict_from_cache(instance_id)
|
||||
if not cache_instance:
|
||||
return log_id, before, after
|
||||
|
||||
before, after = self._look_for_two_dict_change(
|
||||
cache_instance, current_instance
|
||||
)
|
||||
return log_id, before, after
|
||||
|
||||
@staticmethod
|
||||
def get_resource_display_from_setting(resource):
|
||||
resource_display = None
|
||||
setting_serializer = SettingsSerializer()
|
||||
label = setting_serializer.get_field_label(resource)
|
||||
if label is not None:
|
||||
resource_display = label
|
||||
return resource_display
|
||||
|
||||
def get_resource_display(self, resource):
|
||||
resource_display = str(resource)
|
||||
return_value = self.get_resource_display_from_setting(resource_display)
|
||||
if return_value is not None:
|
||||
resource_display = return_value
|
||||
return resource_display
|
||||
|
||||
@staticmethod
|
||||
def serialized_value(value: (list, tuple)):
|
||||
if len(value) == 0:
|
||||
return ''
|
||||
if isinstance(value[0], str):
|
||||
return ','.join(value)
|
||||
return ','.join([i['value'] for i in value if i.get('value')])
|
||||
|
||||
def __data_processing(self, dict_item, loop=True):
|
||||
encrypt_value = '******'
|
||||
for key, value in dict_item.items():
|
||||
if isinstance(value, bool):
|
||||
value = _('Yes') if value else _('No')
|
||||
elif isinstance(value, (list, tuple)):
|
||||
value = self.serialized_value(value)
|
||||
elif isinstance(value, dict) and loop:
|
||||
self.__data_processing(value, loop=False)
|
||||
if key in encrypted_field_set:
|
||||
value = encrypt_value
|
||||
dict_item[key] = value
|
||||
return dict_item
|
||||
|
||||
def data_processing(self, before, after):
|
||||
if before:
|
||||
before = self.__data_processing(before)
|
||||
if after:
|
||||
after = self.__data_processing(after)
|
||||
return before, after
|
||||
|
||||
def create_or_update_operate_log(
|
||||
self, action, resource_type, resource=None,
|
||||
force=False, log_id=None, before=None, after=None
|
||||
):
|
||||
user = current_request.user if current_request else None
|
||||
if not user or not user.is_authenticated:
|
||||
return
|
||||
|
||||
remote_addr = get_request_ip(current_request)
|
||||
resource_display = self.get_resource_display(resource)
|
||||
before, after = self.data_processing(before, after)
|
||||
if not force and not any([before, after]):
|
||||
# Nothing changed between before and after, so no log is needed unless forced manually
|
||||
return
|
||||
|
||||
data = {
|
||||
'id': log_id, "user": str(user), 'action': action,
|
||||
'resource_type': str(resource_type), 'resource': resource_display,
|
||||
'remote_addr': remote_addr, 'before': before, 'after': after,
|
||||
'org_id': get_current_org_id(),
|
||||
}
|
||||
with transaction.atomic():
|
||||
if self.log_client.ping(timeout=1):
|
||||
client = self.log_client
|
||||
else:
|
||||
logger.info('Switch default operate log storage save.')
|
||||
client = get_operate_log_storage(default=True)
|
||||
|
||||
try:
|
||||
client.save(**data)
|
||||
except Exception as e:
|
||||
error_msg = 'An error occurred saving OperateLog.' \
|
||||
'Error: %s, Data: %s' % (e, data)
|
||||
logger.error(error_msg)
|
||||
|
||||
|
||||
op_handler = OperatorLogHandler()
|
||||
create_or_update_operate_log = op_handler.create_or_update_operate_log
|
||||
cache_instance_before_data = op_handler.cache_instance_before_data
|
||||
get_instance_current_with_cache_diff = op_handler.get_instance_current_with_cache_diff
|
||||
get_instance_dict_from_cache = op_handler.get_instance_dict_from_cache
|
||||
apps/audits/migrations/0014_auto_20220505_1902.py (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
# Generated by Django 3.1.14 on 2022-05-05 11:02
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('audits', '0013_auto_20211130_1037'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='operatelog',
|
||||
name='action',
|
||||
field=models.CharField(choices=[('create', 'Create'), ('view', 'View'), ('update', 'Update'), ('delete', 'Delete')], max_length=16, verbose_name='Action'),
|
||||
),
|
||||
]
|
||||
apps/audits/migrations/0015_auto_20221011_1745.py (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
# Generated by Django 3.2.14 on 2022-10-11 09:45
|
||||
|
||||
import common.db.encoder
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('audits', '0014_auto_20220505_1902'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='operatelog',
|
||||
name='after',
|
||||
field=models.JSONField(default=dict, encoder=common.db.encoder.ModelJSONFieldEncoder, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='operatelog',
|
||||
name='before',
|
||||
field=models.JSONField(default=dict, encoder=common.db.encoder.ModelJSONFieldEncoder, null=True),
|
||||
),
|
||||
]
|
||||
@@ -4,8 +4,9 @@ from django.db import models
|
||||
from django.db.models import Q
|
||||
from django.utils.translation import gettext, ugettext_lazy as _
|
||||
from django.utils import timezone
|
||||
from common.utils import lazyproperty
|
||||
|
||||
from common.utils import lazyproperty
|
||||
from common.db.encoder import ModelJSONFieldEncoder
|
||||
from orgs.mixins.models import OrgModelMixin, Organization
|
||||
from orgs.utils import current_org
|
||||
|
||||
@@ -49,10 +50,12 @@ class FTPLog(OrgModelMixin):
|
||||
|
||||
class OperateLog(OrgModelMixin):
|
||||
ACTION_CREATE = 'create'
|
||||
ACTION_VIEW = 'view'
|
||||
ACTION_UPDATE = 'update'
|
||||
ACTION_DELETE = 'delete'
|
||||
ACTION_CHOICES = (
|
||||
(ACTION_CREATE, _("Create")),
|
||||
(ACTION_VIEW, _("View")),
|
||||
(ACTION_UPDATE, _("Update")),
|
||||
(ACTION_DELETE, _("Delete"))
|
||||
)
|
||||
@@ -63,6 +66,8 @@ class OperateLog(OrgModelMixin):
|
||||
resource = models.CharField(max_length=128, verbose_name=_("Resource"))
|
||||
remote_addr = models.CharField(max_length=128, verbose_name=_("Remote addr"), blank=True, null=True)
|
||||
datetime = models.DateTimeField(auto_now=True, verbose_name=_('Datetime'), db_index=True)
|
||||
before = models.JSONField(default=dict, encoder=ModelJSONFieldEncoder, null=True)
|
||||
after = models.JSONField(default=dict, encoder=ModelJSONFieldEncoder, null=True)
|
||||
|
||||
def __str__(self):
|
||||
return "<{}> {} <{}>".format(self.user, self.action, self.resource)
|
||||
@@ -76,6 +81,21 @@ class OperateLog(OrgModelMixin):
|
||||
self.org_id = Organization.ROOT_ID
|
||||
return super(OperateLog, self).save(*args, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def from_dict(cls, d):
|
||||
self = cls()
|
||||
for k, v in d.items():
|
||||
setattr(self, k, v)
|
||||
return self
|
||||
|
||||
@classmethod
|
||||
def from_multi_dict(cls, l):
|
||||
operate_logs = []
|
||||
for d in l:
|
||||
operate_log = cls.from_dict(d)
|
||||
operate_logs.append(operate_log)
|
||||
return operate_logs
|
||||
|
||||
class Meta:
|
||||
verbose_name = _("Operate log")
|
||||
|
||||
|
||||
@@ -47,6 +47,12 @@ class UserLoginLogSerializer(serializers.ModelSerializer):
|
||||
}
|
||||
|
||||
|
||||
class OperateLogActionDetailSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = models.OperateLog
|
||||
fields = ('before', 'after')
|
||||
|
||||
|
||||
class OperateLogSerializer(serializers.ModelSerializer):
|
||||
action_display = serializers.CharField(source='get_action_display', label=_('Action'))
|
||||
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import uuid
|
||||
|
||||
from django.db.models.signals import (
|
||||
post_save, m2m_changed, pre_delete
|
||||
post_save, m2m_changed, pre_delete, pre_save
|
||||
)
|
||||
from django.dispatch import receiver
|
||||
from django.conf import settings
|
||||
@@ -14,48 +16,36 @@ from django.utils import translation
|
||||
from rest_framework.renderers import JSONRenderer
|
||||
from rest_framework.request import Request
|
||||
|
||||
from assets.models import Asset, SystemUser
|
||||
from users.models import User
|
||||
from assets.models import Asset, SystemUser, CommandFilter
|
||||
from terminal.models import Session, Command
|
||||
from perms.models import AssetPermission, ApplicationPermission
|
||||
from rbac.models import Role
|
||||
|
||||
from audits.utils import model_to_dict_for_operate_log as model_to_dict
|
||||
from audits.handler import (
|
||||
get_instance_current_with_cache_diff, cache_instance_before_data,
|
||||
create_or_update_operate_log, get_instance_dict_from_cache
|
||||
)
|
||||
from authentication.signals import post_auth_failed, post_auth_success
|
||||
from authentication.utils import check_different_city_login_if_need
|
||||
from jumpserver.utils import current_request
|
||||
from users.models import User
|
||||
from users.signals import post_user_change_password
|
||||
from terminal.models import Session, Command
|
||||
from .utils import write_login_log
|
||||
from . import models, serializers
|
||||
from .models import OperateLog
|
||||
from orgs.utils import current_org
|
||||
from perms.models import AssetPermission, ApplicationPermission
|
||||
from .const import MODELS_NEED_RECORD
|
||||
from terminal.backends.command.serializers import SessionCommandSerializer
|
||||
from terminal.serializers import SessionSerializer
|
||||
from common.const.signals import POST_ADD, POST_REMOVE, POST_CLEAR
|
||||
from common.const.signals import POST_ADD, POST_REMOVE, POST_CLEAR, SKIP_SIGNAL
|
||||
from common.utils import get_request_ip, get_logger, get_syslogger
|
||||
from common.utils.encode import data_to_json
|
||||
|
||||
|
||||
logger = get_logger(__name__)
|
||||
sys_logger = get_syslogger(__name__)
|
||||
json_render = JSONRenderer()
|
||||
|
||||
MODELS_NEED_RECORD = (
|
||||
# users
|
||||
'User', 'UserGroup',
|
||||
# acls
|
||||
'LoginACL', 'LoginAssetACL', 'LoginConfirmSetting',
|
||||
# assets
|
||||
'Asset', 'Node', 'AdminUser', 'SystemUser', 'Domain', 'Gateway', 'CommandFilterRule',
|
||||
'CommandFilter', 'Platform', 'AuthBook',
|
||||
# applications
|
||||
'Application',
|
||||
# orgs
|
||||
'Organization',
|
||||
# settings
|
||||
'Setting',
|
||||
# perms
|
||||
'AssetPermission', 'ApplicationPermission',
|
||||
# xpack
|
||||
'License', 'Account', 'SyncInstanceTask', 'ChangeAuthPlan', 'GatherUserTask',
|
||||
)
|
||||
|
||||
|
||||
class AuthBackendLabelMapping(LazyObject):
|
||||
@staticmethod
|
||||
@@ -69,6 +59,7 @@ class AuthBackendLabelMapping(LazyObject):
|
||||
backend_label_mapping[settings.AUTH_BACKEND_SSO] = _('SSO')
|
||||
backend_label_mapping[settings.AUTH_BACKEND_AUTH_TOKEN] = _('Auth Token')
|
||||
backend_label_mapping[settings.AUTH_BACKEND_WECOM] = _('WeCom')
|
||||
backend_label_mapping[settings.AUTH_BACKEND_FEISHU] = _('FeiShu')
|
||||
backend_label_mapping[settings.AUTH_BACKEND_DINGTALK] = _('DingTalk')
|
||||
backend_label_mapping[settings.AUTH_BACKEND_TEMP_TOKEN] = _('Temporary token')
|
||||
return backend_label_mapping
|
||||
@@ -79,92 +70,6 @@ class AuthBackendLabelMapping(LazyObject):
|
||||
|
||||
AUTH_BACKEND_LABEL_MAPPING = AuthBackendLabelMapping()
|
||||
|
||||
|
||||
def create_operate_log(action, sender, resource):
|
||||
user = current_request.user if current_request else None
|
||||
if not user or not user.is_authenticated:
|
||||
return
|
||||
model_name = sender._meta.object_name
|
||||
if model_name not in MODELS_NEED_RECORD:
|
||||
return
|
||||
with translation.override('en'):
|
||||
resource_type = sender._meta.verbose_name
|
||||
remote_addr = get_request_ip(current_request)
|
||||
|
||||
data = {
|
||||
"user": str(user), 'action': action, 'resource_type': resource_type,
|
||||
'resource': str(resource), 'remote_addr': remote_addr,
|
||||
}
|
||||
with transaction.atomic():
|
||||
try:
|
||||
models.OperateLog.objects.create(**data)
|
||||
except Exception as e:
|
||||
logger.error("Create operate log error: {}".format(e))
|
||||
|
||||
|
||||
M2M_NEED_RECORD = {
|
||||
User.groups.through._meta.object_name: (
|
||||
_('User and Group'),
|
||||
_('{User} JOINED {UserGroup}'),
|
||||
_('{User} LEFT {UserGroup}')
|
||||
),
|
||||
SystemUser.assets.through._meta.object_name: (
|
||||
_('Asset and SystemUser'),
|
||||
_('{Asset} ADD {SystemUser}'),
|
||||
_('{Asset} REMOVE {SystemUser}')
|
||||
),
|
||||
Asset.nodes.through._meta.object_name: (
|
||||
_('Node and Asset'),
|
||||
_('{Node} ADD {Asset}'),
|
||||
_('{Node} REMOVE {Asset}')
|
||||
),
|
||||
AssetPermission.users.through._meta.object_name: (
|
||||
_('User asset permissions'),
|
||||
_('{AssetPermission} ADD {User}'),
|
||||
_('{AssetPermission} REMOVE {User}'),
|
||||
),
|
||||
AssetPermission.user_groups.through._meta.object_name: (
|
||||
_('User group asset permissions'),
|
||||
_('{AssetPermission} ADD {UserGroup}'),
|
||||
_('{AssetPermission} REMOVE {UserGroup}'),
|
||||
),
|
||||
AssetPermission.assets.through._meta.object_name: (
|
||||
_('Asset permission'),
|
||||
_('{AssetPermission} ADD {Asset}'),
|
||||
_('{AssetPermission} REMOVE {Asset}'),
|
||||
),
|
||||
AssetPermission.nodes.through._meta.object_name: (
|
||||
_('Node permission'),
|
||||
_('{AssetPermission} ADD {Node}'),
|
||||
_('{AssetPermission} REMOVE {Node}'),
|
||||
),
|
||||
AssetPermission.system_users.through._meta.object_name: (
|
||||
_('Asset permission and SystemUser'),
|
||||
_('{AssetPermission} ADD {SystemUser}'),
|
||||
_('{AssetPermission} REMOVE {SystemUser}'),
|
||||
),
|
||||
ApplicationPermission.users.through._meta.object_name: (
|
||||
_('User application permissions'),
|
||||
_('{ApplicationPermission} ADD {User}'),
|
||||
_('{ApplicationPermission} REMOVE {User}'),
|
||||
),
|
||||
ApplicationPermission.user_groups.through._meta.object_name: (
|
||||
_('User group application permissions'),
|
||||
_('{ApplicationPermission} ADD {UserGroup}'),
|
||||
_('{ApplicationPermission} REMOVE {UserGroup}'),
|
||||
),
|
||||
ApplicationPermission.applications.through._meta.object_name: (
|
||||
_('Application permission'),
|
||||
_('{ApplicationPermission} ADD {Application}'),
|
||||
_('{ApplicationPermission} REMOVE {Application}'),
|
||||
),
|
||||
ApplicationPermission.system_users.through._meta.object_name: (
|
||||
_('Application permission and SystemUser'),
|
||||
_('{ApplicationPermission} ADD {SystemUser}'),
|
||||
_('{ApplicationPermission} REMOVE {SystemUser}'),
|
||||
),
|
||||
}
|
||||
|
||||
M2M_ACTION = {
|
||||
POST_ADD: OperateLog.ACTION_CREATE,
|
||||
POST_REMOVE: OperateLog.ACTION_DELETE,
|
||||
@@ -176,60 +81,115 @@ M2M_ACTION = {
|
||||
def on_m2m_changed(sender, action, instance, reverse, model, pk_set, **kwargs):
|
||||
if action not in M2M_ACTION:
|
||||
return
|
||||
|
||||
user = current_request.user if current_request else None
|
||||
if not user or not user.is_authenticated:
|
||||
if not instance:
|
||||
return
|
||||
|
||||
sender_name = sender._meta.object_name
|
||||
if sender_name in M2M_NEED_RECORD:
|
||||
org_id = current_org.id
|
||||
remote_addr = get_request_ip(current_request)
|
||||
user = str(user)
|
||||
resource_type, resource_tmpl_add, resource_tmpl_remove = M2M_NEED_RECORD[sender_name]
|
||||
action = M2M_ACTION[action]
|
||||
if action == OperateLog.ACTION_CREATE:
|
||||
resource_tmpl = resource_tmpl_add
|
||||
elif action == OperateLog.ACTION_DELETE:
|
||||
resource_tmpl = resource_tmpl_remove
|
||||
resource_type = instance._meta.verbose_name
|
||||
current_instance = model_to_dict(instance, include_model_fields=False)
|
||||
|
||||
to_create = []
|
||||
objs = model.objects.filter(pk__in=pk_set)
|
||||
instance_id = current_instance.get('id')
|
||||
log_id, before_instance = get_instance_dict_from_cache(instance_id)
|
||||
|
||||
instance_name = instance._meta.object_name
|
||||
instance_value = str(instance)
|
||||
field_name = str(model._meta.verbose_name)
|
||||
objs = model.objects.filter(pk__in=pk_set)
|
||||
objs_display = [str(o) for o in objs]
|
||||
action = M2M_ACTION[action]
|
||||
changed_field = current_instance.get(field_name, [])
|
||||
|
||||
model_name = model._meta.object_name
|
||||
after, before, before_value = None, None, None
|
||||
if action == OperateLog.ACTION_CREATE:
|
||||
before_value = list(set(changed_field) - set(objs_display))
|
||||
elif action == OperateLog.ACTION_DELETE:
|
||||
before_value = list(
|
||||
set(changed_field).symmetric_difference(set(objs_display))
|
||||
)
|
||||
|
||||
for obj in objs:
|
||||
resource = resource_tmpl.format(**{
|
||||
instance_name: instance_value,
|
||||
model_name: str(obj)
|
||||
})[:128] # the `resource` field is only 128 characters long 😔
|
||||
if changed_field:
|
||||
after = {field_name: changed_field}
|
||||
if before_value:
|
||||
before = {field_name: before_value}
|
||||
|
||||
to_create.append(OperateLog(
|
||||
user=user, action=action, resource_type=resource_type,
|
||||
resource=resource, remote_addr=remote_addr, org_id=org_id
|
||||
))
|
||||
OperateLog.objects.bulk_create(to_create)
|
||||
if sorted(str(before)) == sorted(str(after)):
|
||||
return
|
||||
|
||||
create_or_update_operate_log(
|
||||
OperateLog.ACTION_UPDATE, resource_type,
|
||||
resource=instance, log_id=log_id, before=before, after=after
|
||||
)
|
||||
|
||||
|
||||
def signal_of_operate_log_whether_continue(sender, instance, created, update_fields=None):
|
||||
condition = True
|
||||
if not instance:
|
||||
condition = False
|
||||
if instance and getattr(instance, SKIP_SIGNAL, False):
|
||||
condition = False
|
||||
# Terminal create events are generated by the system and are not recorded
|
||||
if instance._meta.object_name == 'Terminal' and created:
|
||||
condition = False
|
||||
# last_login is just the last login time and changes on every login
|
||||
if instance._meta.object_name == 'User' and \
|
||||
update_fields and 'last_login' in update_fields:
|
||||
condition = False
|
||||
# Not in the recording whitelist, skip
|
||||
if sender._meta.object_name not in MODELS_NEED_RECORD:
|
||||
condition = False
|
||||
return condition
|
||||
|
||||
|
||||
@receiver(pre_save)
|
||||
def on_object_pre_create_or_update(sender, instance=None, raw=False, using=None, update_fields=None, **kwargs):
|
||||
ok = signal_of_operate_log_whether_continue(
|
||||
sender, instance, False, update_fields
|
||||
)
|
||||
if not ok:
|
||||
return
|
||||
instance_before_data = {'id': instance.id}
|
||||
raw_instance = type(instance).objects.filter(pk=instance.id).first()
|
||||
if raw_instance:
|
||||
instance_before_data = model_to_dict(raw_instance)
|
||||
operate_log_id = str(uuid.uuid4())
|
||||
instance_before_data['operate_log_id'] = operate_log_id
|
||||
setattr(instance, 'operate_log_id', operate_log_id)
|
||||
cache_instance_before_data(instance_before_data)
|
||||
|
||||
|
||||
@receiver(post_save)
|
||||
def on_object_created_or_update(sender, instance=None, created=False, update_fields=None, **kwargs):
|
||||
# last_login is just the last login time and changes on every login
|
||||
if instance._meta.object_name == 'User' and \
|
||||
update_fields and 'last_login' in update_fields:
|
||||
ok = signal_of_operate_log_whether_continue(
|
||||
sender, instance, created, update_fields
|
||||
)
|
||||
if not ok:
|
||||
return
|
||||
|
||||
log_id, before, after = None, None, None
|
||||
if created:
|
||||
action = models.OperateLog.ACTION_CREATE
|
||||
after = model_to_dict(instance)
|
||||
log_id = getattr(instance, 'operate_log_id', None)
|
||||
else:
|
||||
action = models.OperateLog.ACTION_UPDATE
|
||||
create_operate_log(action, sender, instance)
|
||||
current_instance = model_to_dict(instance)
|
||||
log_id, before, after = get_instance_current_with_cache_diff(current_instance)
|
||||
|
||||
resource_type = sender._meta.verbose_name
|
||||
create_or_update_operate_log(
|
||||
action, resource_type, resource=instance,
|
||||
log_id=log_id, before=before, after=after
|
||||
)
|
||||
|
||||
|
||||
@receiver(pre_delete)
|
||||
def on_object_delete(sender, instance=None, **kwargs):
|
||||
create_operate_log(models.OperateLog.ACTION_DELETE, sender, instance)
|
||||
ok = signal_of_operate_log_whether_continue(sender, instance, False)
|
||||
if not ok:
|
||||
return
|
||||
|
||||
resource_type = sender._meta.verbose_name
|
||||
create_or_update_operate_log(
|
||||
models.OperateLog.ACTION_DELETE, resource_type,
|
||||
resource=instance, before=model_to_dict(instance)
|
||||
)
|
||||
|
||||
|
||||
@receiver(post_user_change_password, sender=User)
|
||||
@@ -316,6 +276,7 @@ def on_user_auth_success(sender, user, request, login_type=None, **kwargs):
|
||||
logger.debug('User login success: {}'.format(user.username))
|
||||
check_different_city_login_if_need(user, request)
|
||||
data = generate_data(user.username, request, login_type=login_type)
|
||||
request.session['login_time'] = data['datetime'].strftime("%Y-%m-%d %H:%M:%S")
|
||||
data.update({'mfa': int(user.mfa_enabled), 'status': True})
|
||||
write_login_log(**data)
|
||||
|
||||
|
||||
@@ -29,7 +29,7 @@ def clean_ftp_log_period():
|
||||
now = timezone.now()
|
||||
days = get_log_keep_day('FTP_LOG_KEEP_DAYS')
|
||||
expired_day = now - datetime.timedelta(days=days)
|
||||
FTPLog.objects.filter(datetime__lt=expired_day).delete()
|
||||
FTPLog.objects.filter(date_start__lt=expired_day).delete()
|
||||
|
||||
|
||||
@register_as_period_task(interval=3600*24)
|
||||
|
||||
@@ -1,9 +1,18 @@
|
||||
import csv
|
||||
import codecs
|
||||
from django.http import HttpResponse
|
||||
|
||||
from itertools import chain
|
||||
|
||||
from django.http import HttpResponse
|
||||
from django.db import models
|
||||
|
||||
from settings.serializers import SettingsSerializer
|
||||
from common.utils import validate_ip, get_ip_city, get_logger
|
||||
from common.db import fields
|
||||
from .const import DEFAULT_CITY
|
||||
from common.utils import validate_ip, get_ip_city
|
||||
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
|
||||
def get_excel_response(filename):
|
||||
@@ -36,3 +45,59 @@ def write_login_log(*args, **kwargs):
|
||||
city = get_ip_city(ip) or DEFAULT_CITY
|
||||
kwargs.update({'ip': ip, 'city': city})
|
||||
UserLoginLog.objects.create(**kwargs)
|
||||
|
||||
|
||||
def get_resource_display(resource):
|
||||
resource_display = str(resource)
|
||||
setting_serializer = SettingsSerializer()
|
||||
label = setting_serializer.get_field_label(resource_display)
|
||||
if label is not None:
|
||||
resource_display = label
|
||||
return resource_display
|
||||
|
||||
|
||||
def model_to_dict_for_operate_log(
|
||||
instance, include_model_fields=True, include_related_fields=True
|
||||
):
|
||||
model_need_continue_fields = ['date_updated']
|
||||
m2m_need_continue_fields = ['history_passwords']
|
||||
opts = instance._meta
|
||||
data = {}
|
||||
for f in chain(opts.concrete_fields, opts.private_fields):
|
||||
if isinstance(f, (models.FileField, models.ImageField)):
|
||||
continue
|
||||
|
||||
if getattr(f, 'attname', None) in model_need_continue_fields:
|
||||
continue
|
||||
|
||||
value = getattr(instance, f.name) or getattr(instance, f.attname)
|
||||
if not isinstance(value, bool) and not value:
|
||||
continue
|
||||
|
||||
if getattr(f, 'primary_key', False):
|
||||
f.verbose_name = 'id'
|
||||
elif isinstance(value, list):
|
||||
value = [str(v) for v in value]
|
||||
|
||||
if include_model_fields or getattr(f, 'primary_key', False):
|
||||
data[str(f.verbose_name)] = value
|
||||
|
||||
if include_related_fields:
|
||||
for f in chain(opts.many_to_many, opts.related_objects):
|
||||
value = []
|
||||
if instance.pk is not None:
|
||||
related_name = getattr(f, 'attname', '') or getattr(f, 'related_name', '')
|
||||
if not related_name or related_name in m2m_need_continue_fields:
|
||||
continue
|
||||
try:
|
||||
value = [str(i) for i in getattr(instance, related_name).all()]
|
||||
except:
|
||||
pass
|
||||
if not value:
|
||||
continue
|
||||
try:
|
||||
field_key = getattr(f, 'verbose_name', None) or f.related_model._meta.verbose_name
|
||||
data[str(field_key)] = value
|
||||
except:
|
||||
pass
|
||||
return data
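The result is a human-readable snapshot keyed by verbose field names, which is what ends up in the operate log's before/after columns. Roughly (field labels and values below are hypothetical):

```python
asset = Asset.objects.get(hostname='web01')
model_to_dict_for_operate_log(asset)
# {'id': UUID('...'), 'Hostname': 'web01', 'IP': '10.0.0.1', 'Nodes': ['Default'], ...}
```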
|
||||
|
||||
@@ -5,6 +5,7 @@ from .connection_token import *
from .token import *
from .mfa import *
from .access_key import *
from .confirm import *
from .login_confirm import *
from .sso import *
from .wecom import *

64
apps/authentication/api/confirm.py
Normal file
@@ -0,0 +1,64 @@
# -*- coding: utf-8 -*-
#
import time

from django.utils.translation import ugettext_lazy as _
from rest_framework.generics import RetrieveAPIView, CreateAPIView
from rest_framework.response import Response
from rest_framework import status

from common.permissions import IsValidUser, UserConfirmation
from ..const import ConfirmType
from ..serializers import ConfirmSerializer


class ConfirmBindORUNBindOAuth(RetrieveAPIView):
    permission_classes = (UserConfirmation.require(ConfirmType.ReLogin),)

    def retrieve(self, request, *args, **kwargs):
        return Response('ok')


class ConfirmApi(RetrieveAPIView, CreateAPIView):
    permission_classes = (IsValidUser,)
    serializer_class = ConfirmSerializer

    def get_confirm_backend(self, confirm_type):
        backend_classes = ConfirmType.get_can_confirm_backend_classes(confirm_type)
        if not backend_classes:
            return
        for backend_cls in backend_classes:
            backend = backend_cls(self.request.user, self.request)
            if not backend.check():
                continue
            return backend

    def retrieve(self, request, *args, **kwargs):
        confirm_type = request.query_params.get('confirm_type')
        backend = self.get_confirm_backend(confirm_type)
        if backend is None:
            msg = _('This action require verify your MFA')
            return Response(data={'error': msg}, status=status.HTTP_404_NOT_FOUND)

        data = {
            'confirm_type': backend.name,
            'content': backend.content,
        }
        return Response(data=data)

    def create(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        validated_data = serializer.validated_data

        confirm_type = validated_data.get('confirm_type')
        mfa_type = validated_data.get('mfa_type')
        secret_key = validated_data.get('secret_key')

        backend = self.get_confirm_backend(confirm_type)
        ok, msg = backend.authenticate(secret_key, mfa_type)
        if ok:
            request.session['CONFIRM_LEVEL'] = ConfirmType.values.index(confirm_type) + 1
            request.session['CONFIRM_TIME'] = int(time.time())
            return Response('ok')
        return Response({'error': msg}, status=400)
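On success, `ConfirmApi.create` records the reached confirm level and a timestamp in the session. A rough sketch of how a permission check could consume those values; the confirm-type ordering and TTL constant below are assumptions for illustration, the real check lives in `common.permissions.UserConfirmation`:

    import time

    CONFIRM_TTL = 5 * 60  # assumed validity window, not taken from this diff

    def is_confirm_still_valid(request, required_type, confirm_types=('relogin', 'password', 'mfa')):
        """Return True if the session holds a recent-enough confirmation of at least the required level."""
        required_level = confirm_types.index(required_type) + 1
        level = request.session.get('CONFIRM_LEVEL', 0)
        confirmed_at = request.session.get('CONFIRM_TIME', 0)
        return level >= required_level and (time.time() - confirmed_at) < CONFIRM_TTL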
@@ -1,85 +1,104 @@
# -*- coding: utf-8 -*-
#
import urllib.parse
import json
from typing import Callable
import abc
import os
import json
import base64
import ctypes

from django.conf import settings
from django.core.cache import cache
from django.shortcuts import get_object_or_404
import urllib.parse
from django.http import HttpResponse
from django.utils import timezone
from django.utils.translation import ugettext as _
from rest_framework.response import Response
from rest_framework.request import Request
from rest_framework.viewsets import GenericViewSet
from rest_framework.decorators import action
from django.shortcuts import get_object_or_404
from rest_framework.exceptions import PermissionDenied
from rest_framework import serializers
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework import status
from rest_framework.request import Request

from applications.models import Application
from authentication.signals import post_auth_failed
from common.utils import get_logger, random_string
from common.mixins.api import SerializerMixin
from common.utils.common import get_file_by_arch
from orgs.mixins.api import RootOrgViewMixin
from common.drf.api import JMSModelViewSet
from common.http import is_true
from orgs.mixins.api import RootOrgViewMixin
from perms.models.base import Action
from perms.utils.application.permission import get_application_actions
from perms.utils.asset.permission import get_asset_actions
from common.const.http import PATCH
from terminal.models import EndpointRule
from ..serializers import (
    ConnectionTokenSerializer, ConnectionTokenSecretSerializer,
    SuperConnectionTokenSerializer, ConnectionTokenDisplaySerializer,
)
from ..models import ConnectionToken

logger = get_logger(__name__)
__all__ = ['UserConnectionTokenViewSet', 'TokenCacheMixin']
__all__ = ['ConnectionTokenViewSet', 'SuperConnectionTokenViewSet']


class ClientProtocolMixin:
    """
    Download the connection file supported by the client; it contains the token and other connection information.

    - [x] RDP
    - [ ] KoKo

    Essentially, this still exposes the token for the client to use.
    """
class ConnectionTokenMixin:
    request: Request
    get_serializer: Callable
    create_token: Callable
    get_serializer_context: Callable

    @staticmethod
    def check_token_valid(token: ConnectionToken):
        is_valid, error = token.check_valid()
        if not is_valid:
            raise PermissionDenied(error)

    @abc.abstractmethod
    def get_request_resource_user(self, serializer):
        raise NotImplementedError

    def get_request_resources(self, serializer):
        user = self.get_request_resource_user(serializer)
        asset = serializer.validated_data.get('asset')
        application = serializer.validated_data.get('application')
        system_user = serializer.validated_data.get('system_user')
        return user, asset, application, system_user

    @staticmethod
    def check_user_has_resource_permission(user, asset, application, system_user):
        from perms.utils.asset import has_asset_system_permission
        from perms.utils.application import has_application_system_permission

        if asset and not has_asset_system_permission(user, asset, system_user):
            error = f'User not has this asset and system user permission: ' \
                    f'user={user.id} system_user={system_user.id} asset={asset.id}'
            raise PermissionDenied(error)

        if application and not has_application_system_permission(user, application, system_user):
            error = f'User not has this application and system user permission: ' \
                    f'user={user.id} system_user={system_user.id} application={application.id}'
            raise PermissionDenied(error)

    def get_smart_endpoint(self, protocol, asset=None, application=None):
        if asset:
            target_instance = asset
            target_ip = asset.get_target_ip()
        elif application:
            target_instance = application
            target_ip = application.get_target_ip()
        else:
            target_instance = None
            target_ip = ''
        endpoint = EndpointRule.match_endpoint(target_ip, protocol, self.request)
        endpoint = EndpointRule.match_endpoint(target_instance, target_ip, protocol, self.request)
        return endpoint

    def get_request_resource(self, serializer):
        asset = serializer.validated_data.get('asset')
        application = serializer.validated_data.get('application')
        system_user = serializer.validated_data['system_user']

        user = serializer.validated_data.get('user')
        if not user or not self.request.user.is_superuser:
            user = self.request.user
        return asset, application, system_user, user

    @staticmethod
    def parse_env_bool(env_key, env_default, true_value, false_value):
        return true_value if is_true(os.getenv(env_key, env_default)) else false_value

    def get_rdp_file_content(self, serializer):
        options = {
    def get_client_protocol_data(self, token: ConnectionToken):
        from assets.models import SystemUser
        protocol = token.system_user.protocol
        username = token.user.username
        rdp_config = ssh_token = ''
        if protocol == SystemUser.Protocol.rdp:
            filename, rdp_config = self.get_rdp_file_info(token)
        elif protocol == SystemUser.Protocol.ssh:
            filename, ssh_token = self.get_ssh_token(token)
        else:
            raise ValueError('Protocol not support: {}'.format(protocol))
        filename = urllib.parse.unquote(filename)
        return {
            "filename": filename,
            "protocol": protocol,
            "username": username,
            "token": ssh_token,
            "config": rdp_config
        }

    def get_rdp_file_info(self, token: ConnectionToken):
        rdp_options = {
            'full address:s': '',
            'username:s': '',
            # 'screen mode id:i': '1',
@@ -105,424 +124,216 @@ class ClientProtocolMixin:
            'bookmarktype:i': '3',
            'use redirection server name:i': '0',
            'smart sizing:i': '1',
            #'drivestoredirect:s': '*',
            # 'drivestoredirect:s': '*',
            # 'domain:s': ''
            # 'alternate shell:s:': '||MySQLWorkbench',
            # 'remoteapplicationname:s': 'Firefox',
            # 'remoteapplicationcmdline:s': '',
        }

        asset, application, system_user, user = self.get_request_resource(serializer)
        # Configure drive redirection (disk mounting)
        drives_redirect = is_true(self.request.query_params.get('drives_redirect'))
        if drives_redirect:
            actions = Action.choices_to_value(token.actions)
            if actions & Action.UPDOWNLOAD == Action.UPDOWNLOAD:
                rdp_options['drivestoredirect:s'] = '*'

        # Configure full screen
        full_screen = is_true(self.request.query_params.get('full_screen'))
        rdp_options['screen mode id:i'] = '2' if full_screen else '1'

        # Configure the RDP server address
        endpoint = self.get_smart_endpoint(
            protocol='rdp', asset=token.asset, application=token.application
        )
        rdp_options['full address:s'] = f'{endpoint.host}:{endpoint.rdp_port}'

        # Configure the username
        rdp_options['username:s'] = '{}|{}'.format(token.user.username, str(token.id))
        if token.system_user.ad_domain:
            rdp_options['domain:s'] = token.system_user.ad_domain

        # Configure width and height
        height = self.request.query_params.get('height')
        width = self.request.query_params.get('width')
        full_screen = is_true(self.request.query_params.get('full_screen'))
        drives_redirect = is_true(self.request.query_params.get('drives_redirect'))
        token, secret = self.create_token(user, asset, application, system_user)

        # Configure drive redirection (disk mounting)
        if drives_redirect:
            actions = 0
            if asset:
                actions = get_asset_actions(user, asset, system_user)
            elif application:
                actions = get_application_actions(user, application, system_user)

            if actions & Action.UPDOWNLOAD == Action.UPDOWNLOAD:
                options['drivestoredirect:s'] = '*'

        # Full screen
        options['screen mode id:i'] = '2' if full_screen else '1'

        # RDP server address
        endpoint = self.get_smart_endpoint(
            protocol='rdp', asset=asset, application=application
        )
        options['full address:s'] = f'{endpoint.host}:{endpoint.rdp_port}'
        # Username
        options['username:s'] = '{}|{}'.format(user.username, token)
        if system_user.ad_domain:
            options['domain:s'] = system_user.ad_domain
        # Width and height
        if width and height:
            options['desktopwidth:i'] = width
            options['desktopheight:i'] = height
            options['winposstr:s:'] = f'0,1,0,0,{width},{height}'
            rdp_options['desktopwidth:i'] = width
            rdp_options['desktopheight:i'] = height
            rdp_options['winposstr:s:'] = f'0,1,0,0,{width},{height}'

        options['session bpp:i'] = os.getenv('JUMPSERVER_COLOR_DEPTH', '32')
        options['audiomode:i'] = self.parse_env_bool('JUMPSERVER_DISABLE_AUDIO', 'false', '2', '0')
        # Configure other options
        rdp_options['session bpp:i'] = os.getenv('JUMPSERVER_COLOR_DEPTH', '32')
        rdp_options['audiomode:i'] = self.parse_env_bool('JUMPSERVER_DISABLE_AUDIO', 'false', '2', '0')

        if asset:
            name = asset.hostname
        elif application:
            name = application.name
            application.get_rdp_remote_app_setting()

            app = f'||jmservisor'
            options['remoteapplicationmode:i'] = '1'
            options['alternate shell:s'] = app
            options['remoteapplicationprogram:s'] = app
            options['remoteapplicationname:s'] = name
            options['remoteapplicationcmdline:s'] = '- ' + self.get_encrypt_cmdline(application)
        if token.asset:
            name = token.asset.hostname
        elif token.application and token.application.category_remote_app:
            app = '||jmservisor'
            name = token.application.name
            rdp_options['remoteapplicationmode:i'] = '1'
            rdp_options['alternate shell:s'] = app
            rdp_options['remoteapplicationprogram:s'] = app
            rdp_options['remoteapplicationname:s'] = name
        else:
            name = '*'
        prefix_name = f'{token.user.username}-{name}'
        filename = self.get_connect_filename(prefix_name)

        content = ''
        for k, v in options.items():
        for k, v in rdp_options.items():
            content += f'{k}:{v}\n'
        return name, content

    def get_ssh_token(self, serializer):
        asset, application, system_user, user = self.get_request_resource(serializer)
        token, secret = self.create_token(user, asset, application, system_user)
        if asset:
            name = asset.hostname
        elif application:
            name = application.name
        return filename, content

    @staticmethod
    def get_connect_filename(prefix_name):
        prefix_name = prefix_name.replace('/', '_')
        prefix_name = prefix_name.replace('\\', '_')
        prefix_name = prefix_name.replace('.', '_')
        filename = f'{prefix_name}-jumpserver'
        filename = urllib.parse.quote(filename)
        return filename

    def get_ssh_token(self, token: ConnectionToken):
        if token.asset:
            name = token.asset.hostname
        elif token.application:
            name = token.application.name
        else:
            name = '*'
        prefix_name = f'{token.user.username}-{name}'
        filename = self.get_connect_filename(prefix_name)

        endpoint = self.get_smart_endpoint(
            protocol='ssh', asset=asset, application=application
            protocol='ssh', asset=token.asset, application=token.application
        )
        content = {
        data = {
            'ip': endpoint.host,
            'port': str(endpoint.ssh_port),
            'username': f'JMS-{token}',
            'password': secret
            'username': 'JMS-{}'.format(str(token.id)),
            'password': token.secret
        }
        token = json.dumps(content)
        return name, token

    def get_encrypt_cmdline(self, app: Application):
        parameters = app.get_rdp_remote_app_setting()['parameters']
        parameters = parameters.encode('ascii')

        lib_path = get_file_by_arch('xpack/libs', 'librailencrypt.so')
        lib = ctypes.CDLL(lib_path)
        lib.encrypt.argtypes = [ctypes.c_char_p, ctypes.c_int]
        lib.encrypt.restype = ctypes.c_char_p

        rst = lib.encrypt(parameters, len(parameters))
        rst = rst.decode('ascii')
        return rst

    @action(methods=['POST', 'GET'], detail=False, url_path='rdp/file')
    def get_rdp_file(self, request, *args, **kwargs):
        if self.request.method == 'GET':
            data = self.request.query_params
        else:
            data = self.request.data
        serializer = self.get_serializer(data=data)
        serializer.is_valid(raise_exception=True)
        name, data = self.get_rdp_file_content(serializer)
        response = HttpResponse(data, content_type='application/octet-stream')
        filename = "{}-{}-jumpserver.rdp".format(self.request.user.username, name)
        filename = urllib.parse.quote(filename)
        response['Content-Disposition'] = 'attachment; filename*=UTF-8\'\'%s' % filename
        return response

    def get_valid_serializer(self):
        if self.request.method == 'GET':
            data = self.request.query_params
        else:
            data = self.request.data
        serializer = self.get_serializer(data=data)
        serializer.is_valid(raise_exception=True)
        return serializer

    def get_client_protocol_data(self, serializer):
        asset, application, system_user, user = self.get_request_resource(serializer)
        protocol = system_user.protocol
        username = user.username
        config, token = '', ''
        if protocol == 'rdp':
            name, config = self.get_rdp_file_content(serializer)
        elif protocol == 'ssh':
            name, token = self.get_ssh_token(serializer)
        else:
            raise ValueError('Protocol not support: {}'.format(protocol))

        filename = "{}-{}-jumpserver".format(username, name)
        data = {
            "filename": filename,
            "protocol": system_user.protocol,
            "username": username,
            "token": token,
            "config": config
        }
        return data

    @action(methods=['POST', 'GET'], detail=False, url_path='client-url')
    def get_client_protocol_url(self, request, *args, **kwargs):
        serializer = self.get_valid_serializer()
        try:
            protocol_data = self.get_client_protocol_data(serializer)
        except ValueError as e:
            return Response({'error': str(e)}, status=401)

        protocol_data = json.dumps(protocol_data).encode()
        protocol_data = base64.b64encode(protocol_data).decode()
        data = {
            'url': 'jms://{}'.format(protocol_data),
        }
        return Response(data=data)
        token = json.dumps(data)
        return filename, token
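The SSH branch above packs the connection parameters into a JSON string: `ip`, `port`, a `username` of the form `JMS-<token id>`, and the token secret as the password. A sketch of how a client-side helper might consume such a payload, using paramiko purely as an example library (it is not part of this diff):

    import json
    import paramiko

    def connect_with_ssh_token(token_json: str) -> paramiko.SSHClient:
        """Open an SSH session through the JumpServer endpoint described by the token payload."""
        info = json.loads(token_json)
        client = paramiko.SSHClient()
        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        client.connect(
            hostname=info['ip'],
            port=int(info['port']),
            username=info['username'],   # e.g. 'JMS-<token id>'
            password=info['password'],
        )
        return client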
class SecretDetailMixin:
    valid_token: Callable
    request: Request
    get_serializer: Callable

    @staticmethod
    def _get_application_secret_detail(application):
        gateway = None
        remote_app = None
        asset = None

        if application.category_remote_app:
            remote_app = application.get_rdp_remote_app_setting()
            asset = application.get_remote_app_asset()
            domain = asset.domain
        else:
            domain = application.domain

        if domain and domain.has_gateway():
            gateway = domain.random_gateway()

        return {
            'asset': asset,
            'application': application,
            'gateway': gateway,
            'domain': domain,
            'remote_app': remote_app,
        }

    @staticmethod
    def _get_asset_secret_detail(asset):
        gateway = None
        if asset and asset.domain and asset.domain.has_gateway():
            gateway = asset.domain.random_gateway()

        return {
            'asset': asset,
            'application': None,
            'domain': asset.domain,
            'gateway': gateway,
            'remote_app': None,
        }

    @action(methods=['POST'], detail=False, url_path='secret-info/detail')
    def get_secret_detail(self, request, *args, **kwargs):
        perm_required = 'authentication.view_connectiontokensecret'

        # Very important API: check the permission again at the logic layer as a double safeguard
        if not request.user.has_perm(perm_required):
            raise PermissionDenied('Not allow to view secret')

        token = request.data.get('token', '')
        try:
            value, user, system_user, asset, app, expired_at, actions = self.valid_token(token)
        except serializers.ValidationError as e:
            post_auth_failed.send(
                sender=self.__class__, username='', request=self.request,
                reason=_('Invalid token')
            )
            raise e

        data = dict(
            id=token, secret=value.get('secret', ''),
            user=user, system_user=system_user,
            expired_at=expired_at, actions=actions
        )
        cmd_filter_kwargs = {
            'system_user_id': system_user.id,
            'user_id': user.id,
        }
        if asset:
            asset_detail = self._get_asset_secret_detail(asset)
            system_user.load_asset_more_auth(asset.id, user.username, user.id)
            data['type'] = 'asset'
            data.update(asset_detail)
            cmd_filter_kwargs['asset_id'] = asset.id
        else:
            app_detail = self._get_application_secret_detail(app)
            system_user.load_app_more_auth(app.id, user.username, user.id)
            data['type'] = 'application'
            data.update(app_detail)
            cmd_filter_kwargs['application_id'] = app.id

        from assets.models import CommandFilterRule
        cmd_filter_rules = CommandFilterRule.get_queryset(**cmd_filter_kwargs)
        data['cmd_filter_rules'] = cmd_filter_rules

        serializer = self.get_serializer(data)
        return Response(data=serializer.data, status=200)


class TokenCacheMixin:
    """ The endpoint smart view uses this class to resolve the asset/application from the token """
    CACHE_KEY_PREFIX = 'CONNECTION_TOKEN_{}'

    def get_token_cache_key(self, token):
        return self.CACHE_KEY_PREFIX.format(token)

    def get_token_ttl(self, token):
        key = self.get_token_cache_key(token)
        return cache.ttl(key)

    def set_token_to_cache(self, token, value, ttl=5*60):
        key = self.get_token_cache_key(token)
        cache.set(key, value, timeout=ttl)

    def get_token_from_cache(self, token):
        key = self.get_token_cache_key(token)
        value = cache.get(key, None)
        return value

    def renewal_token(self, token, ttl=5*60):
        value = self.get_token_from_cache(token)
        if value:
            pre_ttl = self.get_token_ttl(token)
            self.set_token_to_cache(token, value, ttl)
            post_ttl = self.get_token_ttl(token)
            ok = True
            msg = f'{pre_ttl}s is renewed to {post_ttl}s.'
        else:
            ok = False
            msg = 'Token is not found.'
        data = {
            'ok': ok,
            'msg': msg
        }
        return data
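`TokenCacheMixin` stores the token value under a prefixed cache key and reads the remaining TTL back with `cache.ttl()`, which assumes a backend that exposes TTLs (django-redis does). A standalone sketch of the same renew-by-rewrite idea:

    from django.core.cache import cache  # assumed to be a django-redis backend providing cache.ttl()

    KEY = 'CONNECTION_TOKEN_{}'

    def renew(token_id, ttl=5 * 60):
        key = KEY.format(token_id)
        value = cache.get(key)
        if value is None:
            return {'ok': False, 'msg': 'Token is not found.'}
        pre_ttl = cache.ttl(key)
        cache.set(key, value, timeout=ttl)  # re-setting the key resets its expiry
        return {'ok': True, 'msg': f'{pre_ttl}s is renewed to {cache.ttl(key)}s.'}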
class UserConnectionTokenViewSet(
    RootOrgViewMixin, SerializerMixin, ClientProtocolMixin,
    SecretDetailMixin, TokenCacheMixin, GenericViewSet
):
class ConnectionTokenViewSet(ConnectionTokenMixin, RootOrgViewMixin, JMSModelViewSet):
    filterset_fields = (
        'type', 'user_display', 'system_user_display',
        'application_display', 'asset_display'
    )
    search_fields = filterset_fields
    serializer_classes = {
        'default': ConnectionTokenSerializer,
        'list': ConnectionTokenDisplaySerializer,
        'retrieve': ConnectionTokenDisplaySerializer,
        'get_secret_detail': ConnectionTokenSecretSerializer,
    }
    rbac_perms = {
        'GET': 'authentication.view_connectiontoken',
        'retrieve': 'authentication.view_connectiontoken',
        'create': 'authentication.add_connectiontoken',
        'renewal': 'authentication.add_superconnectiontoken',
        'expire': 'authentication.add_connectiontoken',
        'get_secret_detail': 'authentication.view_connectiontokensecret',
        'get_rdp_file': 'authentication.add_connectiontoken',
        'get_client_protocol_url': 'authentication.add_connectiontoken',
    }

    @staticmethod
    def check_resource_permission(user, asset, application, system_user):
        from perms.utils.asset import has_asset_system_permission
        from perms.utils.application import has_application_system_permission
    def get_queryset(self):
        return ConnectionToken.objects.filter(user=self.request.user)

        if asset and not has_asset_system_permission(user, asset, system_user):
            error = f'User not has this asset and system user permission: ' \
                    f'user={user.id} system_user={system_user.id} asset={asset.id}'
            raise PermissionDenied(error)
        if application and not has_application_system_permission(user, application, system_user):
            error = f'User not has this application and system user permission: ' \
                    f'user={user.id} system_user={system_user.id} application={application.id}'
            raise PermissionDenied(error)
        return True
    def get_request_resource_user(self, serializer):
        return self.request.user

    @action(methods=[PATCH], detail=False)
    def renewal(self, request, *args, **kwargs):
        """ Renew the token """
        perm_required = 'authentication.add_superconnectiontoken'
        if not request.user.has_perm(perm_required):
            raise PermissionDenied('No permissions for authentication.add_superconnectiontoken')
        token = request.data.get('token', '')
        data = self.renewal_token(token)
        status_code = 200 if data.get('ok') else 404
        return Response(data=data, status=status_code)

    def create_token(self, user, asset, application, system_user, ttl=5*60):
        # Emphasize the permission check once more
        perm_required = 'authentication.add_superconnectiontoken'
        if user != self.request.user and not self.request.user.has_perm(perm_required):
            raise PermissionDenied('Only can create user token')
        self.check_resource_permission(user, asset, application, system_user)
        token = random_string(36)
        secret = random_string(16)
        value = {
            'id': token,
            'secret': secret,
            'user': str(user.id),
            'username': user.username,
            'system_user': str(system_user.id),
            'system_user_name': system_user.name,
            'created_by': str(self.request.user),
            'date_created': str(timezone.now())
        }

        if asset:
            value.update({
                'type': 'asset',
                'asset': str(asset.id),
                'hostname': asset.hostname,
            })
        elif application:
            value.update({
                'type': 'application',
                'application': application.id,
                'application_name': str(application)
            })

        self.set_token_to_cache(token, value, ttl)
        return token, secret

    def create(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)

        asset, application, system_user, user = self.get_request_resource(serializer)
        token, secret = self.create_token(user, asset, application, system_user)
        tp = 'app' if application else 'asset'
        data = {
            "id": token, 'secret': secret,
            'type': tp, 'protocol': system_user.protocol,
            'expire_time': self.get_token_ttl(token),
        }
        return Response(data, status=201)

    def valid_token(self, token):
        from users.models import User
        from assets.models import SystemUser, Asset
        from applications.models import Application
        from perms.utils.asset.permission import validate_permission as asset_validate_permission
        from perms.utils.application.permission import validate_permission as app_validate_permission

        value = self.get_token_from_cache(token)
        if not value:
            raise serializers.ValidationError('Token not found')

        user = get_object_or_404(User, id=value.get('user'))
        if not user.is_valid:
            raise serializers.ValidationError("User not valid, disabled or expired")

        system_user = get_object_or_404(SystemUser, id=value.get('system_user'))
        asset = None
        app = None
        if value.get('type') == 'asset':
            asset = get_object_or_404(Asset, id=value.get('asset'))
            if not asset.is_active:
                raise serializers.ValidationError("Asset disabled")
            has_perm, actions, expired_at = asset_validate_permission(user, asset, system_user)
    def get_object(self):
        if self.request.user.is_service_account:
            # TODO: components fetch token details here; move this into the Super-connection-token API later
            obj = get_object_or_404(ConnectionToken, pk=self.kwargs.get('pk'))
        else:
            app = get_object_or_404(Application, id=value.get('application'))
            has_perm, actions, expired_at = app_validate_permission(user, app, system_user)
            obj = super(ConnectionTokenViewSet, self).get_object()
        return obj

        if not has_perm:
            raise serializers.ValidationError('Permission expired or invalid')
        return value, user, system_user, asset, app, expired_at, actions
    def create_connection_token(self):
        data = self.request.query_params if self.request.method == 'GET' else self.request.data
        serializer = self.get_serializer(data=data)
        serializer.is_valid(raise_exception=True)
        self.perform_create(serializer)
        token: ConnectionToken = serializer.instance
        return token

    def get(self, request):
        token = request.query_params.get('token')
        value = self.get_token_from_cache(token)
        if not value:
            return Response('', status=404)
        return Response(value)
    def perform_create(self, serializer):
        user, asset, application, system_user = self.get_request_resources(serializer)
        self.check_user_has_resource_permission(user, asset, application, system_user)
        return super(ConnectionTokenViewSet, self).perform_create(serializer)

    @action(methods=['POST'], detail=False, url_path='secret-info/detail')
    def get_secret_detail(self, request, *args, **kwargs):
        # Very important API: check the permission again at the logic layer as a double safeguard
        perm_required = 'authentication.view_connectiontokensecret'
        if not request.user.has_perm(perm_required):
            raise PermissionDenied('Not allow to view secret')
        token_id = request.data.get('token') or ''
        token = get_object_or_404(ConnectionToken, pk=token_id)
        self.check_token_valid(token)
        token.load_system_user_auth()
        serializer = self.get_serializer(instance=token)
        return Response(serializer.data, status=status.HTTP_200_OK)

    @action(methods=['POST', 'GET'], detail=False, url_path='rdp/file')
    def get_rdp_file(self, request, *args, **kwargs):
        token = self.create_connection_token()
        self.check_token_valid(token)
        filename, content = self.get_rdp_file_info(token)
        filename = '{}.rdp'.format(filename)
        response = HttpResponse(content, content_type='application/octet-stream')
        response['Content-Disposition'] = 'attachment; filename*=UTF-8\'\'%s' % filename
        return response

    @action(methods=['POST', 'GET'], detail=False, url_path='client-url')
    def get_client_protocol_url(self, request, *args, **kwargs):
        token = self.create_connection_token()
        self.check_token_valid(token)
        try:
            protocol_data = self.get_client_protocol_data(token)
        except ValueError as e:
            return Response(data={'error': str(e)}, status=status.HTTP_400_BAD_REQUEST)
        protocol_data = json.dumps(protocol_data).encode()
        protocol_data = base64.b64encode(protocol_data).decode()
        data = {
            'url': 'jms://{}'.format(protocol_data)
        }
        return Response(data=data)
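`get_client_protocol_url` returns a `jms://` URL whose payload is simply the protocol data dict, JSON-encoded and then base64-encoded. A sketch of the reverse operation a desktop client could perform; the helper name is illustrative only:

    import base64
    import json

    def parse_jms_url(url: str) -> dict:
        """Decode the payload of a jms:// client URL back into the protocol data dict."""
        assert url.startswith('jms://')
        payload = url[len('jms://'):]
        data = json.loads(base64.b64decode(payload).decode())
        # Expected keys per get_client_protocol_data: filename, protocol, username, token, config
        return data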
    @action(methods=['PATCH'], detail=True)
    def expire(self, request, *args, **kwargs):
        instance = self.get_object()
        instance.expire()
        return Response(status=status.HTTP_204_NO_CONTENT)


class SuperConnectionTokenViewSet(ConnectionTokenViewSet):
    serializer_classes = {
        'default': SuperConnectionTokenSerializer,
    }
    rbac_perms = {
        'create': 'authentication.add_superconnectiontoken',
        'renewal': 'authentication.add_superconnectiontoken'
    }

    def get_request_resource_user(self, serializer):
        return serializer.validated_data.get('user')

    @action(methods=['PATCH'], detail=False)
    def renewal(self, request, *args, **kwargs):
        from common.utils.timezone import as_current_tz

        token_id = request.data.get('token') or ''
        token = get_object_or_404(ConnectionToken, pk=token_id)
        date_expired = as_current_tz(token.date_expired)
        if token.is_expired:
            raise PermissionDenied('Token is expired at: {}'.format(date_expired))
        token.renewal()
        data = {
            'ok': True,
            'msg': f'Token is renewed, date expired: {date_expired}'
        }
        return Response(data=data, status=status.HTTP_200_OK)
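A rough sketch of how a component could call the super-token renewal action above with the `requests` library; the host, URL path and auth header are assumptions for illustration, only the request body shape mirrors the code:

    import requests

    # All values below are placeholders except the {'token': ...} body shape.
    resp = requests.patch(
        'https://jumpserver.example.com/api/v1/authentication/super-connection-token/renewal/',
        headers={'Authorization': 'Bearer <access key signature>'},
        json={'token': '<connection token id>'},
    )
    print(resp.status_code, resp.json())  # e.g. 200 {'ok': True, 'msg': 'Token is renewed, ...'}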
@@ -2,10 +2,11 @@ from rest_framework.views import APIView
from rest_framework.request import Request
from rest_framework.response import Response

from users.permissions import IsAuthPasswdTimeValid
from users.models import User
from common.utils import get_logger
from common.permissions import UserConfirmation
from common.mixins.api import RoleUserMixin, RoleAdminMixin
from authentication.const import ConfirmType
from authentication import errors

logger = get_logger(__file__)
@@ -26,9 +27,8 @@ class DingTalkQRUnBindBase(APIView):


class DingTalkQRUnBindForUserApi(RoleUserMixin, DingTalkQRUnBindBase):
    permission_classes = (IsAuthPasswdTimeValid,)
    permission_classes = (UserConfirmation.require(ConfirmType.ReLogin),)


class DingTalkQRUnBindForAdminApi(RoleAdminMixin, DingTalkQRUnBindBase):
    user_id_url_kwarg = 'user_id'

@@ -2,10 +2,11 @@ from rest_framework.views import APIView
from rest_framework.request import Request
from rest_framework.response import Response

from users.permissions import IsAuthPasswdTimeValid
from users.models import User
from common.utils import get_logger
from common.permissions import UserConfirmation
from common.mixins.api import RoleUserMixin, RoleAdminMixin
from authentication.const import ConfirmType
from authentication import errors

logger = get_logger(__file__)
@@ -26,7 +27,7 @@ class FeiShuQRUnBindBase(APIView):


class FeiShuQRUnBindForUserApi(RoleUserMixin, FeiShuQRUnBindBase):
    permission_classes = (IsAuthPasswdTimeValid,)
    permission_classes = (UserConfirmation.require(ConfirmType.ReLogin),)


class FeiShuQRUnBindForAdminApi(RoleAdminMixin, FeiShuQRUnBindBase):

@@ -6,6 +6,8 @@ from rest_framework.permissions import AllowAny

from common.utils import get_logger
from .. import errors, mixins
from django.contrib.auth import logout as auth_logout


__all__ = ['TicketStatusApi']
logger = get_logger(__name__)
@@ -17,7 +19,15 @@ class TicketStatusApi(mixins.AuthMixin, APIView):
    def get(self, request, *args, **kwargs):
        try:
            self.check_user_login_confirm()
            self.request.session['auth_third_party_done'] = 1
            return Response({"msg": "ok"})
        except errors.LoginConfirmOtherError as e:
            reason = e.msg
            username = e.username
            self.send_auth_signal(success=False, username=username, reason=reason)
            # If this is a third-party login, the user should be logged out at this point
            auth_logout(request)
            return Response(e.as_data(), status=200)
        except errors.NeedMoreInfoError as e:
            return Response(e.as_data(), status=200)

@@ -25,5 +35,5 @@ class TicketStatusApi(mixins.AuthMixin, APIView):
        ticket = self.get_ticket()
        if ticket:
            request.session.pop('auth_ticket_id', '')
            ticket.close(processor=self.get_user_from_session())
            ticket.close()
        return Response('', status=200)
Some files were not shown because too many files have changed in this diff.