Mirror of https://github.com/jumpserver/jumpserver.git, synced 2025-12-15 16:42:34 +00:00
Compare commits
199 Commits
Commit SHAs:

d042de7b09, 5e6e97c822, f146873501, 35dfdf831a, 2b31cb2806, e43ffa7994, b0a9a83231, 7da14571ac,
73b67da4c0, 4bf2371cf0, 075cbc497b, 1a0d9a20f9, fdb8416cac, e2d5b69510, 9944474ba0, ce6b9de07c,
b97759687d, 68b6236de2, 6616374c30, 682f6b2fb9, a2e3979916, f11d3c1cf2, f0bad5f107, ad3bc72dfb,
de9c69843d, d2678e2a43, 632ea87f07, 4e7e1d5e15, 1ac8537a34, dcaa798c2e, 8da4027e32, 32e2d19553,
48d1eecc08, 0ab88ce754, bee5500425, 7c03af7668, 7a61a671a2, 4a1fc0e2ac, 1e5e87e62a, 96c3b81383,
297fedeffa, 9cd5675209, a5179d1596, c2463fe573, 2f8042141c, 06a4e0d395, bb9d92fd7e, 749f9d3f81,
03ad7777d0, 7e4f20f443, 607b7fd29f, 8895763ab4, 8b1e202e68, 32fe8f674c, b4ef7bef55, 31982c6547,
67d3b63c6d, f34fb5d9d5, 3ec78ff9be, f361621ab5, cd9587f68e, 2ff01a4bb3, 06ed358fbc, 3e11249e8c,
6b5435b768, 7d5a13de38, 07bd44990b, e4938ffc85, 85d226eb07, c9a9ca7923, 306f7a08d1, b86f9ac871,
2562386fe0, 61d4311e24, 370e1628be, adf5c4a7b9, 9fc1ae7b6d, 313757dbe9, b32e352b24, b950b48112,
519eb3bef2, 4e55d0f1e4, 2b3bb65114, b597cfcd19, 33952b2333, a47a9c0345, 4e0c056867, a9b5599db5,
8a2eb70ad2, 776234e8cc, e2406955bc, dba9550bc0, 6ad1362a3f, dfa2f7d6c9, c55e2db75e, fd3a4d887e,
42afc1e0bf, 50c89431df, f1f5017be3, 9b85aafa52, 817268d7cd, d3bbfdc458, 18a390d66a, 73b57a662e,
ea325f6e52, 1216f15e45, cc3911d2f1, 36c083f674, 98c6a93658, adc607dafe, 1e85805ea3, 957d3660ce,
049f6dca67, 7f4377b0e8, 7dfd0ee8fe, 41f375a4f7, a50dfe9c18, bd8a1a7d0e, 5546719712, 068b39d922,
2e1763cce7, ff9e470ce2, 3080bf3647, 0b04821794, 296bb88834, c57cce8881, 174cc16980, 5b2649f775,
83829df70c, 64641a18e6, 09303ecc56, 5f48e7aeb2, 25dfce621b, 102d3b590b, a45f581b0e, b3991d0388,
184e8b31e6, 615bcadf62, 7b2f813e7f, 81170b4b7b, c4eacbabc6, ccb0509d85, 886393c539, 15b0ad9c12,
19e2a5b9f9, 0aa2c2016f, 935947c97a, 3e7e01418d, 7f42e59714, 840e5e8863, 24fb8b2a89, c1bf854824,
ab23a357f7, 78bf6f5817, 91a26abf9e, d7e7c62c7a, 09bdff4a67, 56328e112a, 1d15f7125e, e6b17da57d,
1870fc97d5, f548b4bd2b, a56ac7b34e, 51c9a89b1f, 6f3ead3c42, 1036d1c132, 5de5fa2e96, 19043d0a66,
bc3e50a529, a7ab7da61c, b483f78d52, 88d8a3326f, 8f7dcd512a, d795867916, 4c4f544f0d, 8ec26dea43,
799d1e4043, b03642847e, a4e635bff0, 83cc339d4b, bb9790a50f, 9be3cbb936, e599bca951, 501ad698b7,
50e6c96358, 7cf6e54f01, 709e7af953, 93474766f6, 542eb25e7b, 609d2710fa, d852d2f670, 087a3f2914,
36f113e307, 23afe81ff5, dd5b2b9101, d363118911, 351d4d8123, efb9f48c6f, d04b90b8e8
@@ -23,6 +23,7 @@ RUN sed -i 's/deb.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list \
|
||||
&& sed -i 's/security.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list \
|
||||
&& apt update \
|
||||
&& grep -v '^#' ./requirements/deb_buster_requirements.txt | xargs apt -y install \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& localedef -c -f UTF-8 -i zh_CN zh_CN.UTF-8 \
|
||||
&& cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
|
||||
|
||||
|
||||
187
README.md
@@ -1,121 +1,17 @@
|
||||
# JumpServer 多云环境下更好用的堡垒机
|
||||
|
||||
[](https://www.python.org/)
|
||||
[](https://www.djangoproject.com/)
|
||||
[](https://www.gnu.org/licenses/old-licenses/gpl-2.0.html)
|
||||
[](https://github.com/jumpserver/jumpserver/releases)
|
||||
[](https://hub.docker.com/u/jumpserver)
|
||||
|
||||
- [ENGLISH](https://github.com/jumpserver/jumpserver/blob/master/README_EN.md)
|
||||
|
||||
## 紧急BUG修复通知
|
||||
JumpServer发现远程执行漏洞,请尽快修复
|
||||
|
||||
非常感谢 **reactivity of Alibaba Hackerone bug bounty program**(瑞典) 向我们报告了此 BUG
|
||||
|
||||
**影响版本:**
|
||||
```
|
||||
< v2.6.2
|
||||
< v2.5.4
|
||||
< v2.4.5
|
||||
= v1.5.9
|
||||
>= v1.5.3
|
||||
```
|
||||
**安全版本:**
|
||||
```
|
||||
>= v2.6.2
|
||||
>= v2.5.4
|
||||
>= v2.4.5
|
||||
= v1.5.9 (版本号没变)
|
||||
< v1.5.3
|
||||
```
|
||||
|
||||
**修复方案:**
|
||||
|
||||
将JumpServer升级至安全版本;
|
||||
|
||||
**临时修复方案:**
|
||||
|
||||
修改 Nginx 配置文件屏蔽漏洞接口
|
||||
|
||||
```
|
||||
/api/v1/authentication/connection-token/
|
||||
/api/v1/users/connection-token/
|
||||
```
|
||||
|
||||
Nginx 配置文件位置
|
||||
```
|
||||
# 社区老版本
|
||||
/etc/nginx/conf.d/jumpserver.conf
|
||||
|
||||
# 企业老版本
|
||||
jumpserver-release/nginx/http_server.conf
|
||||
|
||||
# 新版本在
|
||||
jumpserver-release/compose/config_static/http_server.conf
|
||||
```
|
||||
|
||||
修改 Nginx 配置文件实例
|
||||
```
|
||||
### 保证在 /api 之前 和 / 之前
|
||||
location /api/v1/authentication/connection-token/ {
|
||||
return 403;
|
||||
}
|
||||
|
||||
location /api/v1/users/connection-token/ {
|
||||
return 403;
|
||||
}
|
||||
### 新增以上这些
|
||||
|
||||
location /api/ {
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_pass http://core:8080;
|
||||
}
|
||||
|
||||
...
|
||||
```
|
||||
|
||||
修改完成后重启 nginx
|
||||
|
||||
```
|
||||
docker方式:
|
||||
docker restart jms_nginx
|
||||
|
||||
nginx方式:
|
||||
systemctl restart nginx
|
||||
|
||||
```
|
||||
|
||||
**修复验证**
|
||||
|
||||
```
|
||||
$ wget https://github.com/jumpserver/jumpserver/releases/download/v2.6.2/jms_bug_check.sh
|
||||
|
||||
# 使用方法 bash jms_bug_check.sh HOST
|
||||
$ bash jms_bug_check.sh demo.jumpserver.org
|
||||
漏洞已修复
|
||||
```
|
||||
|
||||
**入侵检测**
|
||||
|
||||
下载脚本到 jumpserver 日志目录,这个目录中存在 gunicorn.log,然后执行
|
||||
|
||||
```
|
||||
$ pwd
|
||||
/opt/jumpserver/core/logs
|
||||
|
||||
$ ls gunicorn.log
|
||||
gunicorn.log
|
||||
|
||||
$ wget 'https://github.com/jumpserver/jumpserver/releases/download/v2.6.2/jms_check_attack.sh'
|
||||
$ bash jms_check_attack.sh
|
||||
系统未被入侵
|
||||
```
|
||||
|安全通知|
|
||||
|------------------|
|
||||
|2021年1月15日 JumpServer 发现远程执行漏洞,请尽快修复 [详见](https://github.com/jumpserver/jumpserver/issues/5533), 非常感谢 **reactivity of Alibaba Hackerone bug bounty program**(瑞典) 向我们报告了此 BUG|
|
||||
|
||||
--------------------------
|
||||
|
||||
JumpServer 正在寻找开发者,一起为改变世界做些贡献吧,哪怕一点点,联系我 <ibuler@fit2cloud.com>
|
||||
|
||||
JumpServer 是全球首款开源的堡垒机,使用 GNU GPL v2.0 开源协议,是符合 4A 规范的运维安全审计系统。
|
||||
|
||||
JumpServer 使用 Python / Django 为主进行开发,遵循 Web 2.0 规范,配备了业界领先的 Web Terminal 方案,交互界面美观、用户体验好。
|
||||
@@ -124,7 +20,6 @@ JumpServer 采纳分布式架构,支持多机房跨区域部署,支持横向
|
||||
|
||||
改变世界,从一点点开始。
|
||||
|
||||
> 注: [KubeOperator](https://github.com/KubeOperator/KubeOperator) 是 JumpServer 团队在 Kubernetes 领域的又一全新力作,欢迎关注和使用。
|
||||
|
||||
## 特色优势
|
||||
|
||||
@@ -136,21 +31,6 @@ JumpServer 采纳分布式架构,支持多机房跨区域部署,支持横向
|
||||
- 多租户: 一套系统,多个子公司和部门同时使用;
|
||||
- 多应用支持: 数据库,Windows远程应用,Kubernetes。
|
||||
|
||||
## 版本说明
|
||||
|
||||
自 v2.0.0 发布后, JumpServer 版本号命名将变更为:v大版本.功能版本.Bug修复版本。比如:
|
||||
|
||||
```
|
||||
v2.0.1 是 v2.0.0 之后的Bug修复版本;
|
||||
v2.1.0 是 v2.0.0 之后的功能版本。
|
||||
```
|
||||
|
||||
像其它优秀开源项目一样,JumpServer 每个月会发布一个功能版本,并同时维护 3 个功能版本。比如:
|
||||
|
||||
```
|
||||
在 v2.4 发布前,我们会同时维护 v2.1、v2.2、v2.3;
|
||||
在 v2.4 发布后,我们会同时维护 v2.2、v2.3、v2.4;v2.1 会停止维护。
|
||||
```
|
||||
|
||||
## 功能列表
|
||||
|
||||
@@ -180,8 +60,8 @@ v2.1.0 是 v2.0.0 之后的功能版本。
|
||||
<td>RADIUS 二次认证</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>登录复核(X-PACK)</td>
|
||||
<td>用户登录行为受管理员的监管与控制</td>
|
||||
<td>登录复核</td>
|
||||
<td>用户登录行为受管理员的监管与控制:small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td rowspan="11">账号管理<br>Account</td>
|
||||
@@ -205,23 +85,23 @@ v2.1.0 是 v2.0.0 之后的功能版本。
|
||||
<td>密码过期设置</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td rowspan="2">批量改密(X-PACK)</td>
|
||||
<td>定期批量改密</td>
|
||||
<td rowspan="2">批量改密</td>
|
||||
<td>定期批量改密:small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>多种密码策略</td>
|
||||
<td>多种密码策略:small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>多云纳管(X-PACK)</td>
|
||||
<td>对私有云、公有云资产自动统一纳管</td>
|
||||
<td>多云纳管 </td>
|
||||
<td>对私有云、公有云资产自动统一纳管:small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>收集用户(X-PACK)</td>
|
||||
<td>自定义任务定期收集主机用户</td>
|
||||
<td>收集用户 </td>
|
||||
<td>自定义任务定期收集主机用户:small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>密码匣子(X-PACK)</td>
|
||||
<td>统一对资产主机的用户密码进行查看、更新、测试操作</td>
|
||||
<td>密码匣子 </td>
|
||||
<td>统一对资产主机的用户密码进行查看、更新、测试操作:small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td rowspan="15">授权控制<br>Authorization</td>
|
||||
@@ -246,7 +126,7 @@ v2.1.0 是 v2.0.0 之后的功能版本。
|
||||
<td>实现更细粒度的应用级授权</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>MySQL 数据库应用、RemoteApp 远程应用(X-PACK)</td>
|
||||
<td>MySQL 数据库应用、RemoteApp 远程应用:small_orange_diamond: </td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>动作授权</td>
|
||||
@@ -273,12 +153,12 @@ v2.1.0 是 v2.0.0 之后的功能版本。
|
||||
<td>实现 Web SFTP 文件管理</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>工单管理(X-PACK)</td>
|
||||
<td>支持对用户登录请求行为进行控制</td>
|
||||
<td>工单管理</td>
|
||||
<td>支持对用户登录请求行为进行控制:small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>组织管理(X-PACK)</td>
|
||||
<td>实现多租户管理与权限隔离</td>
|
||||
<td>组织管理</td>
|
||||
<td>实现多租户管理与权限隔离:small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td rowspan="7">安全审计<br>Audit</td>
|
||||
@@ -297,7 +177,7 @@ v2.1.0 是 v2.0.0 之后的功能版本。
|
||||
<td>支持对 Linux、Windows 等资产操作的录像进行回放审计</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>支持对 RemoteApp(X-PACK)、MySQL 等应用操作的录像进行回放审计</td>
|
||||
<td>支持对 RemoteApp:small_orange_diamond:、MySQL 等应用操作的录像进行回放审计</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>指令审计</td>
|
||||
@@ -313,7 +193,7 @@ v2.1.0 是 v2.0.0 之后的功能版本。
|
||||
<td>命令方式</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Web UI方式 (X-PACK)</td>
|
||||
<td>Web UI方式 :small_orange_diamond:</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
@@ -321,13 +201,13 @@ v2.1.0 是 v2.0.0 之后的功能版本。
|
||||
<td>MySQL</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Oracle (X-PACK)</td>
|
||||
<td>Oracle :small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>MariaDB (X-PACK)</td>
|
||||
<td>MariaDB :small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>PostgreSQL (X-PACK)</td>
|
||||
<td>PostgreSQL :small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td rowspan="6">功能亮点</td>
|
||||
@@ -357,6 +237,8 @@ v2.1.0 是 v2.0.0 之后的功能版本。
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
**说明**: 带 :small_orange_diamond: 后缀的是 X-PACK 插件有的功能
|
||||
|
||||
## 快速开始
|
||||
|
||||
- [极速安装](https://docs.jumpserver.org/zh/master/install/setup_by_fast/)
|
||||
@@ -366,9 +248,19 @@ v2.1.0 是 v2.0.0 之后的功能版本。
|
||||
## 组件项目
|
||||
- [Lina](https://github.com/jumpserver/lina) JumpServer Web UI 项目
|
||||
- [Luna](https://github.com/jumpserver/luna) JumpServer Web Terminal 项目
|
||||
- [Koko](https://github.com/jumpserver/koko) JumpServer 字符协议 Connector 项目,替代原来 Python 版本的 [Coco](https://github.com/jumpserver/coco)
|
||||
- [KoKo](https://github.com/jumpserver/koko) JumpServer 字符协议 Connector 项目,替代原来 Python 版本的 [Coco](https://github.com/jumpserver/coco)
|
||||
- [Guacamole](https://github.com/jumpserver/docker-guacamole) JumpServer 图形协议 Connector 项目,依赖 [Apache Guacamole](https://guacamole.apache.org/)
|
||||
|
||||
## 贡献
|
||||
如果有你好的想法创意,或者帮助我们修复了 Bug, 欢迎提交 Pull Request
|
||||
|
||||
感谢以下贡献者,让 JumpServer 更加完善
|
||||
|
||||
<a href="https://github.com/jumpserver/jumpserver/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=jumpserver/jumpserver" />
|
||||
</a>
|
||||
|
||||
|
||||
## 致谢
|
||||
- [Apache Guacamole](https://guacamole.apache.org/) Web页面连接 RDP, SSH, VNC协议设备,JumpServer 图形化连接依赖
|
||||
- [OmniDB](https://omnidb.org/) Web页面连接使用数据库,JumpServer Web数据库依赖
|
||||
@@ -376,7 +268,6 @@ v2.1.0 是 v2.0.0 之后的功能版本。
|
||||
|
||||
## JumpServer 企业版
|
||||
- [申请企业版试用](https://jinshuju.net/f/kyOYpi)
|
||||
> 注:企业版支持离线安装,申请通过后会提供高速下载链接。
|
||||
|
||||
## 案例研究
|
||||
|
||||
|
||||
390
README_EN.md
@@ -1,143 +1,245 @@
|
||||
## Jumpserver
|
||||
# Jumpserver - The Bastion Host for Multi-Cloud Environment
|
||||
|
||||
[](https://www.python.org/)
|
||||
[](https://www.djangoproject.com/)
|
||||
[](https://hub.docker.com/u/jumpserver)
|
||||
|
||||
----
|
||||
## CRITICAL BUG WARNING
|
||||
- [中文版](https://github.com/jumpserver/jumpserver/blob/master/README.md)
|
||||
|
||||
Recently we found a critical remote code execution vulnerability that leads to pre-auth access and information leakage. Please apply the fix as soon as possible.
|
||||
|
||||
Thanks to **reactivity from the Alibaba HackerOne bug bounty program** for reporting this bug to us.
|
||||
|
||||
**Vulnerable version:**
|
||||
```
|
||||
< v2.6.2
|
||||
< v2.5.4
|
||||
< v2.4.5
|
||||
= v1.5.9
|
||||
>= v1.5.3
|
||||
```
|
||||
|
||||
**Safe and Stable version:**
|
||||
```
|
||||
>= v2.6.2
|
||||
>= v2.5.4
|
||||
>= v2.4.5
|
||||
= v1.5.9 (version tag didn't change)
|
||||
< v1.5.3
|
||||
```
|
||||
|
||||
**Bug Fix Solution:**
|
||||
Upgrade to the latest version or to one of the safe versions listed above.
|
||||
|
||||
|
||||
**Temporary Solution (upgrade asap):**
|
||||
|
||||
Modify the Nginx config file to block the vulnerable API endpoints listed below
|
||||
|
||||
```
|
||||
/api/v1/authentication/connection-token/
|
||||
/api/v1/users/connection-token/
|
||||
```
|
||||
|
||||
Path to Nginx config file
|
||||
|
||||
```
|
||||
# Previous Community version
|
||||
/etc/nginx/conf.d/jumpserver.conf
|
||||
|
||||
# Previous Enterprise version
|
||||
jumpserver-release/nginx/http_server.conf
|
||||
|
||||
# Latest version
|
||||
jumpserver-release/compose/config_static/http_server.conf
|
||||
```
|
||||
|
||||
Changes in Nginx config file
|
||||
|
||||
```
|
||||
### Put the following location blocks before the existing /api and / locations
|
||||
location /api/v1/authentication/connection-token/ {
|
||||
return 403;
|
||||
}
|
||||
|
||||
location /api/v1/users/connection-token/ {
|
||||
return 403;
|
||||
}
|
||||
### End right here
|
||||
|
||||
location /api/ {
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_pass http://core:8080;
|
||||
}
|
||||
|
||||
...
|
||||
```
|
||||
|
||||
Save the file and restart Nginx
|
||||
|
||||
```
|
||||
docker deployment:
|
||||
$ docker restart jms_nginx
|
||||
|
||||
rpm or other deployment:
|
||||
$ systemctl restart nginx
|
||||
|
||||
```
|
||||
|
||||
**Bug Fix Verification**
|
||||
|
||||
```
|
||||
# Download the following script to check if it is fixed
|
||||
$ wget https://github.com/jumpserver/jumpserver/releases/download/v2.6.2/jms_bug_check.sh
|
||||
|
||||
# Run the code to verify it
|
||||
$ bash jms_bug_check.sh demo.jumpserver.org
|
||||
漏洞已修复 (It means the bug is fixed)
|
||||
漏洞未修复 (It means the bug is not fixed and the system is still vulnerable)
|
||||
```
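For a quick programmatic check of the workaround, the sketch below probes the two endpoints listed above (the host is a placeholder, and the `requests` library is assumed to be installed); with the Nginx blocks in place they should answer 403 rather than an unauthenticated 2xx.

```python
import requests

# Placeholder host; replace with your own JumpServer address.
HOST = 'https://demo.jumpserver.org'
VULNERABLE_ENDPOINTS = [
    '/api/v1/authentication/connection-token/',
    '/api/v1/users/connection-token/',
]

for path in VULNERABLE_ENDPOINTS:
    resp = requests.get(HOST + path, timeout=10)
    # With the Nginx workaround in place these locations return 403;
    # an unauthenticated 2xx response here is a red flag.
    print(path, resp.status_code)
```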
|
||||
|
||||
|
||||
**Attack Simulation**
|
||||
|
||||
Go to the logs directory, which should contain the gunicorn.log file, then download the intrusion-check script and execute it
|
||||
|
||||
```
|
||||
$ pwd
|
||||
/opt/jumpserver/core/logs
|
||||
|
||||
$ ls gunicorn.log
|
||||
gunicorn.log
|
||||
|
||||
$ wget 'https://github.com/jumpserver/jumpserver/releases/download/v2.6.2/jms_check_attack.sh'
|
||||
$ bash jms_check_attack.sh
|
||||
系统未被入侵 (It means the system is safe)
|
||||
系统已被入侵 (It means the system is being attacked)
|
||||
```
|
||||
|Security Notice|
|
||||
|------------------|
|
||||
|On 15 January 2021, JumpServer disclosed a critical remote code execution vulnerability. Please fix it as soon as possible! [More detail](https://github.com/jumpserver/jumpserver/issues/5533). Thanks to **reactivity of the Alibaba HackerOne bug bounty program** for reporting this bug to us.|
|
||||
|
||||
--------------------------
|
||||
|
||||
----
|
||||
|
||||
- [中文版](https://github.com/jumpserver/jumpserver/blob/master/README.md)
|
||||
|
||||
Jumpserver is the world's first open-source PAM (Privileged Access Management System) and is licensed under the GNU GPL v2.0. It is a 4A-compliant professional operation and maintenance security audit system.
|
||||
Jumpserver is the world's first open-source Bastion Host and is licensed under the GNU GPL v2.0. It is a 4A-compliant professional operation and maintenance security audit system.
|
||||
|
||||
Jumpserver uses Python / Django for development, follows Web 2.0 specifications, and is equipped with an industry-leading Web Terminal solution that provides a beautiful user interface and great user experience
|
||||
|
||||
Jumpserver adopts a distributed architecture to support multi-branch deployment across multiple cross-regional areas. The central node provides APIs, and login nodes are deployed in each branch. It can be scaled horizontally without concurrency restrictions.
|
||||
|
||||
Change the world, starting from little things.
|
||||
Change the world by taking every little step
|
||||
|
||||
----
|
||||
### Advantages
|
||||
|
||||
### Features
|
||||
- Open Source: fully transparent, free to access, and quick to install.
- Distributed: supports large-scale concurrent access with ease.
- No plugin required: all you need is a browser for the ultimate Web Terminal experience.
- Multi-Cloud supported: a unified system to manage assets on different clouds at the same time.
- Cloud storage: audit records are stored in the cloud, so they are never lost.
- Multi-Tenant system: multiple subsidiary companies or departments can use the same system simultaneously.
- Many applications supported: databases, Windows remote applications, Kubernetes clusters, etc.
|
||||
|
||||

|
||||
## Features List
|
||||
|
||||
<table>
|
||||
<tr>
|
||||
<td rowspan="8">Authentication</td>
|
||||
<td rowspan="5">Login</td>
|
||||
<td>Unified way to access and authenticate resources</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>LDAP/AD Authentication</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>RADIUS Authentication</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>OpenID Authentication(Single Sign-On)</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>CAS Authentication (Single Sign-On)</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td rowspan="2">MFA (Multi-Factor Authentication)</td>
|
||||
<td>Use Google Authenticator for MFA</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>RADIUS (Remote Authentication Dial In User Service)</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Login Supervision</td>
|
||||
<td>Any user’s login behavior is supervised and controlled by the administrator:small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td rowspan="11">Accounting</td>
|
||||
<td rowspan="2">Centralized Accounts Management</td>
|
||||
<td>Admin Users management</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>System Users management</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td rowspan="4">Unified Password Management</td>
|
||||
<td>Asset password custody (a vault storing all asset passwords securely)</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Auto-generated passwords</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Automatic password handling (auto login assets)</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Password expiration settings</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td rowspan="2">Password Change Scheduler</td>
|
||||
<td>Support regular batch Linux/Windows assets password changing:small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Implement multiple password strategies:small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Multi-Cloud Management</td>
|
||||
<td>Automatically manage private cloud and public cloud assets in a unified platform :small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Users Acquisition </td>
|
||||
<td>Create regular custom tasks to collect system users in selected assets to identify and track the privileges ownership:small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Password Vault </td>
|
||||
<td>Unified operations to check, update, and test system user password to prevent stealing or unauthorised sharing of passwords:small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td rowspan="15">Authorization</td>
|
||||
<td>Multi-Dimensional</td>
|
||||
<td>Grant users or user groups access to assets, asset nodes, or applications through system users, providing precise access control for different user roles</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td rowspan="4">Assets</td>
|
||||
<td>Assets are arranged and displayed in a tree structure </td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Assets and Nodes have immense flexibility for authorizing</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Assets in nodes inherit authorization automatically</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>child nodes automatically inherit authorization from parent nodes</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td rowspan="2">Application</td>
|
||||
<td>Provides granular access control for privileged users on application level to protect from unauthorized access and unintentional errors</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Database applications (MySQL, Oracle, PostgreSQL, MariaDB, etc.) and Remote App:small_orange_diamond: </td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Actions</td>
|
||||
<td>Deeper restriction on the control of file upload, download and connection actions of authorized assets. Control the permission of clipboard copy/paste (from outer terminal to current asset)</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Time Bound</td>
|
||||
<td>Sharply limited the available (accessible) time for account access to the authorized resources to reduce the risk and attack surface drastically</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Privileged Assignment</td>
|
||||
<td>Assign deny/allow command lists to different system users; privilege elevation lets particular commands run with a higher level of privileges, minimizing insider threat</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Command Filtering</td>
|
||||
<td>Create lists of restricted commands and assign them to different authorized system users for filtering purposes</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>File Transfer and Management</td>
|
||||
<td>Support SFTP file upload/download</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>File Management</td>
|
||||
<td>Provide a Web UI for SFTP file management</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Workflow Management</td>
|
||||
<td>Manage user login confirmation requests and assets or applications authorization requests for Just-In-Time Privileges functionality:small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Group Management </td>
|
||||
<td>Establish a multi-tenant ecosystem with authority isolation that keeps malicious actors away from sensitive administrative backends:small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td rowspan="8">Auditing</td>
|
||||
<td>Operations</td>
|
||||
<td>Auditing user operation behaviors for any access or usage of given privileged accounts</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td rowspan="2">Session</td>
|
||||
<td>Support real-time session audit</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Full history of all previous session audits</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td rowspan="3">Video</td>
|
||||
<td>Complete session audit and playback recordings on assets operation (Linux, Windows)</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Full recordings of RemoteApp, MySQL, and Kubernetes:small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Supports uploading recordings to public clouds</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Command</td>
|
||||
<td>Command auditing on assets and applications operation. Send warning alerts when executing illegal commands</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>File Transfer</td>
|
||||
<td>Full recordings of file upload and download</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td rowspan="20">Database</td>
|
||||
<td rowspan="2">How to connect</td>
|
||||
<td>Command line</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Built-in Web UI:small_orange_diamond:</td>
|
||||
</tr>
|
||||
|
||||
<tr>
|
||||
<td rowspan="4">Supported Database</td>
|
||||
<td>MySQL</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Oracle :small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>MariaDB :small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>PostgreSQL :small_orange_diamond:</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td rowspan="6">Feature Highlights</td>
|
||||
<td>Syntax highlights</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Prettier SQL formatting</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Support Shortcuts</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Support selected SQL statements</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>SQL commands history query</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Support page creation: DB, TABLE</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td rowspan="2">Session Auditing</td>
|
||||
<td>Full records of command</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Playback videos</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
**Note**: Rows with :small_orange_diamond: at the end of the sentence means that it is X-PACK features exclusive ([Apply for X-PACK Trial](https://jinshuju.net/f/kyOYpi))
|
||||
|
||||
### Start
|
||||
|
||||
@@ -162,6 +264,50 @@ We provide the SDK for your other systems to quickly interact with the Jumpserve
|
||||
- [Python](https://github.com/jumpserver/jumpserver-python-sdk) Jumpserver other components use this SDK to complete the interaction.
|
||||
- [Java](https://github.com/KaiJunYan/jumpserver-java-sdk.git) Thanks to 恺珺 for providing this Java SDK version.
|
||||
|
||||
## JumpServer Component Projects
|
||||
- [Lina](https://github.com/jumpserver/lina) JumpServer Web UI
|
||||
- [Luna](https://github.com/jumpserver/luna) JumpServer Web Terminal
|
||||
- [KoKo](https://github.com/jumpserver/koko) JumpServer character protocol Connector, replacing the original Python version [Coco](https://github.com/jumpserver/coco)
|
||||
- [Guacamole](https://github.com/jumpserver/docker-guacamole) JumpServer graphics protocol Connector, relying on [Apache Guacamole](https://guacamole.apache.org/)
|
||||
|
||||
## Contribution
|
||||
If you have any good ideas, or want to help us fix bugs, please submit a Pull Request and accept our thanks :)
|
||||
|
||||
Thanks to the following contributors for making JumpServer better every day!
|
||||
|
||||
<a href="https://github.com/jumpserver/jumpserver/graphs/contributors">
|
||||
<img src="https://contrib.rocks/image?repo=jumpserver/jumpserver" />
|
||||
</a>
|
||||
|
||||
|
||||
## Thanks to
|
||||
- [Apache Guacamole](https://guacamole.apache.org/) Connects RDP, SSH, and VNC devices from a web page; JumpServer's graphical connections depend on it.
|
||||
- [OmniDB](https://omnidb.org/) Connects to databases from a web page; JumpServer's web database access depends on it.
|
||||
|
||||
|
||||
## JumpServer Enterprise Version
|
||||
- [Apply for it](https://jinshuju.net/f/kyOYpi)
|
||||
|
||||
## Case Study
|
||||
|
||||
- [JumpServer 堡垒机护航顺丰科技超大规模资产安全运维](https://blog.fit2cloud.com/?p=1147);
|
||||
- [JumpServer 堡垒机让“大智慧”的混合 IT 运维更智慧](https://blog.fit2cloud.com/?p=882);
|
||||
- [携程 JumpServer 堡垒机部署与运营实战](https://blog.fit2cloud.com/?p=851);
|
||||
- [小红书的JumpServer堡垒机大规模资产跨版本迁移之路](https://blog.fit2cloud.com/?p=516);
|
||||
- [JumpServer堡垒机助力中手游提升多云环境下安全运维能力](https://blog.fit2cloud.com/?p=732);
|
||||
- [中通快递:JumpServer主机安全运维实践](https://blog.fit2cloud.com/?p=708);
|
||||
- [东方明珠:JumpServer高效管控异构化、分布式云端资产](https://blog.fit2cloud.com/?p=687);
|
||||
- [江苏农信:JumpServer堡垒机助力行业云安全运维](https://blog.fit2cloud.com/?p=666)。
|
||||
|
||||
## Security Instructions
|
||||
|
||||
JumpServer is a security product. Please refer to [Basic Security Recommendations](https://docs.jumpserver.org/zh/master/install/install_security/) for deployment and installation.
|
||||
|
||||
If you find a security problem, please contact us directly:
|
||||
|
||||
- ibuler@fit2cloud.com
|
||||
- support@fit2cloud.com
|
||||
- 400-052-0755
|
||||
|
||||
### License & Copyright
|
||||
Copyright (c) 2014-2019 Beijing Duizhan Tech, Inc., All rights reserved.
|
||||
|
||||
3
apps/acls/admin.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from django.contrib import admin
|
||||
|
||||
# Register your models here.
|
||||
3
apps/acls/api/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from .login_acl import *
|
||||
from .login_asset_acl import *
|
||||
from .login_asset_check import *
|
||||
19
apps/acls/api/login_acl.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from common.permissions import IsOrgAdmin, HasQueryParamsUserAndIsCurrentOrgMember
|
||||
from common.drf.api import JMSBulkModelViewSet
|
||||
from ..models import LoginACL
|
||||
from .. import serializers
|
||||
|
||||
__all__ = ['LoginACLViewSet', ]
|
||||
|
||||
|
||||
class LoginACLViewSet(JMSBulkModelViewSet):
|
||||
queryset = LoginACL.objects.all()
|
||||
filterset_fields = ('name', 'user', )
|
||||
search_fields = filterset_fields
|
||||
permission_classes = (IsOrgAdmin, )
|
||||
serializer_class = serializers.LoginACLSerializer
|
||||
|
||||
def get_permissions(self):
|
||||
if self.action in ["retrieve", "list"]:
|
||||
self.permission_classes = (IsOrgAdmin, HasQueryParamsUserAndIsCurrentOrgMember)
|
||||
return super().get_permissions()
|
||||
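A minimal usage sketch of this viewset over HTTP; the `login-acls/` route comes from apps/acls/urls/api_urls.py later in this diff, while the base URL, API prefix, and credential format are assumptions about the deployment.

```python
import requests

# Assumed deployment details; adjust to your instance.
BASE = 'https://jumpserver.example.org/api/v1/acls'
# The credential format depends on how your deployment issues API tokens.
HEADERS = {'Authorization': '<your-api-credential>'}

# List login ACLs, optionally filtered by the registered filterset fields (name, user).
resp = requests.get(f'{BASE}/login-acls/', params={'user': '<user-uuid>'}, headers=HEADERS)
print(resp.status_code, resp.json())
```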
15
apps/acls/api/login_asset_acl.py
Normal file
@@ -0,0 +1,15 @@
|
||||
|
||||
from orgs.mixins.api import OrgBulkModelViewSet
|
||||
from common.permissions import IsOrgAdmin
|
||||
from .. import models, serializers
|
||||
|
||||
|
||||
__all__ = ['LoginAssetACLViewSet']
|
||||
|
||||
|
||||
class LoginAssetACLViewSet(OrgBulkModelViewSet):
|
||||
model = models.LoginAssetACL
|
||||
filterset_fields = ('name', )
|
||||
search_fields = filterset_fields
|
||||
permission_classes = (IsOrgAdmin, )
|
||||
serializer_class = serializers.LoginAssetACLSerializer
|
||||
105
apps/acls/api/login_asset_check.py
Normal file
@@ -0,0 +1,105 @@
|
||||
from django.shortcuts import get_object_or_404
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.generics import CreateAPIView, RetrieveDestroyAPIView
|
||||
|
||||
from common.permissions import IsAppUser
|
||||
from common.utils import reverse, lazyproperty
|
||||
from orgs.utils import tmp_to_org, tmp_to_root_org
|
||||
from tickets.models import Ticket
|
||||
from ..models import LoginAssetACL
|
||||
from .. import serializers
|
||||
|
||||
|
||||
__all__ = ['LoginAssetCheckAPI', 'LoginAssetConfirmStatusAPI']
|
||||
|
||||
|
||||
class LoginAssetCheckAPI(CreateAPIView):
|
||||
permission_classes = (IsAppUser, )
|
||||
serializer_class = serializers.LoginAssetCheckSerializer
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
is_need_confirm, response_data = self.check_if_need_confirm()
|
||||
return Response(data=response_data, status=200)
|
||||
|
||||
def check_if_need_confirm(self):
|
||||
queries = {
|
||||
'user': self.serializer.user, 'asset': self.serializer.asset,
|
||||
'system_user': self.serializer.system_user,
|
||||
'action': LoginAssetACL.ActionChoices.login_confirm
|
||||
}
|
||||
with tmp_to_org(self.serializer.org):
|
||||
acl = LoginAssetACL.filter(**queries).valid().first()
|
||||
|
||||
if not acl:
|
||||
is_need_confirm = False
|
||||
response_data = {}
|
||||
else:
|
||||
is_need_confirm = True
|
||||
response_data = self._get_response_data_of_need_confirm(acl)
|
||||
response_data['need_confirm'] = is_need_confirm
|
||||
return is_need_confirm, response_data
|
||||
|
||||
def _get_response_data_of_need_confirm(self, acl):
|
||||
ticket = LoginAssetACL.create_login_asset_confirm_ticket(
|
||||
user=self.serializer.user,
|
||||
asset=self.serializer.asset,
|
||||
system_user=self.serializer.system_user,
|
||||
assignees=acl.reviewers.all(),
|
||||
org_id=self.serializer.org.id
|
||||
)
|
||||
confirm_status_url = reverse(
|
||||
view_name='acls:login-asset-confirm-status',
|
||||
kwargs={'pk': str(ticket.id)}
|
||||
)
|
||||
ticket_detail_url = reverse(
|
||||
view_name='api-tickets:ticket-detail',
|
||||
kwargs={'pk': str(ticket.id)},
|
||||
external=True, api_to_ui=True
|
||||
)
|
||||
ticket_detail_url = '{url}?type={type}'.format(url=ticket_detail_url, type=ticket.type)
|
||||
data = {
|
||||
'check_confirm_status': {'method': 'GET', 'url': confirm_status_url},
|
||||
'close_confirm': {'method': 'DELETE', 'url': confirm_status_url},
|
||||
'ticket_detail_url': ticket_detail_url,
|
||||
'reviewers': [str(user) for user in ticket.assignees.all()],
|
||||
}
|
||||
return data
|
||||
|
||||
@lazyproperty
|
||||
def serializer(self):
|
||||
serializer = self.get_serializer(data=self.request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
return serializer
|
||||
|
||||
|
||||
class LoginAssetConfirmStatusAPI(RetrieveDestroyAPIView):
|
||||
permission_classes = (IsAppUser, )
|
||||
|
||||
def retrieve(self, request, *args, **kwargs):
|
||||
if self.ticket.action_open:
|
||||
status = 'await'
|
||||
elif self.ticket.action_approve:
|
||||
status = 'approve'
|
||||
else:
|
||||
status = 'reject'
|
||||
data = {
|
||||
'status': status,
|
||||
'action': self.ticket.action,
|
||||
'processor': self.ticket.processor_display
|
||||
}
|
||||
return Response(data=data, status=200)
|
||||
|
||||
def destroy(self, request, *args, **kwargs):
|
||||
if self.ticket.status_open:
|
||||
self.ticket.close(processor=self.ticket.applicant)
|
||||
data = {
|
||||
'action': self.ticket.action,
|
||||
'status': self.ticket.status,
|
||||
'processor': self.ticket.processor_display
|
||||
}
|
||||
return Response(data=data, status=200)
|
||||
|
||||
@lazyproperty
|
||||
def ticket(self):
|
||||
with tmp_to_root_org():
|
||||
return get_object_or_404(Ticket, pk=self.kwargs['pk'])
|
||||
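For reference, a sketch of the request and response shapes this API exchanges, assembled from the LoginAssetCheckSerializer fields (shown later in this diff) and `_get_response_data_of_need_confirm` above; every concrete value is a placeholder.

```python
# POST body accepted by LoginAssetCheckAPI (see LoginAssetCheckSerializer later in this diff).
request_payload = {
    'user_id': '<user-uuid>',
    'asset_id': '<asset-uuid>',
    'system_user_id': '<system-user-uuid>',
    # Only needed for manual-login system users that carry no username of their own.
    'system_user_username': '',
}

# Response when a matching login_confirm ACL exists (a confirm ticket has been created).
response_when_confirm_required = {
    'need_confirm': True,
    'check_confirm_status': {'method': 'GET', 'url': '<login-asset-confirm-status url>'},
    'close_confirm': {'method': 'DELETE', 'url': '<login-asset-confirm-status url>'},
    'ticket_detail_url': '<ticket detail url>?type=<ticket type>',
    'reviewers': ['<reviewer display name>'],
}

# Response when no ACL matches: only the flag is returned.
response_when_no_acl = {'need_confirm': False}
```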
5
apps/acls/apps.py
Normal file
@@ -0,0 +1,5 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class AclsConfig(AppConfig):
|
||||
name = 'acls'
|
||||
61
apps/acls/migrations/0001_initial.py
Normal file
@@ -0,0 +1,61 @@
|
||||
# Generated by Django 3.1 on 2021-03-11 09:53
|
||||
|
||||
from django.conf import settings
|
||||
import django.core.validators
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
import uuid
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
initial = True
|
||||
|
||||
dependencies = [
|
||||
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='LoginACL',
|
||||
fields=[
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('created_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Created by')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
|
||||
('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
|
||||
('name', models.CharField(max_length=128, verbose_name='Name')),
|
||||
('priority', models.IntegerField(default=50, help_text='1-100, the lower the value will be match first', validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(100)], verbose_name='Priority')),
|
||||
('is_active', models.BooleanField(default=True, verbose_name='Active')),
|
||||
('comment', models.TextField(blank=True, default='', verbose_name='Comment')),
|
||||
('ip_group', models.JSONField(default=list, verbose_name='Login IP')),
|
||||
('action', models.CharField(choices=[('reject', 'Reject'), ('allow', 'Allow')], default='reject', max_length=64, verbose_name='Action')),
|
||||
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='login_acls', to=settings.AUTH_USER_MODEL, verbose_name='User')),
|
||||
],
|
||||
options={
|
||||
'ordering': ('priority', '-date_updated', 'name'),
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='LoginAssetACL',
|
||||
fields=[
|
||||
('org_id', models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
|
||||
('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
|
||||
('created_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Created by')),
|
||||
('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
|
||||
('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
|
||||
('name', models.CharField(max_length=128, verbose_name='Name')),
|
||||
('priority', models.IntegerField(default=50, help_text='1-100, the lower the value will be match first', validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(100)], verbose_name='Priority')),
|
||||
('is_active', models.BooleanField(default=True, verbose_name='Active')),
|
||||
('comment', models.TextField(blank=True, default='', verbose_name='Comment')),
|
||||
('users', models.JSONField(verbose_name='User')),
|
||||
('system_users', models.JSONField(verbose_name='System User')),
|
||||
('assets', models.JSONField(verbose_name='Asset')),
|
||||
('action', models.CharField(choices=[('login_confirm', 'Login confirm')], default='login_confirm', max_length=64, verbose_name='Action')),
|
||||
('reviewers', models.ManyToManyField(blank=True, related_name='review_login_asset_acls', to=settings.AUTH_USER_MODEL, verbose_name='Reviewers')),
|
||||
],
|
||||
options={
|
||||
'ordering': ('priority', '-date_updated', 'name'),
|
||||
'unique_together': {('name', 'org_id')},
|
||||
},
|
||||
),
|
||||
]
|
||||
0
apps/acls/migrations/__init__.py
Normal file
2
apps/acls/models/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
||||
from .login_acl import *
|
||||
from .login_asset_acl import *
|
||||
35
apps/acls/models/base.py
Normal file
@@ -0,0 +1,35 @@
|
||||
from django.db import models
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.core.validators import MinValueValidator, MaxValueValidator
|
||||
from common.mixins import CommonModelMixin
|
||||
|
||||
|
||||
__all__ = ['BaseACL', 'BaseACLQuerySet']
|
||||
|
||||
|
||||
class BaseACLQuerySet(models.QuerySet):
|
||||
def active(self):
|
||||
return self.filter(is_active=True)
|
||||
|
||||
def inactive(self):
|
||||
return self.filter(is_active=False)
|
||||
|
||||
def valid(self):
|
||||
return self.active()
|
||||
|
||||
def invalid(self):
|
||||
return self.inactive()
|
||||
|
||||
|
||||
class BaseACL(CommonModelMixin):
|
||||
name = models.CharField(max_length=128, verbose_name=_('Name'))
|
||||
priority = models.IntegerField(
|
||||
default=50, verbose_name=_("Priority"),
|
||||
help_text=_("1-100, the lower the value will be match first"),
|
||||
validators=[MinValueValidator(1), MaxValueValidator(100)]
|
||||
)
|
||||
is_active = models.BooleanField(default=True, verbose_name=_("Active"))
|
||||
comment = models.TextField(default='', blank=True, verbose_name=_('Comment'))
|
||||
|
||||
class Meta:
|
||||
abstract = True
|
||||
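A small sketch of how a concrete ACL built on this base is queried; `valid()` comes from `BaseACLQuerySet`, the priority-first ordering from the concrete models' `Meta` below, and a configured Django environment with the acls app installed is assumed.

```python
# e.g. inside `python manage.py shell`
from acls.models import LoginACL

# valid() keeps only is_active ACLs; the Meta ordering puts the lowest
# priority value first, so .first() yields the rule that should win.
winning_rule = LoginACL.objects.valid().first()
print(winning_rule)
```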
54
apps/acls/models/login_acl.py
Normal file
@@ -0,0 +1,54 @@
|
||||
|
||||
from django.db import models
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from .base import BaseACL, BaseACLQuerySet
|
||||
from ..utils import contains_ip
|
||||
|
||||
|
||||
class ACLManager(models.Manager):
|
||||
|
||||
def valid(self):
|
||||
return self.get_queryset().valid()
|
||||
|
||||
|
||||
class LoginACL(BaseACL):
|
||||
class ActionChoices(models.TextChoices):
|
||||
reject = 'reject', _('Reject')
|
||||
allow = 'allow', _('Allow')
|
||||
|
||||
# Conditions
|
||||
ip_group = models.JSONField(default=list, verbose_name=_('Login IP'))
|
||||
# Action
|
||||
action = models.CharField(
|
||||
max_length=64, choices=ActionChoices.choices, default=ActionChoices.reject,
|
||||
verbose_name=_('Action')
|
||||
)
|
||||
# Relations
|
||||
user = models.ForeignKey(
|
||||
'users.User', on_delete=models.CASCADE, related_name='login_acls', verbose_name=_('User')
|
||||
)
|
||||
|
||||
objects = ACLManager.from_queryset(BaseACLQuerySet)()
|
||||
|
||||
class Meta:
|
||||
ordering = ('priority', '-date_updated', 'name')
|
||||
|
||||
@property
|
||||
def action_reject(self):
|
||||
return self.action == self.ActionChoices.reject
|
||||
|
||||
@property
|
||||
def action_allow(self):
|
||||
return self.action == self.ActionChoices.allow
|
||||
|
||||
@staticmethod
|
||||
def allow_user_to_login(user, ip):
|
||||
acl = user.login_acls.valid().first()
|
||||
if not acl:
|
||||
return True
|
||||
is_contained = contains_ip(ip, acl.ip_group)
|
||||
if acl.action_allow and is_contained:
|
||||
return True
|
||||
if acl.action_reject and not is_contained:
|
||||
return True
|
||||
return False
|
||||
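The decision in `allow_user_to_login` reduces to a simple rule: no valid ACL means the login is allowed, an `allow` ACL admits only source IPs inside `ip_group`, and a `reject` ACL admits only IPs outside it. The helper below is a hypothetical, ORM-free mirror of that rule for illustration.

```python
from acls.utils import contains_ip


def allow_login(action, ip, ip_group):
    """Hypothetical mirror of LoginACL.allow_user_to_login for a single ACL."""
    is_contained = contains_ip(ip, ip_group)
    if action == 'allow':
        return is_contained
    if action == 'reject':
        return not is_contained
    return False


print(allow_login('allow', '10.1.1.5', ['10.1.1.0/24']))   # True: the allowed range contains the IP
print(allow_login('reject', '10.1.1.5', ['10.1.1.0/24']))  # False: the rejected range contains the IP
```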
99
apps/acls/models/login_asset_acl.py
Normal file
@@ -0,0 +1,99 @@
|
||||
from django.db import models
|
||||
from django.db.models import Q
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from orgs.mixins.models import OrgModelMixin, OrgManager
|
||||
from .base import BaseACL, BaseACLQuerySet
|
||||
from ..utils import contains_ip
|
||||
|
||||
|
||||
class ACLManager(OrgManager):
|
||||
|
||||
def valid(self):
|
||||
return self.get_queryset().valid()
|
||||
|
||||
|
||||
class LoginAssetACL(BaseACL, OrgModelMixin):
|
||||
class ActionChoices(models.TextChoices):
|
||||
login_confirm = 'login_confirm', _('Login confirm')
|
||||
|
||||
# Conditions
|
||||
users = models.JSONField(verbose_name=_('User'))
|
||||
system_users = models.JSONField(verbose_name=_('System User'))
|
||||
assets = models.JSONField(verbose_name=_('Asset'))
|
||||
# Action
|
||||
action = models.CharField(
|
||||
max_length=64, choices=ActionChoices.choices, default=ActionChoices.login_confirm,
|
||||
verbose_name=_('Action')
|
||||
)
|
||||
# Action: additional fields
|
||||
# - login_confirm
|
||||
reviewers = models.ManyToManyField(
|
||||
'users.User', related_name='review_login_asset_acls', blank=True,
|
||||
verbose_name=_("Reviewers")
|
||||
)
|
||||
|
||||
objects = ACLManager.from_queryset(BaseACLQuerySet)()
|
||||
|
||||
class Meta:
|
||||
unique_together = ('name', 'org_id')
|
||||
ordering = ('priority', '-date_updated', 'name')
|
||||
|
||||
@classmethod
|
||||
def filter(cls, user, asset, system_user, action):
|
||||
queryset = cls.objects.filter(action=action)
|
||||
queryset = cls.filter_user(user, queryset)
|
||||
queryset = cls.filter_asset(asset, queryset)
|
||||
queryset = cls.filter_system_user(system_user, queryset)
|
||||
return queryset
|
||||
|
||||
@classmethod
|
||||
def filter_user(cls, user, queryset):
|
||||
queryset = queryset.filter(
|
||||
Q(users__username_group__contains=user.username) |
|
||||
Q(users__username_group__contains='*')
|
||||
)
|
||||
return queryset
|
||||
|
||||
@classmethod
|
||||
def filter_asset(cls, asset, queryset):
|
||||
queryset = queryset.filter(
|
||||
Q(assets__hostname_group__contains=asset.hostname) |
|
||||
Q(assets__hostname_group__contains='*')
|
||||
)
|
||||
ids = [q.id for q in queryset if contains_ip(asset.ip, q.assets.get('ip_group', []))]
|
||||
queryset = cls.objects.filter(id__in=ids)
|
||||
return queryset
|
||||
|
||||
@classmethod
|
||||
def filter_system_user(cls, system_user, queryset):
|
||||
queryset = queryset.filter(
|
||||
Q(system_users__name_group__contains=system_user.name) |
|
||||
Q(system_users__name_group__contains='*')
|
||||
).filter(
|
||||
Q(system_users__username_group__contains=system_user.username) |
|
||||
Q(system_users__username_group__contains='*')
|
||||
).filter(
|
||||
Q(system_users__protocol_group__contains=system_user.protocol) |
|
||||
Q(system_users__protocol_group__contains='*')
|
||||
)
|
||||
return queryset
|
||||
|
||||
@classmethod
|
||||
def create_login_asset_confirm_ticket(cls, user, asset, system_user, assignees, org_id):
|
||||
from tickets.const import TicketTypeChoices
|
||||
from tickets.models import Ticket
|
||||
data = {
|
||||
'title': _('Login asset confirm') + ' ({})'.format(user),
|
||||
'type': TicketTypeChoices.login_asset_confirm,
|
||||
'meta': {
|
||||
'apply_login_user': str(user),
|
||||
'apply_login_asset': str(asset),
|
||||
'apply_login_system_user': str(system_user),
|
||||
},
|
||||
'org_id': org_id,
|
||||
}
|
||||
ticket = Ticket.objects.create(**data)
|
||||
ticket.assignees.set(assignees)
|
||||
ticket.open(applicant=user)
|
||||
return ticket
|
||||
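For reference, a sketch of the condition JSON that `filter_user`, `filter_asset`, and `filter_system_user` match against; the field names come from the queries above, the values are illustrative, and `'*'` means match-all.

```python
# Illustrative shape of one LoginAssetACL row's condition fields.
example_acl_conditions = {
    'users': {'username_group': ['*']},
    'assets': {
        'hostname_group': ['*'],
        'ip_group': ['192.168.1.0/24', '10.1.1.1-10.1.1.20'],
    },
    'system_users': {
        'name_group': ['*'],
        'username_group': ['root'],
        'protocol_group': ['ssh'],
    },
}
```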
|
||||
3
apps/acls/serializers/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from .login_acl import *
|
||||
from .login_asset_acl import *
|
||||
from .login_asset_check import *
|
||||
54
apps/acls/serializers/login_acl.py
Normal file
@@ -0,0 +1,54 @@
|
||||
from django.utils.translation import ugettext as _
|
||||
from rest_framework import serializers
|
||||
from common.drf.serializers import BulkModelSerializer
|
||||
from orgs.utils import current_org
|
||||
from ..models import LoginACL
|
||||
from ..utils import is_ip_address, is_ip_network, is_ip_segment
|
||||
|
||||
|
||||
__all__ = ['LoginACLSerializer', ]
|
||||
|
||||
|
||||
def ip_group_child_validator(ip_group_child):
|
||||
is_valid = ip_group_child == '*' \
|
||||
or is_ip_address(ip_group_child) \
|
||||
or is_ip_network(ip_group_child) \
|
||||
or is_ip_segment(ip_group_child)
|
||||
if not is_valid:
|
||||
error = _('IP address invalid: `{}`').format(ip_group_child)
|
||||
raise serializers.ValidationError(error)
|
||||
|
||||
|
||||
class LoginACLSerializer(BulkModelSerializer):
|
||||
ip_group_help_text = _(
|
||||
'Format for comma-delimited string, with * indicating a match all. '
|
||||
'Such as: '
|
||||
'192.168.10.1, 192.168.1.0/24, 10.1.1.1-10.1.1.20, 2001:db8:2de::e13, 2001:db8:1a:1110::/64 '
|
||||
)
|
||||
|
||||
ip_group = serializers.ListField(
|
||||
default=['*'], label=_('IP'), help_text=ip_group_help_text,
|
||||
child=serializers.CharField(max_length=1024, validators=[ip_group_child_validator])
|
||||
)
|
||||
user_display = serializers.ReadOnlyField(source='user.name', label=_('User'))
|
||||
action_display = serializers.ReadOnlyField(source='get_action_display', label=_('Action'))
|
||||
|
||||
class Meta:
|
||||
model = LoginACL
|
||||
fields = [
|
||||
'id', 'name', 'priority', 'ip_group', 'user', 'user_display', 'action',
|
||||
'action_display', 'is_active', 'comment', 'created_by', 'date_created', 'date_updated'
|
||||
]
|
||||
extra_kwargs = {
|
||||
'priority': {'default': 50},
|
||||
'is_active': {'default': True},
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def validate_user(user):
|
||||
if user not in current_org.get_members():
|
||||
error = _('The user `{}` is not in the current organization: `{}`').format(
|
||||
user, current_org
|
||||
)
|
||||
raise serializers.ValidationError(error)
|
||||
return user
|
||||
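The `ip_group` entries this serializer accepts are exactly the forms `ip_group_child_validator` recognizes (see apps/acls/utils.py later in this diff); a short sketch, assuming a configured JumpServer/Django environment:

```python
from rest_framework import serializers
from acls.serializers.login_acl import ip_group_child_validator

# Accepted forms: '*', a single address, a CIDR network, or an address range.
for value in ['*', '192.168.10.1', '192.168.1.0/24', '10.1.1.1-10.1.1.20', '2001:db8:2de::e13']:
    ip_group_child_validator(value)  # passes silently

try:
    ip_group_child_validator('not-an-ip')
except serializers.ValidationError as exc:
    print(exc)  # IP address invalid: `not-an-ip`
```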
101
apps/acls/serializers/login_asset_acl.py
Normal file
@@ -0,0 +1,101 @@
|
||||
from rest_framework import serializers
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
from assets.models import SystemUser
|
||||
from acls import models
|
||||
from orgs.models import Organization
|
||||
|
||||
|
||||
__all__ = ['LoginAssetACLSerializer']
|
||||
|
||||
|
||||
common_help_text = _('Format for comma-delimited string, with * indicating a match all. ')
|
||||
|
||||
|
||||
class LoginAssetACLUsersSerializer(serializers.Serializer):
|
||||
username_group = serializers.ListField(
|
||||
default=['*'], child=serializers.CharField(max_length=128), label=_('Username'),
|
||||
help_text=common_help_text
|
||||
)
|
||||
|
||||
|
||||
class LoginAssetACLAssestsSerializer(serializers.Serializer):
|
||||
ip_group_help_text = _(
|
||||
'Format for comma-delimited string, with * indicating a match all. '
|
||||
'Such as: '
|
||||
'192.168.10.1, 192.168.1.0/24, 10.1.1.1-10.1.1.20, 2001:db8:2de::e13, 2001:db8:1a:1110::/64 '
|
||||
'(Domain name support)'
|
||||
)
|
||||
|
||||
ip_group = serializers.ListField(
|
||||
default=['*'], child=serializers.CharField(max_length=1024), label=_('IP'),
|
||||
help_text=ip_group_help_text
|
||||
)
|
||||
hostname_group = serializers.ListField(
|
||||
default=['*'], child=serializers.CharField(max_length=128), label=_('Hostname'),
|
||||
help_text=common_help_text
|
||||
)
|
||||
|
||||
|
||||
class LoginAssetACLSystemUsersSerializer(serializers.Serializer):
|
||||
protocol_group_help_text = _(
|
||||
'Format for comma-delimited string, with * indicating a match all. '
|
||||
'Protocol options: {}'
|
||||
)
|
||||
|
||||
name_group = serializers.ListField(
|
||||
default=['*'], child=serializers.CharField(max_length=128), label=_('Name'),
|
||||
help_text=common_help_text
|
||||
)
|
||||
username_group = serializers.ListField(
|
||||
default=['*'], child=serializers.CharField(max_length=128), label=_('Username'),
|
||||
help_text=common_help_text
|
||||
)
|
||||
protocol_group = serializers.ListField(
|
||||
default=['*'], child=serializers.CharField(max_length=16), label=_('Protocol'),
|
||||
help_text=protocol_group_help_text.format(
|
||||
', '.join(SystemUser.ASSET_CATEGORY_PROTOCOLS)
|
||||
)
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def validate_protocol_group(protocol_group):
|
||||
unsupported_protocols = set(protocol_group) - set(SystemUser.ASSET_CATEGORY_PROTOCOLS + ['*'])
|
||||
if unsupported_protocols:
|
||||
error = _('Unsupported protocols: {}').format(unsupported_protocols)
|
||||
raise serializers.ValidationError(error)
|
||||
return protocol_group
|
||||
|
||||
|
||||
class LoginAssetACLSerializer(BulkOrgResourceModelSerializer):
|
||||
users = LoginAssetACLUsersSerializer()
|
||||
assets = LoginAssetACLAssestsSerializer()
|
||||
system_users = LoginAssetACLSystemUsersSerializer()
|
||||
reviewers_amount = serializers.IntegerField(read_only=True, source='reviewers.count')
|
||||
action_display = serializers.ReadOnlyField(source='get_action_display', label=_('Action'))
|
||||
|
||||
class Meta:
|
||||
model = models.LoginAssetACL
|
||||
fields = [
|
||||
'id', 'name', 'priority', 'users', 'system_users', 'assets', 'action', 'action_display',
|
||||
'is_active', 'comment', 'reviewers', 'reviewers_amount', 'created_by', 'date_created',
|
||||
'date_updated', 'org_id'
|
||||
]
|
||||
extra_kwargs = {
|
||||
"reviewers": {'allow_null': False, 'required': True},
|
||||
'priority': {'default': 50},
|
||||
'is_active': {'default': True},
|
||||
}
|
||||
|
||||
def validate_reviewers(self, reviewers):
|
||||
org_id = self.fields['org_id'].default()
|
||||
org = Organization.get_instance(org_id)
|
||||
if not org:
|
||||
error = _('The organization `{}` does not exist'.format(org_id))
|
||||
raise serializers.ValidationError(error)
|
||||
users = org.get_members()
|
||||
valid_reviewers = list(set(reviewers) & set(users))
|
||||
if not valid_reviewers:
|
||||
error = _('None of the reviewers belong to Organization `{}`'.format(org.name))
|
||||
raise serializers.ValidationError(error)
|
||||
return valid_reviewers
|
||||
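Putting the nested serializers together, a sketch of a create payload for a login-asset ACL; the field names come from the serializer above, while the UUIDs, name, and comment are placeholders.

```python
# Placeholder values throughout; 'action' and the nested keys mirror the serializer fields.
login_asset_acl_payload = {
    'name': 'confirm-prod-ssh',
    'priority': 50,
    'users': {'username_group': ['*']},
    'assets': {'hostname_group': ['*'], 'ip_group': ['192.168.1.0/24']},
    'system_users': {'name_group': ['*'], 'username_group': ['*'], 'protocol_group': ['ssh']},
    'action': 'login_confirm',
    'reviewers': ['<reviewer-user-uuid>'],
    'is_active': True,
    'comment': 'Require review for SSH logins to the production subnet',
}
```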
71
apps/acls/serializers/login_asset_check.py
Normal file
@@ -0,0 +1,71 @@
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
from orgs.utils import tmp_to_root_org
|
||||
from common.utils import get_object_or_none, lazyproperty
|
||||
from users.models import User
|
||||
from assets.models import Asset, SystemUser
|
||||
|
||||
|
||||
__all__ = ['LoginAssetCheckSerializer']
|
||||
|
||||
|
||||
class LoginAssetCheckSerializer(serializers.Serializer):
|
||||
user_id = serializers.UUIDField(required=True, allow_null=False)
|
||||
asset_id = serializers.UUIDField(required=True, allow_null=False)
|
||||
system_user_id = serializers.UUIDField(required=True, allow_null=False)
|
||||
system_user_username = serializers.CharField(max_length=128, default='')
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.user = None
|
||||
self.asset = None
|
||||
self._system_user = None
|
||||
self._system_user_username = None
|
||||
|
||||
def validate_user_id(self, user_id):
|
||||
self.user = self.validate_object_exist(User, user_id)
|
||||
return user_id
|
||||
|
||||
def validate_asset_id(self, asset_id):
|
||||
self.asset = self.validate_object_exist(Asset, asset_id)
|
||||
return asset_id
|
||||
|
||||
def validate_system_user_id(self, system_user_id):
|
||||
self._system_user = self.validate_object_exist(SystemUser, system_user_id)
|
||||
return system_user_id
|
||||
|
||||
def validate_system_user_username(self, system_user_username):
|
||||
system_user_id = self.initial_data.get('system_user_id')
|
||||
system_user = self.validate_object_exist(SystemUser, system_user_id)
|
||||
if self._system_user.login_mode == SystemUser.LOGIN_MANUAL \
|
||||
and not system_user.username \
|
||||
and not system_user.username_same_with_user \
|
||||
and not system_user_username:
|
||||
error = 'Missing parameter: system_user_username'
|
||||
raise serializers.ValidationError(error)
|
||||
self._system_user_username = system_user_username
|
||||
return system_user_username
|
||||
|
||||
@staticmethod
|
||||
def validate_object_exist(model, field_id):
|
||||
with tmp_to_root_org():
|
||||
obj = get_object_or_none(model, pk=field_id)
|
||||
if not obj:
|
||||
error = '{} Model object does not exist'.format(model.__name__)
|
||||
raise serializers.ValidationError(error)
|
||||
return obj
|
||||
|
||||
@lazyproperty
|
||||
def system_user(self):
|
||||
if self._system_user.username_same_with_user:
|
||||
username = self.user.username
|
||||
elif self._system_user.login_mode == SystemUser.LOGIN_MANUAL:
|
||||
username = self._system_user_username
|
||||
else:
|
||||
username = self._system_user.username
|
||||
self._system_user.username = username
|
||||
return self._system_user
|
||||
|
||||
@lazyproperty
|
||||
def org(self):
|
||||
return self.asset.org
|
||||
3
apps/acls/tests.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from django.test import TestCase
|
||||
|
||||
# Create your tests here.
|
||||
1
apps/acls/urls/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
from .api_urls import *
|
||||
18
apps/acls/urls/api_urls.py
Normal file
@@ -0,0 +1,18 @@
|
||||
from django.urls import path
|
||||
from rest_framework_bulk.routes import BulkRouter
|
||||
from .. import api
|
||||
|
||||
|
||||
app_name = 'acls'
|
||||
|
||||
|
||||
router = BulkRouter()
|
||||
router.register(r'login-acls', api.LoginACLViewSet, 'login-acl')
|
||||
router.register(r'login-asset-acls', api.LoginAssetACLViewSet, 'login-asset-acl')
|
||||
|
||||
urlpatterns = [
|
||||
path('login-asset/check/', api.LoginAssetCheckAPI.as_view(), name='login-asset-check'),
|
||||
path('login-asset-confirm/<uuid:pk>/status/', api.LoginAssetConfirmStatusAPI.as_view(), name='login-asset-confirm-status')
|
||||
]
|
||||
|
||||
urlpatterns += router.urls
|
||||
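For orientation, the routes this module produces and how they are reversed by name; the /api/v1/acls/ mount point is an assumption about the deployment, and the example assumes these URLs are included under the `acls` namespace in the project urlconf.

```python
# Router-generated collections:
#   login-acls/                             -> LoginACLViewSet       (basename 'login-acl')
#   login-asset-acls/                       -> LoginAssetACLViewSet  (basename 'login-asset-acl')
# Explicit paths:
#   login-asset/check/                      -> LoginAssetCheckAPI
#   login-asset-confirm/<uuid:pk>/status/   -> LoginAssetConfirmStatusAPI

from uuid import uuid4
from django.urls import reverse

ticket_id = uuid4()  # placeholder; in practice this is the confirm ticket's id
check_url = reverse('acls:login-asset-check')
status_url = reverse('acls:login-asset-confirm-status', kwargs={'pk': ticket_id})
```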
68
apps/acls/utils.py
Normal file
@@ -0,0 +1,68 @@
|
||||
from ipaddress import ip_network, ip_address
|
||||
|
||||
|
||||
def is_ip_address(address):
|
||||
""" 192.168.10.1 """
|
||||
try:
|
||||
ip_address(address)
|
||||
except ValueError:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
def is_ip_network(ip):
|
||||
""" 192.168.1.0/24 """
|
||||
try:
|
||||
ip_network(ip)
|
||||
except ValueError:
|
||||
return False
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
def is_ip_segment(ip):
|
||||
""" 10.1.1.1-10.1.1.20 """
|
||||
if '-' not in ip:
|
||||
return False
|
||||
ip_address1, ip_address2 = ip.split('-')
|
||||
return is_ip_address(ip_address1) and is_ip_address(ip_address2)
|
||||
|
||||
|
||||
def in_ip_segment(ip, ip_segment):
|
||||
ip1, ip2 = ip_segment.split('-')
|
||||
ip1 = int(ip_address(ip1))
|
||||
ip2 = int(ip_address(ip2))
|
||||
ip = int(ip_address(ip))
|
||||
return min(ip1, ip2) <= ip <= max(ip1, ip2)
|
||||
|
||||
|
||||
def contains_ip(ip, ip_group):
|
||||
"""
|
||||
ip_group:
|
||||
[192.168.10.1, 192.168.1.0/24, 10.1.1.1-10.1.1.20, 2001:db8:2de::e13, 2001:db8:1a:1110::/64.]
|
||||
|
||||
"""
|
||||
|
||||
if '*' in ip_group:
|
||||
return True
|
||||
|
||||
for _ip in ip_group:
|
||||
if is_ip_address(_ip):
|
||||
# 192.168.10.1
|
||||
if ip == _ip:
|
||||
return True
|
||||
elif is_ip_network(_ip) and is_ip_address(ip):
|
||||
# 192.168.1.0/24
|
||||
if ip_address(ip) in ip_network(_ip):
|
||||
return True
|
||||
elif is_ip_segment(_ip) and is_ip_address(ip):
|
||||
# 10.1.1.1-10.1.1.20
|
||||
if in_ip_segment(ip, _ip):
|
||||
return True
|
||||
else:
|
||||
# is domain name
|
||||
if ip == _ip:
|
||||
return True
|
||||
|
||||
return False
|
||||
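A quick illustration of how contains_ip treats each entry format (the addresses are arbitrary examples, not taken from the diff):

    ip_group = ['192.168.10.1', '192.168.1.0/24', '10.1.1.1-10.1.1.20']
    contains_ip('192.168.10.1', ip_group)   # True: exact address match
    contains_ip('192.168.1.77', ip_group)   # True: inside the 192.168.1.0/24 network
    contains_ip('10.1.1.30', ip_group)      # False: outside the 10.1.1.1-10.1.1.20 segment
    contains_ip('anything', ['*'])          # True: '*' matches everything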
@@ -77,8 +77,8 @@ class SerializeApplicationToTreeNodeMixin:

    @staticmethod
    def filter_organizations(applications):
        organizations_id = set(applications.values_list('org_id', flat=True))
        organizations = [Organization.get_instance(org_id) for org_id in organizations_id]
        organization_ids = set(applications.values_list('org_id', flat=True))
        organizations = [Organization.get_instance(org_id) for org_id in organization_ids]
        return organizations

    def serialize_applications_with_org(self, applications):
@@ -3,6 +3,7 @@ from django.utils.translation import ugettext_lazy as _

from orgs.mixins.models import OrgModelMixin
from common.mixins import CommonModelMixin
from assets.models import Asset
from .. import const


@@ -35,3 +36,35 @@ class Application(CommonModelMixin, OrgModelMixin):
    @property
    def category_remote_app(self):
        return self.category == const.ApplicationCategoryChoices.remote_app.value

    def get_rdp_remote_app_setting(self):
        from applications.serializers.attrs import get_serializer_class_by_application_type
        if not self.category_remote_app:
            raise ValueError(f"Not a remote app application: {self.name}")
        serializer_class = get_serializer_class_by_application_type(self.type)
        fields = serializer_class().get_fields()

        parameters = [self.type]
        for field_name in list(fields.keys()):
            if field_name in ['asset']:
                continue
            value = self.attrs.get(field_name)
            if not value:
                continue
            if field_name == 'path':
                value = '\"%s\"' % value
            parameters.append(str(value))

        parameters = ' '.join(parameters)
        return {
            'program': '||jmservisor',
            'working_directory': '',
            'parameters': parameters
        }

    def get_remote_app_asset(self):
        asset_id = self.attrs.get('asset')
        if not asset_id:
            raise ValueError("Remote App not has asset attr")
        asset = Asset.objects.filter(id=asset_id).first()
        return asset
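To illustrate the shape of the result (the attrs keys below are hypothetical, not taken from the diff): for an application whose attrs were {'asset': '<uuid>', 'path': 'C:\\App\\app.exe', 'username': 'web'}, get_rdp_remote_app_setting() would return roughly:

    {
        'program': '||jmservisor',
        'working_directory': '',
        'parameters': '<app type> "C:\\App\\app.exe" web'
    }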
@@ -44,8 +44,8 @@ class ApplicationSerializerMixin(serializers.Serializer):


class ApplicationSerializer(ApplicationSerializerMixin, BulkOrgResourceModelSerializer):
    category_display = serializers.ReadOnlyField(source='get_category_display', label=_('Category'))
    type_display = serializers.ReadOnlyField(source='get_type_display', label=_('Type'))
    category_display = serializers.ReadOnlyField(source='get_category_display', label=_('Category(Display)'))
    type_display = serializers.ReadOnlyField(source='get_type_display', label=_('Type(Display)'))

    class Meta:
        model = models.Application
@@ -27,31 +27,5 @@ class RemoteAppConnectionInfoSerializer(serializers.ModelSerializer):
        return obj.attrs.get('asset')

    @staticmethod
    def get_parameters(obj):
        """
        Return the `parameters` part of the RemoteApp configuration that Guacamole needs
        """
        from .attrs import get_serializer_class_by_application_type
        serializer_class = get_serializer_class_by_application_type(obj.type)
        fields = serializer_class().get_fields()

        parameters = [obj.type]
        for field_name in list(fields.keys()):
            if field_name in ['asset']:
                continue
            value = obj.attrs.get(field_name)
            if not value:
                continue
            if field_name == 'path':
                value = '\"%s\"' % value
            parameters.append(str(value))

        parameters = ' '.join(parameters)
        return parameters

    def get_parameter_remote_app(self, obj):
        return {
            'program': '||jmservisor',
            'working_directory': '',
            'parameters': self.get_parameters(obj)
        }
    def get_parameter_remote_app(obj):
        return obj.get_rdp_remote_app_setting()
@@ -33,6 +33,10 @@ class AdminUserViewSet(OrgBulkModelViewSet):
    search_fields = filterset_fields
    serializer_class = serializers.AdminUserSerializer
    permission_classes = (IsOrgAdmin,)
    serializer_classes = {
        'default': serializers.AdminUserSerializer,
        'retrieve': serializers.AdminUserDetailSerializer,
    }

    def get_queryset(self):
        queryset = super().get_queryset()

@@ -3,8 +3,6 @@
from assets.api import FilterAssetByNodeMixin
from rest_framework.viewsets import ModelViewSet
from rest_framework.generics import RetrieveAPIView
from rest_framework.response import Response
from rest_framework import status
from django.shortcuts import get_object_or_404

from common.utils import get_logger, get_object_or_none
@@ -1,14 +1,15 @@
from typing import List

from common.utils.common import timeit
from assets.models import Node, Asset
from assets.pagination import AssetLimitOffsetPagination
from common.utils import lazyproperty, dict_get_any, is_uuid, get_object_or_none
from assets.pagination import NodeAssetTreePagination
from common.utils import lazyproperty
from assets.utils import get_node, is_query_node_all_assets


class SerializeToTreeNodeMixin:
    permission_classes = ()

    @timeit
    def serialize_nodes(self, nodes: List[Node], with_asset_amount=False):
        if with_asset_amount:
            def _name(node: Node):

@@ -45,6 +46,7 @@ class SerializeToTreeNodeMixin:
            return platform
        return default

    @timeit
    def serialize_assets(self, assets, node_key=None):
        if node_key is None:
            get_pid = lambda asset: getattr(asset, 'parent_key', '')

@@ -79,7 +81,7 @@ class SerializeToTreeNodeMixin:


class FilterAssetByNodeMixin:
    pagination_class = AssetLimitOffsetPagination
    pagination_class = NodeAssetTreePagination

    @lazyproperty
    def is_query_node_all_assets(self):
@@ -8,7 +8,6 @@ from rest_framework.response import Response
from rest_framework.decorators import action
from django.utils.translation import ugettext_lazy as _
from django.shortcuts import get_object_or_404, Http404
from django.utils.decorators import method_decorator
from django.db.models.signals import m2m_changed

from common.const.http import POST

@@ -17,20 +16,19 @@ from common.const.signals import PRE_REMOVE, POST_REMOVE
from assets.models import Asset
from common.utils import get_logger, get_object_or_none
from common.tree import TreeNodeSerializer
from common.const.distributed_lock_key import UPDATE_NODE_TREE_LOCK_KEY
from orgs.mixins.api import OrgModelViewSet
from orgs.mixins import generics
from orgs.lock import org_level_transaction_lock
from orgs.utils import current_org
from assets.tasks import check_node_assets_amount_task
from ..hands import IsOrgAdmin
from ..models import Node
from ..tasks import (
    update_node_assets_hardware_info_manual,
    test_node_assets_connectivity_manual,
    check_node_assets_amount_task
)
from .. import serializers
from .mixin import SerializeToTreeNodeMixin
from assets.locks import NodeAddChildrenLock


logger = get_logger(__file__)
@@ -50,17 +48,17 @@ class NodeViewSet(OrgModelViewSet):
    permission_classes = (IsOrgAdmin,)
    serializer_class = serializers.NodeSerializer

    @action(methods=[POST], detail=False, url_name='launch-check-assets-amount-task')
    def launch_check_assets_amount_task(self, request):
        task = check_node_assets_amount_task.delay(current_org.id)
        return Response(data={'task': task.id})

    # Only root nodes can be created directly; nodes under a child node must be created via the children API
    def perform_create(self, serializer):
        child_key = Node.org_root().get_next_child_key()
        serializer.validated_data["key"] = child_key
        serializer.save()

    @action(methods=[POST], detail=False, url_path='check_assets_amount_task')
    def check_assets_amount_task(self, request):
        task = check_node_assets_amount_task.delay(current_org.id)
        return Response(data={'task': task.id})

    def perform_update(self, serializer):
        node = self.get_object()
        if node.is_org_root() and node.value != serializer.validated_data['value']:
@@ -117,22 +115,27 @@ class NodeChildrenApi(generics.ListCreateAPIView):
        return super().initial(request, *args, **kwargs)

    def perform_create(self, serializer):
        data = serializer.validated_data
        _id = data.get("id")
        value = data.get("value")
        if not value:
            value = self.instance.get_next_child_preset_name()
        node = self.instance.create_child(value=value, _id=_id)
        # Avoid querying the full value
        node._full_value = node.value
        serializer.instance = node
        with NodeAddChildrenLock(self.instance):
            data = serializer.validated_data
            _id = data.get("id")
            value = data.get("value")
            if not value:
                value = self.instance.get_next_child_preset_name()
            node = self.instance.create_child(value=value, _id=_id)
            # Avoid querying the full value
            node._full_value = node.value
            serializer.instance = node

    def get_object(self):
        pk = self.kwargs.get('pk') or self.request.query_params.get('id')
        key = self.request.query_params.get("key")

        if not pk and not key:
            node = Node.org_root()
            self.is_initial = True
            if current_org.is_root():
                node = None
            else:
                node = Node.org_root()
            return node
        if pk:
            node = get_object_or_404(Node, pk=pk)
@@ -140,16 +143,26 @@ class NodeChildrenApi(generics.ListCreateAPIView):
            node = get_object_or_404(Node, key=key)
        return node

    def get_org_root_queryset(self, query_all):
        if query_all:
            return Node.objects.all()
        else:
            return Node.org_root_nodes()

    def get_queryset(self):
        query_all = self.request.query_params.get("all", "0") == "all"
        if not self.instance:
            return Node.objects.none()

        if self.is_initial and current_org.is_root():
            return self.get_org_root_queryset(query_all)

        if self.is_initial:
            with_self = True
        else:
            with_self = False

        if not self.instance:
            return Node.objects.none()

        if query_all:
            queryset = self.instance.get_all_children(with_self=with_self)
        else:
@@ -181,12 +194,12 @@ class NodeChildrenAsTreeApi(SerializeToTreeNodeMixin, NodeChildrenApi):

    def get_assets(self):
        include_assets = self.request.query_params.get('assets', '0') == '1'
        if not include_assets:
        if not self.instance or not include_assets:
            return []
        assets = self.instance.get_assets().only(
            "id", "hostname", "ip", "os",
            "org_id", "protocols", "is_active"
        )
            "id", "hostname", "ip", "os", "platform_id",
            "org_id", "protocols", "is_active",
        ).prefetch_related('platform')
        return self.serialize_assets(assets, self.instance.key)
@@ -210,17 +223,16 @@ class NodeAddChildrenApi(generics.UpdateAPIView):
    serializer_class = serializers.NodeAddChildrenSerializer
    instance = None

    def put(self, request, *args, **kwargs):
    def update(self, request, *args, **kwargs):
        """ Support both PUT and PATCH methods """
        instance = self.get_object()
        nodes_id = request.data.get("nodes")
        children = Node.objects.filter(id__in=nodes_id)
        node_ids = request.data.get("nodes")
        children = Node.objects.filter(id__in=node_ids)
        for node in children:
            node.parent = instance
        return Response("OK")


@method_decorator(org_level_transaction_lock(UPDATE_NODE_TREE_LOCK_KEY), name='patch')
@method_decorator(org_level_transaction_lock(UPDATE_NODE_TREE_LOCK_KEY), name='put')
class NodeAddAssetsApi(generics.UpdateAPIView):
    model = Node
    serializer_class = serializers.NodeAssetsSerializer

@@ -233,8 +245,6 @@ class NodeAddAssetsApi(generics.UpdateAPIView):
        instance.assets.add(*tuple(assets))


@method_decorator(org_level_transaction_lock(UPDATE_NODE_TREE_LOCK_KEY), name='patch')
@method_decorator(org_level_transaction_lock(UPDATE_NODE_TREE_LOCK_KEY), name='put')
class NodeRemoveAssetsApi(generics.UpdateAPIView):
    model = Node
    serializer_class = serializers.NodeAssetsSerializer

@@ -247,12 +257,13 @@ class NodeRemoveAssetsApi(generics.UpdateAPIView):
        node.assets.remove(*assets)

        # Add orphan assets to the root node
        orphan_assets = Asset.objects.filter(id__in=[a.id for a in assets], nodes__isnull=True).distinct()
        orphan_assets = Asset.objects.filter(
            id__in=[a.id for a in assets],
            nodes__isnull=True
        ).distinct()
        Node.org_root().assets.add(*orphan_assets)


@method_decorator(org_level_transaction_lock(UPDATE_NODE_TREE_LOCK_KEY), name='patch')
@method_decorator(org_level_transaction_lock(UPDATE_NODE_TREE_LOCK_KEY), name='put')
class MoveAssetsToNodeApi(generics.UpdateAPIView):
    model = Node
    serializer_class = serializers.NodeAssetsSerializer
@@ -87,13 +87,13 @@ class SystemUserTaskApi(generics.CreateAPIView):
    permission_classes = (IsOrgAdmin,)
    serializer_class = serializers.SystemUserTaskSerializer

    def do_push(self, system_user, assets_id=None):
        if assets_id is None:
    def do_push(self, system_user, asset_ids=None):
        if asset_ids is None:
            task = push_system_user_to_assets_manual.delay(system_user)
        else:
            username = self.request.query_params.get('username')
            task = push_system_user_to_assets.delay(
                system_user.id, assets_id, username=username
                system_user.id, asset_ids, username=username
            )
        return task

@@ -114,9 +114,9 @@ class SystemUserTaskApi(generics.CreateAPIView):
        system_user = self.get_object()
        if action == 'push':
            assets = [asset] if asset else assets
            assets_id = [asset.id for asset in assets]
            assets_id = assets_id if assets_id else None
            task = self.do_push(system_user, assets_id)
            asset_ids = [asset.id for asset in assets]
            asset_ids = asset_ids if asset_ids else None
            task = self.do_push(system_user, asset_ids)
        else:
            task = self.do_test(system_user)
        data = getattr(serializer, '_data', {})
@@ -19,9 +19,10 @@ __all__ = [
class RelationMixin:
    def get_queryset(self):
        queryset = self.model.objects.all()
        org_id = current_org.org_id()
        if org_id is not None:
        if not current_org.is_root():
            org_id = current_org.org_id()
            queryset = queryset.filter(systemuser__org_id=org_id)

        queryset = queryset.annotate(systemuser_display=Concat(
            F('systemuser__name'), Value('('), F('systemuser__username'),
            Value(')')
@@ -1,16 +1,6 @@
from __future__ import unicode_literals

from django.apps import AppConfig
from django.db.models.signals import post_migrate


def initial_some_nodes():
    from .models import Node
    Node.initial_some_nodes()


def initial_some_nodes_callback(sender, **kwargs):
    initial_some_nodes()


class AssetsConfig(AppConfig):

@@ -19,7 +9,3 @@ class AssetsConfig(AppConfig):
    def ready(self):
        super().ready()
        from . import signals_handler
        try:
            initial_some_nodes()
        except Exception:
            post_migrate.connect(initial_some_nodes_callback, sender=self)
@@ -40,7 +40,7 @@ class BaseBackend:
        return values

    @staticmethod
    def make_assets_as_id(assets):
    def make_assets_as_ids(assets):
        if not assets:
            return []
        if isinstance(assets[0], Asset):

@@ -69,9 +69,9 @@ class DBBackend(BaseBackend):
        self.queryset = self.queryset.filter(union_id=union_id)

    def _filter_assets(self, assets):
        assets_id = self.make_assets_as_id(assets)
        if assets_id:
            self.queryset = self.queryset.filter(asset_id__in=assets_id)
        asset_ids = self.make_assets_as_ids(assets)
        if asset_ids:
            self.queryset = self.queryset.filter(asset_id__in=asset_ids)

    def _filter_node(self, node):
        pass

@@ -165,7 +165,7 @@ class SystemUserBackend(DBBackend):
        kwargs = self.get_annotate()
        filters = self.get_filter()
        qs = self.model.objects.all().annotate(**kwargs)
        if current_org.org_id() is not None:
        if not current_org.is_root():
            filters['org_id'] = current_org.org_id()
        qs = qs.filter(**filters)
        qs = self.qs_to_values(qs)
29  apps/assets/locks.py  Normal file
@@ -0,0 +1,29 @@
from orgs.utils import current_org
from common.utils.lock import DistributedLock
from assets.models import Node


class NodeTreeUpdateLock(DistributedLock):
    name_template = 'assets.node.tree.update.<org_id:{org_id}>'

    def get_name(self):
        if current_org:
            org_id = current_org.id
        else:
            org_id = 'current_org_is_null'
        name = self.name_template.format(
            org_id=org_id
        )
        return name

    def __init__(self):
        name = self.get_name()
        super().__init__(name=name, release_on_transaction_commit=True, reentrant=True)


class NodeAddChildrenLock(DistributedLock):
    name_template = 'assets.node.add_children.<org_id:{org_id}>'

    def __init__(self, node: Node):
        name = self.name_template.format(org_id=node.org_id)
        super().__init__(name=name, release_on_transaction_commit=True)
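The lock is meant to be held as a context manager around child creation, mirroring the NodeChildrenApi.perform_create change above; a minimal sketch:

    # Sketch only: serialize concurrent child creation under one parent node.
    parent = Node.org_root()
    with NodeAddChildrenLock(parent):
        child = parent.create_child(value='web-servers')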
17  apps/assets/migrations/0066_auto_20210208_1802.py  Normal file
@@ -0,0 +1,17 @@
# Generated by Django 3.1 on 2021-02-08 10:02

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('assets', '0065_auto_20210121_1549'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='asset',
            options={'ordering': ['hostname'], 'verbose_name': 'Asset'},
        ),
    ]

48  apps/assets/migrations/0067_auto_20210311_1113.py  Normal file
@@ -0,0 +1,48 @@
# Generated by Django 3.1 on 2021-03-11 03:13

import django.core.validators
from django.db import migrations, models


def migrate_cmd_filter_priority(apps, schema_editor):
    cmd_filter_rule_model = apps.get_model('assets', 'CommandFilterRule')
    cmd_filter_rules = cmd_filter_rule_model.objects.all()
    for cmd_filter_rule in cmd_filter_rules:
        cmd_filter_rule.priority = 100 - cmd_filter_rule.priority + 1

    cmd_filter_rule_model.objects.bulk_update(cmd_filter_rules, fields=['priority'])


def migrate_system_user_priority(apps, schema_editor):
    system_user_model = apps.get_model('assets', 'SystemUser')
    system_users = system_user_model.objects.all()
    for system_user in system_users:
        system_user.priority = 100 - system_user.priority + 1

    system_user_model.objects.bulk_update(system_users, fields=['priority'])


class Migration(migrations.Migration):

    dependencies = [
        ('assets', '0066_auto_20210208_1802'),
    ]

    operations = [
        migrations.RunPython(migrate_cmd_filter_priority),
        migrations.RunPython(migrate_system_user_priority),
        migrations.AlterModelOptions(
            name='commandfilterrule',
            options={'ordering': ('priority', 'action'), 'verbose_name': 'Command filter rule'},
        ),
        migrations.AlterField(
            model_name='commandfilterrule',
            name='priority',
            field=models.IntegerField(default=50, help_text='1-100, the lower the value will be match first', validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(100)], verbose_name='Priority'),
        ),
        migrations.AlterField(
            model_name='systemuser',
            name='priority',
            field=models.IntegerField(default=20, help_text='1-100, the lower the value will be match first', validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(100)], verbose_name='Priority'),
        ),
    ]
19  apps/assets/migrations/0068_auto_20210312_1455.py  Normal file
@@ -0,0 +1,19 @@
# Generated by Django 3.1 on 2021-03-12 06:55

import django.core.validators
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('assets', '0067_auto_20210311_1113'),
    ]

    operations = [
        migrations.AlterField(
            model_name='systemuser',
            name='priority',
            field=models.IntegerField(default=81, help_text='1-100, the lower the value will be match first', validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(100)], verbose_name='Priority'),
        ),
    ]
61  apps/assets/migrations/0069_change_node_key0_to_key1.py  Normal file
@@ -0,0 +1,61 @@
from django.db import migrations
from django.db.transaction import atomic

default_id = '00000000-0000-0000-0000-000000000002'


def change_key0_to_key1(apps, schema_editor):
    from orgs.utils import set_current_org

    # https://stackoverflow.com/questions/28777338/django-migrations-runpython-not-able-to-call-model-methods
    Organization = apps.get_model('orgs', 'Organization')
    Node = apps.get_model('assets', 'Node')

    print()
    org = Organization.objects.get(id=default_id)
    set_current_org(org)

    exists_0 = Node.objects.filter(key__startswith='0').exists()
    if not exists_0:
        print(f'--> Not exist key=0 nodes, do nothing.')
        return

    key_1_count = Node.objects.filter(key__startswith='1').count()
    if key_1_count > 1:
        print(f'--> Node key=1 have children, can`t just delete it. Please contact JumpServer team')
        return

    root_node = Node.objects.filter(key='1').first()
    if root_node and root_node.assets.exists():
        print(f'--> Node key=1 has assets, do nothing.')
        return

    with atomic():
        if root_node:
            print(f'--> Delete node key=1')
            root_node.delete()

        nodes_0 = Node.objects.filter(key__startswith='0')

        for n in nodes_0:
            old_key = n.key
            key_list = n.key.split(':')
            key_list[0] = '1'
            new_key = ':'.join(key_list)
            new_parent_key = ':'.join(key_list[:-1])
            n.key = new_key
            n.parent_key = new_parent_key
            n.save()
            print('--> Modify key ( {} > {} )'.format(old_key, new_key))


class Migration(migrations.Migration):

    dependencies = [
        ('orgs', '0010_auto_20210219_1241'),
        ('assets', '0068_auto_20210312_1455'),
    ]

    operations = [
        migrations.RunPython(change_key0_to_key1)
    ]
@@ -17,7 +17,7 @@ from orgs.mixins.models import OrgModelMixin, OrgManager
from .base import ConnectivityMixin
from .utils import Connectivity

__all__ = ['Asset', 'ProtocolsMixin', 'Platform']
__all__ = ['Asset', 'ProtocolsMixin', 'Platform', 'AssetQuerySet']
logger = logging.getLogger(__name__)


@@ -41,13 +41,6 @@ def default_node():


class AssetManager(OrgManager):
    def get_queryset(self):
        return super().get_queryset().annotate(
            platform_base=models.F('platform__base')
        )


class AssetOrgManager(OrgManager):
    pass


@@ -230,7 +223,6 @@ class Asset(ProtocolsMixin, NodesRelationMixin, OrgModelMixin):
    comment = models.TextField(default='', blank=True, verbose_name=_('Comment'))

    objects = AssetManager.from_queryset(AssetQuerySet)()
    org_objects = AssetOrgManager.from_queryset(AssetQuerySet)()
    _connectivity = None

    def __str__(self):

@@ -361,4 +353,4 @@ class Asset(ProtocolsMixin, NodesRelationMixin, OrgModelMixin):
    class Meta:
        unique_together = [('org_id', 'hostname')]
        verbose_name = _("Asset")
        ordering = ["hostname", "ip"]
        ordering = ["hostname", ]
@@ -11,9 +11,12 @@ from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.conf import settings

from common.db.models import ChoiceSet
from common.utils import random_string
from common.utils import (
    ssh_key_string_to_obj, ssh_key_gen, get_logger, lazyproperty
)
from common.utils.encode import ssh_pubkey_gen
from common.validators import alphanumeric
from common import fields
from orgs.mixins.models import OrgModelMixin

@@ -105,6 +108,19 @@ class AuthMixin:
    username = ''
    _prefer = 'system_user'

    @property
    def ssh_key_fingerprint(self):
        if self.public_key:
            public_key = self.public_key
        elif self.private_key:
            public_key = ssh_pubkey_gen(private_key=self.private_key, password=self.password)
        else:
            return ''

        public_key_obj = sshpubkeys.SSHKey(public_key)
        fingerprint = public_key_obj.hash_md5()
        return fingerprint

    @property
    def private_key_obj(self):
        if self.private_key:

@@ -204,8 +220,8 @@ class AuthMixin:
        self.save()

    @staticmethod
    def gen_password():
        return str(uuid.uuid4())
    def gen_password(length=36):
        return random_string(length, special_char=True)

    @staticmethod
    def gen_key(username):
@@ -50,7 +50,7 @@ class CommandFilterRule(OrgModelMixin):
    id = models.UUIDField(default=uuid.uuid4, primary_key=True)
    filter = models.ForeignKey('CommandFilter', on_delete=models.CASCADE, verbose_name=_("Filter"), related_name='rules')
    type = models.CharField(max_length=16, default=TYPE_COMMAND, choices=TYPE_CHOICES, verbose_name=_("Type"))
    priority = models.IntegerField(default=50, verbose_name=_("Priority"), help_text=_("1-100, the higher will be match first"),
    priority = models.IntegerField(default=50, verbose_name=_("Priority"), help_text=_("1-100, the lower the value will be match first"),
                                   validators=[MinValueValidator(1), MaxValueValidator(100)])
    content = models.TextField(verbose_name=_("Content"), help_text=_("One line one command"))
    action = models.IntegerField(default=ACTION_DENY, choices=ACTION_CHOICES, verbose_name=_("Action"))

@@ -60,7 +60,7 @@ class CommandFilterRule(OrgModelMixin):
    created_by = models.CharField(max_length=128, blank=True, default='', verbose_name=_('Created by'))

    class Meta:
        ordering = ('-priority', 'action')
        ordering = ('priority', 'action')
        verbose_name = _("Command filter rule")

    @lazyproperty
@@ -16,17 +16,5 @@ class FavoriteAsset(CommonModelMixin):
        unique_together = ('user', 'asset')

    @classmethod
    def get_user_favorite_assets_id(cls, user):
    def get_user_favorite_asset_ids(cls, user):
        return cls.objects.filter(user=user).values_list('asset', flat=True)

    @classmethod
    def get_user_favorite_assets(cls, user, asset_perms_id=None):
        from assets.models import Asset
        from perms.utils.asset.user_permission import get_user_granted_all_assets
        asset_ids = get_user_granted_all_assets(
            user,
            via_mapping_node=False,
            asset_perms_id=asset_perms_id
        ).values_list('id', flat=True)
        query_name = cls.asset.field.related_query_name()
        return Asset.org_objects.filter(**{f'{query_name}__user_id': user.id}, id__in=asset_ids).distinct()
@@ -1,23 +1,32 @@
# -*- coding: utf-8 -*-
#
import uuid
import re
import time
import uuid
import threading
import os
import time
import uuid

from collections import defaultdict
from django.db import models, transaction
from django.db.models import Q
from django.db.models import Q, Manager
from django.db.utils import IntegrityError
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ugettext
from django.db.transaction import atomic
from django.core.cache import cache

from common.utils.lock import DistributedLock
from common.utils.common import timeit
from common.db.models import output_as_string
from common.utils import get_logger
from common.utils.common import lazyproperty
from orgs.mixins.models import OrgModelMixin, OrgManager
from orgs.utils import get_current_org, tmp_to_org
from orgs.utils import get_current_org, tmp_to_org, tmp_to_root_org
from orgs.models import Organization


__all__ = ['Node', 'FamilyMixin', 'compute_parent_key']
__all__ = ['Node', 'FamilyMixin', 'compute_parent_key', 'NodeQuerySet']
logger = get_logger(__name__)
@@ -247,9 +256,156 @@ class FamilyMixin:
        return [*tuple(ancestors), self, *tuple(children)]


class NodeAssetsMixin:
class NodeAllAssetsMappingMixin:
    # Use a new plan

    # { org_id: { node_key: [ asset1_id, asset2_id ] } }
    orgid_nodekey_assetsid_mapping = defaultdict(dict)
    locks_for_get_mapping_from_cache = defaultdict(threading.Lock)

    @classmethod
    def get_lock(cls, org_id):
        lock = cls.locks_for_get_mapping_from_cache[str(org_id)]
        return lock

    @classmethod
    def get_node_all_asset_ids_mapping(cls, org_id):
        _mapping = cls.get_node_all_asset_ids_mapping_from_memory(org_id)
        if _mapping:
            return _mapping

        logger.debug(f'Get node asset mapping from memory failed, acquire thread lock: '
                     f'thread={threading.get_ident()} '
                     f'org_id={org_id}')
        with cls.get_lock(org_id):
            logger.debug(f'Acquired thread lock ok. check if mapping is in memory now: '
                         f'thread={threading.get_ident()} '
                         f'org_id={org_id}')
            _mapping = cls.get_node_all_asset_ids_mapping_from_memory(org_id)
            if _mapping:
                logger.debug(f'Mapping is already in memory now: '
                             f'thread={threading.get_ident()} '
                             f'org_id={org_id}')
                return _mapping

            _mapping = cls.get_node_all_asset_ids_mapping_from_cache_or_generate_to_cache(org_id)
            cls.set_node_all_asset_ids_mapping_to_memory(org_id, mapping=_mapping)
            return _mapping

    # from memory
    @classmethod
    def get_node_all_asset_ids_mapping_from_memory(cls, org_id):
        mapping = cls.orgid_nodekey_assetsid_mapping.get(org_id, {})
        return mapping

    @classmethod
    def set_node_all_asset_ids_mapping_to_memory(cls, org_id, mapping):
        cls.orgid_nodekey_assetsid_mapping[org_id] = mapping

    @classmethod
    def expire_node_all_asset_ids_mapping_from_memory(cls, org_id):
        org_id = str(org_id)
        cls.orgid_nodekey_assetsid_mapping.pop(org_id, None)

    @classmethod
    def expire_all_orgs_node_all_asset_ids_mapping_from_memory(cls):
        orgs = Organization.objects.all()
        org_ids = [str(org.id) for org in orgs]
        org_ids.append(Organization.ROOT_ID)

        for id in org_ids:
            cls.expire_node_all_asset_ids_mapping_from_memory(id)

    # get order: from memory -> (from cache -> to generate)
    @classmethod
    def get_node_all_asset_ids_mapping_from_cache_or_generate_to_cache(cls, org_id):
        mapping = cls.get_node_all_asset_ids_mapping_from_cache(org_id)
        if mapping:
            return mapping

        lock_key = f'KEY_LOCK_GENERATE_ORG_{org_id}_NODE_ALL_ASSET_ids_MAPPING'
        with DistributedLock(lock_key):
            # An indefinite lock is used here on purpose: if we get stuck here, we are stuck
            # on the database, which means the database is busy, so no other thread should
            # run this operation and make the database even busier.

            _mapping = cls.get_node_all_asset_ids_mapping_from_cache(org_id)
            if _mapping:
                return _mapping

            _mapping = cls.generate_node_all_asset_ids_mapping(org_id)
            cls.set_node_all_asset_ids_mapping_to_cache(org_id=org_id, mapping=_mapping)
            return _mapping

    @classmethod
    def get_node_all_asset_ids_mapping_from_cache(cls, org_id):
        cache_key = cls._get_cache_key_for_node_all_asset_ids_mapping(org_id)
        mapping = cache.get(cache_key)
        logger.info(f'Get node asset mapping from cache {bool(mapping)}: '
                    f'thread={threading.get_ident()} '
                    f'org_id={org_id}')
        return mapping

    @classmethod
    def set_node_all_asset_ids_mapping_to_cache(cls, org_id, mapping):
        cache_key = cls._get_cache_key_for_node_all_asset_ids_mapping(org_id)
        cache.set(cache_key, mapping, timeout=None)

    @classmethod
    def expire_node_all_asset_ids_mapping_from_cache(cls, org_id):
        cache_key = cls._get_cache_key_for_node_all_asset_ids_mapping(org_id)
        cache.delete(cache_key)

    @staticmethod
    def _get_cache_key_for_node_all_asset_ids_mapping(org_id):
        return 'ASSETS_ORG_NODE_ALL_ASSET_ids_MAPPING_{}'.format(org_id)

    @classmethod
    def generate_node_all_asset_ids_mapping(cls, org_id):
        from .asset import Asset

        logger.info(f'Generate node asset mapping: '
                    f'thread={threading.get_ident()} '
                    f'org_id={org_id}')
        t1 = time.time()
        with tmp_to_org(org_id):
            node_ids_key = Node.objects.annotate(
                char_id=output_as_string('id')
            ).values_list('char_id', 'key')

            # Fetch everything directly; filter(node__org_id=org_id) is slower at large scale
            nodes_asset_ids = Asset.nodes.through.objects.all() \
                .annotate(char_node_id=output_as_string('node_id')) \
                .annotate(char_asset_id=output_as_string('asset_id')) \
                .values_list('char_node_id', 'char_asset_id')

            node_id_ancestor_keys_mapping = {
                node_id: cls.get_node_ancestor_keys(node_key, with_self=True)
                for node_id, node_key in node_ids_key
            }

            nodeid_assetsid_mapping = defaultdict(set)
            for node_id, asset_id in nodes_asset_ids:
                nodeid_assetsid_mapping[node_id].add(asset_id)

        t2 = time.time()

        mapping = defaultdict(set)
        for node_id, node_key in node_ids_key:
            asset_ids = nodeid_assetsid_mapping[node_id]
            node_ancestor_keys = node_id_ancestor_keys_mapping[node_id]
            for ancestor_key in node_ancestor_keys:
                mapping[ancestor_key].update(asset_ids)

        t3 = time.time()
        logger.info('t1-t2(DB Query): {} s, t3-t2(Generate mapping): {} s'.format(t2-t1, t3-t2))
        return mapping


class NodeAssetsMixin(NodeAllAssetsMappingMixin):
    org_id: str
    key = ''
    id = None
    objects: Manager

    def get_all_assets(self):
        from .asset import Asset
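The companion signal handlers (node_assets_mapping, registered below in signals_handler/__init__.py) are expected to invalidate this mapping when node-asset relations change; a rough sketch of the intended flow, using only classmethods defined above:

    # Sketch: drop both levels of the cached mapping for one org, then rebuild lazily.
    Node.expire_node_all_asset_ids_mapping_from_memory(org_id)
    Node.expire_node_all_asset_ids_mapping_from_cache(org_id)
    mapping = Node.get_node_all_asset_ids_mapping(org_id)  # memory -> cache -> generate from DB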
@@ -263,8 +419,7 @@ class NodeAssetsMixin:
        # but startswith causes the Asset index to be ignored when the tables are joined
        from .asset import Asset
        node_ids = cls.objects.filter(
            Q(key__startswith=f'{key}:') |
            Q(key=key)
            Q(key__startswith=f'{key}:') | Q(key=key)
        ).values_list('id', flat=True).distinct()
        assets = Asset.objects.filter(
            nodes__id__in=list(node_ids)
@@ -283,54 +438,42 @@ class NodeAssetsMixin:
        return self.get_all_assets().valid()

    @classmethod
    def get_nodes_all_assets_ids(cls, nodes_keys):
        assets_ids = cls.get_nodes_all_assets(nodes_keys).values_list('id', flat=True)
        return assets_ids
    def get_nodes_all_asset_ids_by_keys(cls, nodes_keys):
        nodes = Node.objects.filter(key__in=nodes_keys)
        asset_ids = cls.get_nodes_all_assets(*nodes).values_list('id', flat=True)
        return asset_ids

    @classmethod
    def get_nodes_all_assets(cls, nodes_keys, extra_assets_ids=None):
    def get_nodes_all_assets(cls, *nodes):
        from .asset import Asset
        nodes_keys = cls.clean_children_keys(nodes_keys)
        q = Q()
        node_ids = ()
        for key in nodes_keys:
            q |= Q(key__startswith=f'{key}:')
            q |= Q(key=key)
        if q:
            node_ids = Node.objects.filter(q).distinct().values_list('id', flat=True)
        node_ids = set()
        descendant_node_query = Q()
        for n in nodes:
            node_ids.add(n.id)
            descendant_node_query |= Q(key__istartswith=f'{n.key}:')
        if descendant_node_query:
            _ids = Node.objects.order_by().filter(descendant_node_query).values_list('id', flat=True)
            node_ids.update(_ids)
        return Asset.objects.order_by().filter(nodes__id__in=node_ids).distinct()

        q = Q(nodes__id__in=list(node_ids))
        if extra_assets_ids:
            q |= Q(id__in=extra_assets_ids)
        if q:
            return Asset.org_objects.filter(q).distinct()
        else:
            return Asset.objects.none()
    def get_all_asset_ids(self):
        asset_ids = self.get_all_asset_ids_by_node_key(org_id=self.org_id, node_key=self.key)
        return set(asset_ids)

    @classmethod
    def get_all_asset_ids_by_node_key(cls, org_id, node_key):
        org_id = str(org_id)
        nodekey_assetsid_mapping = cls.get_node_all_asset_ids_mapping(org_id)
        asset_ids = nodekey_assetsid_mapping.get(node_key, [])
        return set(asset_ids)


class SomeNodesMixin:
    key = ''
    default_key = '1'
    default_value = 'Default'
    empty_key = '-11'
    empty_value = _("empty")

    @classmethod
    def default_node(cls):
        with tmp_to_org(Organization.default()):
            defaults = {'value': cls.default_value}
            try:
                obj, created = cls.objects.get_or_create(
                    defaults=defaults, key=cls.default_key,
                )
            except IntegrityError as e:
                logger.error("Create default node failed: {}".format(e))
                cls.modify_other_org_root_node_key()
                obj, created = cls.objects.get_or_create(
                    defaults=defaults, key=cls.default_key,
                )
            return obj

    def is_default_node(self):
        return self.key == self.default_key
@@ -341,70 +484,61 @@ class SomeNodesMixin:
        return False

    @classmethod
    def get_next_org_root_node_key(cls):
        with tmp_to_org(Organization.root()):
            org_nodes_roots = cls.objects.filter(key__regex=r'^[0-9]+$')
            org_nodes_roots_keys = org_nodes_roots.values_list('key', flat=True)
            if not org_nodes_roots_keys:
                org_nodes_roots_keys = ['1']
            max_key = max([int(k) for k in org_nodes_roots_keys])
            key = str(max_key + 1) if max_key != 0 else '2'
            return key
    def org_root(cls):
        # Using current_org here would cause an infinite loop in set_current_org
        ori_org = get_current_org()

        if ori_org and ori_org.is_default():
            return cls.default_node()

        if ori_org and ori_org.is_root():
            return None

        org_roots = cls.org_root_nodes()
        org_roots_length = len(org_roots)

        if org_roots_length == 1:
            root = org_roots[0]
            return root
        elif org_roots_length == 0:
            root = cls.create_org_root_node()
            return root
        else:
            error = 'Current org {} root node not 1, get {}'.format(ori_org, org_roots_length)
            raise ValueError(error)

    @classmethod
    def default_node(cls):
        default_org = Organization.default()
        with tmp_to_org(default_org):
            defaults = {'value': default_org.name}
            obj, created = cls.objects.get_or_create(defaults=defaults, key=cls.default_key)
            return obj

    @classmethod
    def create_org_root_node(cls):
        # Using current_org here would cause an infinite loop in set_current_org
        ori_org = get_current_org()
        with transaction.atomic():
            if not ori_org.is_real():
                return cls.default_node()
            key = cls.get_next_org_root_node_key()
            root = cls.objects.create(key=key, value=ori_org.name)
            return root

    @classmethod
    def org_root(cls):
        root = cls.objects.filter(parent_key='')\
            .filter(key__regex=r'^[0-9]+$')\
            .exclude(key__startswith='-')\
            .order_by('key')
        if root:
            return root[0]
        else:
            return cls.create_org_root_node()
    def get_next_org_root_node_key(cls):
        with tmp_to_root_org():
            org_nodes_roots = cls.org_root_nodes()
            org_nodes_roots_keys = org_nodes_roots.values_list('key', flat=True)
            if not org_nodes_roots_keys:
                org_nodes_roots_keys = ['1']
            max_key = max([int(k) for k in org_nodes_roots_keys])
            key = str(max_key + 1) if max_key > 0 else '2'
            return key

    @classmethod
    def initial_some_nodes(cls):
        cls.default_node()

    @classmethod
    def modify_other_org_root_node_key(cls):
        """
        Fix the failure when creating the `default` node:
        a default node already exists under another org, so under the DEFAULT org `get` finds nothing and `create` fails
        """
        logger.info("Modify other org root node key")

        with tmp_to_org(Organization.root()):
            node_key1 = cls.objects.filter(key='1').first()
            if not node_key1:
                logger.info("Not found node that `key` = 1")
                return
            if not node_key1.org.is_real():
                logger.info("Org is not real for node that `key` = 1")
                return

        with transaction.atomic():
            with tmp_to_org(node_key1.org):
                org_root_node_new_key = cls.get_next_org_root_node_key()
                for n in cls.objects.all():
                    old_key = n.key
                    key_list = n.key.split(':')
                    key_list[0] = org_root_node_new_key
                    new_key = ':'.join(key_list)
                    n.key = new_key
                    n.save()
                    logger.info('Modify key ( {} > {} )'.format(old_key, new_key))
    def org_root_nodes(cls):
        root_nodes = cls.objects.filter(parent_key='', key__regex=r'^[0-9]+$') \
            .exclude(key__startswith='-').order_by('key')
        return root_nodes


class Node(OrgModelMixin, SomeNodesMixin, FamilyMixin, NodeAssetsMixin):
@@ -87,6 +87,9 @@ class SystemUser(BaseUser):
        (PROTOCOL_POSTGRESQL, 'postgresql'),
        (PROTOCOL_K8S, 'k8s'),
    )

    SUPPORT_PUSH_PROTOCOLS = [PROTOCOL_SSH, PROTOCOL_RDP]

    ASSET_CATEGORY_PROTOCOLS = [
        PROTOCOL_SSH, PROTOCOL_RDP, PROTOCOL_TELNET, PROTOCOL_VNC
    ]

@@ -116,7 +119,7 @@ class SystemUser(BaseUser):
    assets = models.ManyToManyField('assets.Asset', blank=True, verbose_name=_("Assets"))
    users = models.ManyToManyField('users.User', blank=True, verbose_name=_("Users"))
    groups = models.ManyToManyField('users.UserGroup', blank=True, verbose_name=_("User groups"))
    priority = models.IntegerField(default=20, verbose_name=_("Priority"), validators=[MinValueValidator(1), MaxValueValidator(100)])
    priority = models.IntegerField(default=81, verbose_name=_("Priority"), help_text=_("1-100, the lower the value will be match first"), validators=[MinValueValidator(1), MaxValueValidator(100)])
    protocol = models.CharField(max_length=16, choices=PROTOCOL_CHOICES, default='ssh', verbose_name=_('Protocol'))
    auto_push = models.BooleanField(default=True, verbose_name=_('Auto push'))
    sudo = models.TextField(default='/bin/whoami', verbose_name=_('Sudo'))

@@ -151,11 +154,15 @@ class SystemUser(BaseUser):
        return self.get_login_mode_display()

    def is_need_push(self):
        if self.auto_push and self.protocol in [self.PROTOCOL_SSH, self.PROTOCOL_RDP]:
        if self.auto_push and self.is_protocol_support_push:
            return True
        else:
            return False

    @property
    def is_protocol_support_push(self):
        return self.protocol in self.SUPPORT_PUSH_PROTOCOLS

    @property
    def is_need_cmd_filter(self):
        return self.protocol not in [self.PROTOCOL_RDP, self.PROTOCOL_VNC]

@@ -198,10 +205,10 @@ class SystemUser(BaseUser):
    def get_all_assets(self):
        from assets.models import Node
        nodes_keys = self.nodes.all().values_list('key', flat=True)
        assets_ids = set(self.assets.all().values_list('id', flat=True))
        nodes_assets_ids = Node.get_nodes_all_assets_ids(nodes_keys)
        assets_ids.update(nodes_assets_ids)
        assets = Asset.objects.filter(id__in=assets_ids)
        asset_ids = set(self.assets.all().values_list('id', flat=True))
        nodes_asset_ids = Node.get_nodes_all_asset_ids_by_keys(nodes_keys)
        asset_ids.update(nodes_asset_ids)
        assets = Asset.objects.filter(id__in=asset_ids)
        return assets

    @classmethod
@@ -1,39 +1,52 @@
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.request import Request

from common.utils import get_logger
from assets.models import Node

logger = get_logger(__name__)


class AssetPaginationBase(LimitOffsetPagination):

    def init_attrs(self, queryset, request: Request, view=None):
        self._request = request
        self._view = view
        self._user = request.user

    def paginate_queryset(self, queryset, request: Request, view=None):
        self.init_attrs(queryset, request, view)
        return super().paginate_queryset(queryset, request, view=None)

class AssetLimitOffsetPagination(LimitOffsetPagination):
    """
    Must be used together with `assets.api.mixin.FilterAssetByNodeMixin`
    """
    def get_count(self, queryset):
        """
        1. When querying all assets under a node, use Node.assets_amount as the count
        2. With any other filter condition, fall back to super()
        3. When querying only the node's direct assets, fall back to super()
        """
        exclude_query_params = {
            self.limit_query_param,
            self.offset_query_param,
            'node', 'all', 'show_current_asset',
            'node_id', 'display', 'draw', 'fields_size',
            'key', 'all', 'show_current_asset',
            'cache_policy', 'display', 'draw',
            'order', 'node', 'node_id', 'fields_size',
        }

        for k, v in self._request.query_params.items():
            if k not in exclude_query_params and v is not None:
                logger.warn(f'Not hit node.assets_amount because find a unknow query_param `{k}` -> {self._request.get_full_path()}')
                return super().get_count(queryset)
        node_assets_count = self.get_count_from_nodes(queryset)
        if node_assets_count is None:
            return super().get_count(queryset)
        return node_assets_count

    def get_count_from_nodes(self, queryset):
        raise NotImplementedError


class NodeAssetTreePagination(AssetPaginationBase):
    def get_count_from_nodes(self, queryset):
        is_query_all = self._view.is_query_node_all_assets
        if is_query_all:
            node = self._view.node
            if not node:
                node = Node.org_root()
            return node.assets_amount
        return super().get_count(queryset)

    def paginate_queryset(self, queryset, request: Request, view=None):
        self._request = request
        self._view = view
        return super().paginate_queryset(queryset, request, view=None)
            if node:
                logger.debug(f'Hit node.assets_amount[{node.assets_amount}] -> {self._request.get_full_path()}')
                return node.assets_amount
        return None
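A rough illustration of the intended fast path (the URLs and parameter values below are examples, not taken from the diff):

    # GET /api/v1/assets/assets/?node_id=<uuid>&all=1&limit=20&offset=0
    #   only pagination/node params are present, so get_count() can return node.assets_amount
    # GET /api/v1/assets/assets/?node_id=<uuid>&all=1&hostname=web01
    #   'hostname' is not in exclude_query_params, so it falls back to a real COUNT on the queryset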
@@ -3,8 +3,6 @@
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers

from common.drf.serializers import AdaptedBulkListSerializer

from ..models import Node, AdminUser
from orgs.mixins.serializers import BulkOrgResourceModelSerializer

@@ -17,7 +15,6 @@ class AdminUserSerializer(AuthSerializerMixin, BulkOrgResourceModelSerializer):
    """

    class Meta:
        list_serializer_class = AdaptedBulkListSerializer
        model = AdminUser
        fields = [
            'id', 'name', 'username', 'password', 'private_key', 'public_key',

@@ -26,6 +23,7 @@ class AdminUserSerializer(AuthSerializerMixin, BulkOrgResourceModelSerializer):
        read_only_fields = ['date_created', 'date_updated', 'created_by', 'assets_amount']

        extra_kwargs = {
            'username': {"required": True},
            'password': {"write_only": True},
            'private_key': {"write_only": True},
            'public_key': {"write_only": True},

@@ -33,6 +31,11 @@ class AdminUserSerializer(AuthSerializerMixin, BulkOrgResourceModelSerializer):
        }


class AdminUserDetailSerializer(AdminUserSerializer):
    class Meta(AdminUserSerializer.Meta):
        fields = AdminUserSerializer.Meta.fields + ['ssh_key_fingerprint']


class AdminUserAuthSerializer(AuthSerializer):

    class Meta:
@@ -111,7 +111,7 @@ class AssetSerializer(BulkOrgResourceModelSerializer):
    @classmethod
    def setup_eager_loading(cls, queryset):
        """ Perform necessary eager loading of data. """
        queryset = queryset.select_related('admin_user', 'domain', 'platform')
        queryset = queryset.prefetch_related('admin_user', 'domain', 'platform')
        queryset = queryset.prefetch_related('nodes', 'labels')
        return queryset

@@ -166,16 +166,9 @@ class AssetDisplaySerializer(AssetSerializer):
            'connectivity',
        ]

    @classmethod
    def setup_eager_loading(cls, queryset):
        queryset = super().setup_eager_loading(queryset)
        queryset = queryset\
            .annotate(admin_user_username=F('admin_user__username'))
        return queryset


class PlatformSerializer(serializers.ModelSerializer):
    meta = serializers.DictField(required=False, allow_null=True)
    meta = serializers.DictField(required=False, allow_null=True, label=_('Meta'))

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

@@ -41,10 +41,6 @@ class AuthSerializerMixin:
    def validate_private_key(self, private_key):
        if not private_key:
            return
        if 'OPENSSH' in private_key:
            msg = _("Not support openssh format key, using "
                    "ssh-keygen -t rsa -m pem to generate")
            raise serializers.ValidationError(msg)
        password = self.initial_data.get("password")
        valid = validate_ssh_private_key(private_key, password)
        if not valid:

@@ -77,8 +77,6 @@ class GatewayWithAuthSerializer(GatewaySerializer):
        return fields


class DomainWithGatewaySerializer(BulkOrgResourceModelSerializer):
    gateways = GatewayWithAuthSerializer(many=True, read_only=True)
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
#
from rest_framework import serializers
from django.utils.translation import ugettext_lazy as _

from common.drf.serializers import AdaptedBulkListSerializer
from orgs.mixins.serializers import BulkOrgResourceModelSerializer

@@ -9,16 +10,17 @@ from ..models import Label


class LabelSerializer(BulkOrgResourceModelSerializer):
    asset_count = serializers.SerializerMethodField()
    asset_count = serializers.SerializerMethodField(label=_("Assets amount"))
    category_display = serializers.ReadOnlyField(source='get_category_display', label=_('Category display'))

    class Meta:
        model = Label
        fields = [
            'id', 'name', 'value', 'category', 'is_active', 'comment',
            'date_created', 'asset_count', 'assets', 'get_category_display'
            'date_created', 'asset_count', 'assets', 'category_display'
        ]
        read_only_fields = (
            'category', 'date_created', 'asset_count', 'get_category_display'
            'category', 'date_created', 'asset_count',
        )
        extra_kwargs = {
            'assets': {'required': False}
@@ -33,7 +33,7 @@ class SystemUserSerializer(AuthSerializerMixin, BulkOrgResourceModelSerializer):
            'priority', 'username_same_with_user',
            'auto_push', 'cmd_filters', 'sudo', 'shell', 'comment',
            'auto_generate_key', 'sftp_root', 'token',
            'assets_amount', 'date_created', 'created_by',
            'assets_amount', 'date_created', 'date_updated', 'created_by',
            'home', 'system_groups', 'ad_domain'
        ]
        extra_kwargs = {

@@ -155,7 +155,8 @@ class SystemUserListSerializer(SystemUserSerializer):
            'auto_push', 'sudo', 'shell', 'comment',
            "assets_amount", 'home', 'system_groups',
            'auto_generate_key', 'ad_domain',
            'sftp_root',
            'sftp_root', 'created_by', 'date_created',
            'date_updated',
        ]
        extra_kwargs = {
            'password': {"write_only": True},
3  apps/assets/signals_handler/__init__.py  Normal file
@@ -0,0 +1,3 @@
from .common import *
from .node_assets_amount import *
from .node_assets_mapping import *
@@ -1,21 +1,17 @@
# -*- coding: utf-8 -*-
#
from operator import add, sub

from assets.utils import is_asset_exists_in_node
from django.db.models.signals import (
    post_save, m2m_changed, pre_delete, post_delete, pre_save
)
from django.db.models import Q, F
from django.dispatch import receiver

from common.exceptions import M2MReverseNotAllowed
from common.const.signals import PRE_ADD, POST_ADD, POST_REMOVE, PRE_CLEAR, PRE_REMOVE
from common.const.signals import POST_ADD, POST_REMOVE, PRE_REMOVE
from common.utils import get_logger
from common.decorator import on_transaction_commit
from .models import Asset, SystemUser, Node, compute_parent_key
from assets.models import Asset, SystemUser, Node
from users.models import User
from .tasks import (
from assets.tasks import (
    update_assets_hardware_info_util,
    test_asset_connectivity_util,
    push_system_user_to_assets_manual,

@@ -23,7 +19,6 @@ from .tasks import (
    add_nodes_assets_to_system_users
)


logger = get_logger(__file__)
@@ -87,13 +82,13 @@ def on_system_user_assets_change(instance, action, model, pk_set, **kwargs):
|
||||
return
|
||||
logger.debug("System user assets change signal recv: {}".format(instance))
|
||||
if model == Asset:
|
||||
system_users_id = [instance.id]
|
||||
assets_id = pk_set
|
||||
system_user_ids = [instance.id]
|
||||
asset_ids = pk_set
|
||||
else:
|
||||
system_users_id = pk_set
|
||||
assets_id = [instance.id]
|
||||
for system_user_id in system_users_id:
|
||||
push_system_user_to_assets.delay(system_user_id, assets_id)
|
||||
system_user_ids = pk_set
|
||||
asset_ids = [instance.id]
|
||||
for system_user_id in system_user_ids:
|
||||
push_system_user_to_assets.delay(system_user_id, asset_ids)
|
||||
|
||||
|
||||
@receiver(m2m_changed, sender=SystemUser.users.through)
|
||||
@@ -198,138 +193,11 @@ def on_asset_nodes_add(instance, action, reverse, pk_set, **kwargs):
|
||||
systemuser_id=system_user_id,
|
||||
asset_id=asset_id
|
||||
))
|
||||
push_system_user_to_assets.delay(system_user_id, asset_ids_to_push)
|
||||
if asset_ids_to_push:
|
||||
push_system_user_to_assets.delay(system_user_id, asset_ids_to_push)
|
||||
m2m_model.objects.bulk_create(to_create)
|
||||
|
||||
|
||||
def _update_node_assets_amount(node: Node, asset_pk_set: set, operator=add):
|
||||
"""
|
||||
一个节点与多个资产关系变化时,更新计数
|
||||
|
||||
:param node: 节点实例
|
||||
:param asset_pk_set: 资产的`id`集合, 内部不会修改该值
|
||||
:param operator: 操作
|
||||
* -> Node
|
||||
# -> Asset
|
||||
|
||||
* [3]
|
||||
/ \
|
||||
* * [2]
|
||||
/ \
|
||||
* * [1]
|
||||
/ / \
|
||||
* [a] # # [b]
|
||||
|
||||
"""
|
||||
# 获取节点[1]祖先节点的 `key` 含自己,也就是[1, 2, 3]节点的`key`
|
||||
ancestor_keys = node.get_ancestor_keys(with_self=True)
|
||||
ancestors = Node.objects.filter(key__in=ancestor_keys).order_by('-key')
|
||||
to_update = []
|
||||
for ancestor in ancestors:
|
||||
# 迭代祖先节点的`key`,顺序是 [1] -> [2] -> [3]
|
||||
# 查询该节点及其后代节点是否包含要操作的资产,将包含的从要操作的
|
||||
# 资产集合中去掉,他们是重复节点,无论增加或删除都不会影响节点的资产数量
|
||||
|
||||
asset_pk_set -= set(Asset.objects.filter(
|
||||
id__in=asset_pk_set
|
||||
).filter(
|
||||
Q(nodes__key__istartswith=f'{ancestor.key}:') |
|
||||
Q(nodes__key=ancestor.key)
|
||||
).distinct().values_list('id', flat=True))
|
||||
if not asset_pk_set:
|
||||
# 要操作的资产集合为空,说明都是重复资产,不用改变节点资产数量
|
||||
# 而且既然它包含了,它的祖先节点肯定也包含了,所以祖先节点都不用
|
||||
# 处理了
|
||||
break
|
||||
ancestor.assets_amount = operator(F('assets_amount'), len(asset_pk_set))
|
||||
to_update.append(ancestor)
|
||||
Node.objects.bulk_update(to_update, fields=('assets_amount', 'parent_key'))
|
||||
|
||||
|
||||
def _remove_ancestor_keys(ancestor_key, tree_set):
|
||||
# 这里判断 `ancestor_key` 不能是空,防止数据错误导致的死循环
|
||||
# 判断是否在集合里,来区分是否已被处理过
|
||||
while ancestor_key and ancestor_key in tree_set:
|
||||
tree_set.remove(ancestor_key)
|
||||
ancestor_key = compute_parent_key(ancestor_key)
|
||||
|
||||
|
||||
def _update_nodes_asset_amount(node_keys, asset_pk, operator):
|
||||
"""
|
||||
一个资产与多个节点关系变化时,更新计数
|
||||
|
||||
:param node_keys: 节点 id 的集合
|
||||
:param asset_pk: 资产 id
|
||||
:param operator: 操作
|
||||
"""
|
||||
|
||||
# 所有相关节点的祖先节点,组成一棵局部树
|
||||
ancestor_keys = set()
|
||||
for key in node_keys:
|
||||
ancestor_keys.update(Node.get_node_ancestor_keys(key))
|
||||
|
||||
# 相关节点可能是其他相关节点的祖先节点,如果是从相关节点里干掉
|
||||
node_keys -= ancestor_keys
|
||||
|
||||
to_update_keys = []
|
||||
for key in node_keys:
|
||||
# 遍历相关节点,处理它及其祖先节点
|
||||
# 查询该节点是否包含待处理资产
|
||||
exists = is_asset_exists_in_node(asset_pk, key)
|
||||
parent_key = compute_parent_key(key)
|
||||
|
||||
if exists:
|
||||
# 如果资产在该节点,那么他及其祖先节点都不用处理
|
||||
_remove_ancestor_keys(parent_key, ancestor_keys)
|
||||
continue
|
||||
else:
|
||||
# 不存在,要更新本节点
|
||||
to_update_keys.append(key)
|
||||
# 这里判断 `parent_key` 不能是空,防止数据错误导致的死循环
|
||||
# 判断是否在集合里,来区分是否已被处理过
|
||||
while parent_key and parent_key in ancestor_keys:
|
||||
exists = is_asset_exists_in_node(asset_pk, parent_key)
|
||||
if exists:
|
||||
_remove_ancestor_keys(parent_key, ancestor_keys)
|
||||
break
|
||||
else:
|
||||
to_update_keys.append(parent_key)
|
||||
ancestor_keys.remove(parent_key)
|
||||
parent_key = compute_parent_key(parent_key)
|
||||
|
||||
Node.objects.filter(key__in=to_update_keys).update(
|
||||
assets_amount=operator(F('assets_amount'), 1)
|
||||
)
|
||||
|
||||
|
||||
@receiver(m2m_changed, sender=Asset.nodes.through)
|
||||
def update_nodes_assets_amount(action, instance, reverse, pk_set, **kwargs):
|
||||
# 不允许 `pre_clear` ,因为该信号没有 `pk_set`
|
||||
# [官网](https://docs.djangoproject.com/en/3.1/ref/signals/#m2m-changed)
|
||||
refused = (PRE_CLEAR,)
|
||||
if action in refused:
|
||||
raise ValueError
|
||||
|
||||
mapper = {
|
||||
PRE_ADD: add,
|
||||
POST_REMOVE: sub
|
||||
}
|
||||
if action not in mapper:
|
||||
return
|
||||
|
||||
operator = mapper[action]
|
||||
|
||||
if reverse:
|
||||
node: Node = instance
|
||||
asset_pk_set = set(pk_set)
|
||||
_update_node_assets_amount(node, asset_pk_set, operator)
|
||||
else:
|
||||
asset_pk = instance.id
|
||||
# Nodes directly related to the asset
|
||||
node_keys = set(Node.objects.filter(id__in=pk_set).values_list('key', flat=True))
|
||||
_update_nodes_asset_amount(node_keys, asset_pk, operator)
|
||||
|
||||
|
||||
RELATED_NODE_IDS = '_related_node_ids'
|
||||
|
||||
|
||||
160  apps/assets/signals_handler/node_assets_amount.py  Normal file
@@ -0,0 +1,160 @@
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from operator import add, sub
|
||||
from django.db.models import Q, F
|
||||
from django.dispatch import receiver
|
||||
from django.db.models.signals import (
|
||||
m2m_changed
|
||||
)
|
||||
|
||||
from orgs.utils import ensure_in_real_or_default_org, tmp_to_org
|
||||
from common.const.signals import PRE_ADD, POST_REMOVE, PRE_CLEAR
|
||||
from common.utils import get_logger
|
||||
from assets.models import Asset, Node, compute_parent_key
|
||||
from assets.locks import NodeTreeUpdateLock
|
||||
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
@receiver(m2m_changed, sender=Asset.nodes.through)
def on_node_asset_change(sender, action, instance, reverse, pk_set, **kwargs):
    # `pre_clear` is refused because that action carries no `pk_set`
    # See: https://docs.djangoproject.com/en/3.1/ref/signals/#m2m-changed
    refused = (PRE_CLEAR,)
    if action in refused:
        raise ValueError

    mapper = {
        PRE_ADD: add,
        POST_REMOVE: sub
    }
    if action not in mapper:
        return

    operator = mapper[action]

    with tmp_to_org(instance.org):
        if reverse:
            node: Node = instance
            asset_pk_set = set(pk_set)
            NodeAssetsAmountUtils.update_node_assets_amount(node, asset_pk_set, operator)
        else:
            asset_pk = instance.id
            # Nodes directly related to the asset
            node_keys = set(Node.objects.filter(id__in=pk_set).values_list('key', flat=True))
            NodeAssetsAmountUtils.update_nodes_asset_amount(node_keys, asset_pk, operator)


class NodeAssetsAmountUtils:

    @classmethod
    def _remove_ancestor_keys(cls, ancestor_key, tree_set):
        # `ancestor_key` must not be empty here, to guard against an infinite
        # loop caused by bad data; membership in the set tells us whether the
        # key has already been handled
        while ancestor_key and ancestor_key in tree_set:
            tree_set.remove(ancestor_key)
            ancestor_key = compute_parent_key(ancestor_key)

    @classmethod
    def _is_asset_exists_in_node(cls, asset_pk, node_key):
        exists = Asset.objects.filter(
            Q(nodes__key__istartswith=f'{node_key}:') | Q(nodes__key=node_key)
        ).filter(id=asset_pk).exists()
        return exists

    @classmethod
    @ensure_in_real_or_default_org
    @NodeTreeUpdateLock()
    def update_nodes_asset_amount(cls, node_keys, asset_pk, operator):
        """
        Update the counters when one asset's relation to multiple nodes changes

        :param node_keys: set of node keys
        :param asset_pk: asset id
        :param operator: the operation (add or sub)
        """
        # All ancestors of the related nodes together form a partial tree
        ancestor_keys = set()
        for key in node_keys:
            ancestor_keys.update(Node.get_node_ancestor_keys(key))

        # A related node may itself be an ancestor of another related node;
        # if so, drop it from the related nodes
        node_keys -= ancestor_keys

        to_update_keys = []
        for key in node_keys:
            # Walk each related node and its ancestors, checking whether the
            # node already contains the asset being processed
            exists = cls._is_asset_exists_in_node(asset_pk, key)
            parent_key = compute_parent_key(key)

            if exists:
                # The asset is already under this node, so neither it nor its
                # ancestors need to be touched
                cls._remove_ancestor_keys(parent_key, ancestor_keys)
                continue
            else:
                # Not present, so this node needs updating
                to_update_keys.append(key)
                # `parent_key` must not be empty here, to guard against an
                # infinite loop caused by bad data; membership in the set
                # tells us whether the key has already been handled
                while parent_key and parent_key in ancestor_keys:
                    exists = cls._is_asset_exists_in_node(asset_pk, parent_key)
                    if exists:
                        cls._remove_ancestor_keys(parent_key, ancestor_keys)
                        break
                    else:
                        to_update_keys.append(parent_key)
                        ancestor_keys.remove(parent_key)
                        parent_key = compute_parent_key(parent_key)

        Node.objects.filter(key__in=to_update_keys).update(
            assets_amount=operator(F('assets_amount'), 1)
        )

    @classmethod
    @ensure_in_real_or_default_org
    @NodeTreeUpdateLock()
    def update_node_assets_amount(cls, node: Node, asset_pk_set: set, operator=add):
        """
        Update the counters when one node's relation to multiple assets changes

        :param node: the node instance
        :param asset_pk_set: set of asset ids; this value is not modified inside the method
        :param operator: the operation (add or sub)

        * -> Node
        # -> Asset

               * [3]
              / \
             *   * [2]
                / \
               *   * [1]
              /   / \
         * [a]   #   # [b]

        """
        # Get the `key` of node [1]'s ancestors including itself, i.e. the keys of nodes [1, 2, 3]
        ancestor_keys = node.get_ancestor_keys(with_self=True)
        ancestors = Node.objects.filter(key__in=ancestor_keys).order_by('-key')
        to_update = []
        for ancestor in ancestors:
            # Iterate the ancestor keys in the order [1] -> [2] -> [3].
            # Check whether the node or its descendants already contain any of the
            # assets being processed and drop those from the working set; they are
            # duplicates, so adding or removing them does not change this node's count
            asset_pk_set -= set(Asset.objects.filter(
                id__in=asset_pk_set
            ).filter(
                Q(nodes__key__istartswith=f'{ancestor.key}:') |
                Q(nodes__key=ancestor.key)
            ).distinct().values_list('id', flat=True))
            if not asset_pk_set:
                # The working set is empty, so everything left was a duplicate and no
                # counts need to change; and if this node already contained them, its
                # ancestors certainly do too, so they can be skipped as well
                break
            ancestor.assets_amount = operator(F('assets_amount'), len(asset_pk_set))
            to_update.append(ancestor)
        Node.objects.bulk_update(to_update, fields=('assets_amount', 'parent_key'))
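Both updaters above avoid double counting by discarding assets that are already reachable under a node before applying the F-expression. A minimal, framework-free sketch of that dedup loop follows; the ancestor_chain, amounts and assets_under structures are illustrative stand-ins, not project APIs.

from operator import add

def update_ancestor_amounts(ancestor_chain, changed_asset_ids, amounts, assets_under, operator=add):
    # ancestor_chain: node keys ordered child -> root
    # assets_under[key]: asset ids already present on that node or any of its descendants
    working = set(changed_asset_ids)
    for key in ancestor_chain:
        # assets the node already holds through another branch are duplicates;
        # adding or removing them again cannot change this node's count
        working -= assets_under[key]
        if not working:
            # everything left was a duplicate, and the ancestors contain it too
            break
        amounts[key] = operator(amounts[key], len(working))
    return amounts

# Example: node '1' already sees asset 'b' through another branch, so only 'a' is counted there.
update_ancestor_amounts(['1:1', '1'], {'a', 'b'}, {'1:1': 0, '1': 5}, {'1:1': set(), '1': {'b'}})
# -> {'1:1': 2, '1': 6}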
100  apps/assets/signals_handler/node_assets_mapping.py  Normal file
@@ -0,0 +1,100 @@
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import os
|
||||
import threading
|
||||
|
||||
from django.db.models.signals import (
|
||||
m2m_changed, post_save, post_delete
|
||||
)
|
||||
from django.dispatch import receiver
|
||||
from django.utils.functional import LazyObject
|
||||
|
||||
from common.signals import django_ready
|
||||
from common.utils.connection import RedisPubSub
|
||||
from common.utils import get_logger
|
||||
from assets.models import Asset, Node
|
||||
from orgs.models import Organization
|
||||
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
# clear node assets mapping for memory
|
||||
# ------------------------------------
|
||||
|
||||
|
||||
def get_node_assets_mapping_for_memory_pub_sub():
|
||||
return RedisPubSub('fm.node_all_asset_ids_memory_mapping')
|
||||
|
||||
|
||||
class NodeAssetsMappingForMemoryPubSub(LazyObject):
|
||||
def _setup(self):
|
||||
self._wrapped = get_node_assets_mapping_for_memory_pub_sub()
|
||||
|
||||
|
||||
node_assets_mapping_for_memory_pub_sub = NodeAssetsMappingForMemoryPubSub()
|
||||
|
||||
|
||||
def expire_node_assets_mapping_for_memory(org_id):
    # Every process clears its own in-memory data
    org_id = str(org_id)
    root_org_id = Organization.ROOT_ID

    # The current process clears the cached data
    logger.debug(
        "Expire node assets id mapping from cache of org={}, pid={}"
        "".format(org_id, os.getpid())
    )
    Node.expire_node_all_asset_ids_mapping_from_cache(org_id)
    Node.expire_node_all_asset_ids_mapping_from_cache(root_org_id)

    node_assets_mapping_for_memory_pub_sub.publish(org_id)
    node_assets_mapping_for_memory_pub_sub.publish(root_org_id)


@receiver(post_save, sender=Node)
|
||||
def on_node_post_create(sender, instance, created, update_fields, **kwargs):
|
||||
if created:
|
||||
need_expire = True
|
||||
elif update_fields and 'key' in update_fields:
|
||||
need_expire = True
|
||||
else:
|
||||
need_expire = False
|
||||
|
||||
if need_expire:
|
||||
expire_node_assets_mapping_for_memory(instance.org_id)
|
||||
|
||||
|
||||
@receiver(post_delete, sender=Node)
|
||||
def on_node_post_delete(sender, instance, **kwargs):
|
||||
expire_node_assets_mapping_for_memory(instance.org_id)
|
||||
|
||||
|
||||
@receiver(m2m_changed, sender=Asset.nodes.through)
|
||||
def on_node_asset_change(sender, instance, **kwargs):
|
||||
expire_node_assets_mapping_for_memory(instance.org_id)
|
||||
|
||||
|
||||
@receiver(django_ready)
|
||||
def subscribe_node_assets_mapping_expire(sender, **kwargs):
|
||||
logger.debug("Start subscribe for expire node assets id mapping from memory")
|
||||
|
||||
def keep_subscribe():
|
||||
while True:
|
||||
try:
|
||||
subscribe = node_assets_mapping_for_memory_pub_sub.subscribe()
|
||||
for message in subscribe.listen():
|
||||
if message["type"] != "message":
|
||||
continue
|
||||
org_id = message['data'].decode()
|
||||
Node.expire_node_all_asset_ids_mapping_from_memory(org_id)
|
||||
logger.debug(
|
||||
"Expire node assets id mapping from memory of org={}, pid={}"
|
||||
"".format(str(org_id), os.getpid())
|
||||
)
|
||||
except Exception as e:
|
||||
logger.exception(f'subscribe_node_assets_mapping_expire: {e}')
|
||||
Node.expire_all_orgs_node_all_asset_ids_mapping_from_memory()
|
||||
|
||||
t = threading.Thread(target=keep_subscribe)
|
||||
t.daemon = True
|
||||
t.start()
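The RedisPubSub helper used above is a project utility not shown in this diff. The same cross-process invalidation pattern can be sketched with the plain redis-py client; the channel name is the one used in this file, while the Redis URL and callback are illustrative assumptions.

import threading
import redis

r = redis.Redis.from_url('redis://127.0.0.1:6379/0')  # assumed address
CHANNEL = 'fm.node_all_asset_ids_memory_mapping'

def publish_expire(org_id):
    # every subscribed process will drop its own in-memory mapping for this org
    r.publish(CHANNEL, str(org_id))

def start_subscriber(expire_callback):
    def _listen():
        pubsub = r.pubsub()
        pubsub.subscribe(CHANNEL)
        for message in pubsub.listen():
            if message['type'] != 'message':
                continue
            expire_callback(message['data'].decode())
    t = threading.Thread(target=_listen, daemon=True)
    t.start()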
|
||||
@@ -12,6 +12,7 @@ __all__ = ['add_nodes_assets_to_system_users']
|
||||
@tmp_to_root_org()
|
||||
def add_nodes_assets_to_system_users(nodes_keys, system_users):
|
||||
from ..models import Node
|
||||
assets = Node.get_nodes_all_assets(nodes_keys).values_list('id', flat=True)
|
||||
nodes = Node.objects.filter(key__in=nodes_keys)
|
||||
assets = Node.get_nodes_all_assets(*nodes)
|
||||
for system_user in system_users:
|
||||
system_user.assets.add(*tuple(assets))
|
||||
|
||||
@@ -141,7 +141,8 @@ def gather_asset_users(assets, task_name=None):
|
||||
|
||||
@shared_task(queue="ansible")
|
||||
def gather_nodes_asset_users(nodes_key):
|
||||
assets = Node.get_nodes_all_assets(nodes_key)
|
||||
nodes = Node.objects.filter(key__in=nodes_key)
|
||||
assets = Node.get_nodes_all_assets(*nodes)
|
||||
assets_groups_by_100 = [assets[i:i+100] for i in range(0, len(assets), 100)]
|
||||
for _assets in assets_groups_by_100:
|
||||
gather_asset_users(_assets)
|
||||
|
||||
@@ -12,16 +12,24 @@ from common.utils import get_logger
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
@shared_task(queue='celery_heavy_tasks')
|
||||
def check_node_assets_amount_task(org_id=Organization.ROOT_ID):
|
||||
try:
|
||||
with tmp_to_org(Organization.get_instance(org_id)):
|
||||
check_node_assets_amount()
|
||||
except AcquireFailed:
|
||||
logger.error(_('The task of self-checking is already running and cannot be started repeatedly'))
|
||||
@shared_task
|
||||
def check_node_assets_amount_task(org_id=None):
|
||||
if org_id is None:
|
||||
orgs = Organization.objects.all()
|
||||
else:
|
||||
orgs = [Organization.get_instance(org_id)]
|
||||
|
||||
for org in orgs:
|
||||
try:
|
||||
with tmp_to_org(org):
|
||||
check_node_assets_amount()
|
||||
except AcquireFailed:
|
||||
error = _('The task of self-checking is already running '
|
||||
'and cannot be started repeatedly')
|
||||
logger.error(error)
|
||||
|
||||
|
||||
@register_as_period_task(crontab='0 2 * * *')
|
||||
@shared_task(queue='celery_heavy_tasks')
|
||||
@shared_task
|
||||
def check_node_assets_amount_period_task():
|
||||
check_node_assets_amount_task()
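AcquireFailed comes from the project's lock utilities, which are not part of this diff. A minimal sketch of a non-blocking distributed lock decorator with that behaviour, assuming a plain redis-py client and a made-up lock name, could look like this.

import functools
import uuid
import redis

client = redis.Redis()  # assumed connection


class AcquireFailed(Exception):
    pass


def non_blocking_lock(name='assets.node.check_node_assets_amount', expire=3600):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            token = uuid.uuid4().hex
            # SET with NX only succeeds if nobody else currently holds the lock
            if not client.set(name, token, nx=True, ex=expire):
                raise AcquireFailed(f'Lock {name} is already held')
            try:
                return func(*args, **kwargs)
            finally:
                # only release a lock we still own
                if client.get(name) == token.encode():
                    client.delete(name)
        return wrapper
    return decorator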
|
||||
|
||||
@@ -32,11 +32,19 @@ def _dump_args(args: dict):
|
||||
|
||||
|
||||
def get_push_unixlike_system_user_tasks(system_user, username=None):
|
||||
comment = system_user.name
|
||||
|
||||
if username is None:
|
||||
username = system_user.username
|
||||
|
||||
if system_user.username_same_with_user:
|
||||
from users.models import User
|
||||
user = User.objects.filter(username=username).only('name', 'username').first()
|
||||
if user:
|
||||
comment = f'{system_user.name}[{str(user)}]'
|
||||
|
||||
password = system_user.password
|
||||
public_key = system_user.public_key
|
||||
comment = system_user.name
|
||||
|
||||
groups = _split_by_comma(system_user.system_groups)
|
||||
|
||||
@@ -49,6 +57,7 @@ def get_push_unixlike_system_user_tasks(system_user, username=None):
|
||||
'state': 'present',
|
||||
'home': system_user.home or Empty,
|
||||
'groups': groups or Empty,
|
||||
'expires': 99999,
|
||||
'comment': comment
|
||||
}
|
||||
|
||||
@@ -225,18 +234,18 @@ def push_system_user_util(system_user, assets, task_name, username=None):
|
||||
print(_("Hosts count: {}").format(len(_assets)))
|
||||
|
||||
id_asset_map = {_asset.id: _asset for _asset in _assets}
|
||||
assets_id = id_asset_map.keys()
|
||||
asset_ids = id_asset_map.keys()
|
||||
no_special_auth = []
|
||||
special_auth_set = set()
|
||||
|
||||
auth_books = AuthBook.objects.filter(username__in=usernames, asset_id__in=assets_id)
|
||||
auth_books = AuthBook.objects.filter(username__in=usernames, asset_id__in=asset_ids)
|
||||
|
||||
for auth_book in auth_books:
|
||||
special_auth_set.add((auth_book.username, auth_book.asset_id))
|
||||
|
||||
for _username in usernames:
|
||||
no_special_assets = []
|
||||
for asset_id in assets_id:
|
||||
for asset_id in asset_ids:
|
||||
if (_username, asset_id) not in special_auth_set:
|
||||
no_special_assets.append(id_asset_map[asset_id])
|
||||
if no_special_assets:
|
||||
@@ -281,12 +290,12 @@ def push_system_user_a_asset_manual(system_user, asset, username=None):
|
||||
|
||||
@shared_task(queue="ansible")
|
||||
@tmp_to_root_org()
|
||||
def push_system_user_to_assets(system_user_id, assets_id, username=None):
|
||||
def push_system_user_to_assets(system_user_id, asset_ids, username=None):
|
||||
"""
|
||||
推送系统用户到指定的若干资产上
|
||||
"""
|
||||
system_user = SystemUser.objects.get(id=system_user_id)
|
||||
assets = get_objects(Asset, assets_id)
|
||||
assets = get_objects(Asset, asset_ids)
|
||||
task_name = _("Push system users to assets: {}").format(system_user.name)
|
||||
|
||||
return push_system_user_util(system_user, assets, task_name, username=username)
|
||||
|
||||
@@ -25,10 +25,13 @@ def check_asset_can_run_ansible(asset):
|
||||
|
||||
|
||||
def check_system_user_can_run_ansible(system_user):
|
||||
if not system_user.is_need_push():
|
||||
msg = _("Push system user task skip, auto push not enable or "
|
||||
"protocol is not ssh or rdp: {}").format(system_user.name)
|
||||
logger.info(msg)
|
||||
if not system_user.auto_push:
|
||||
logger.warn(f'Push system user task skip, auto push not enable: system_user={system_user.name}')
|
||||
return False
|
||||
if not system_user.is_protocol_support_push:
|
||||
logger.warn(f'Push system user task skip, protocol not support: '
|
||||
f'system_user={system_user.name} protocol={system_user.protocol} '
|
||||
f'support_protocol={system_user.SUPPORT_PUSH_PROTOCOLS}')
|
||||
return False
|
||||
|
||||
# Push root as system user is dangerous
|
||||
@@ -37,10 +40,6 @@ def check_system_user_can_run_ansible(system_user):
|
||||
logger.info(msg)
|
||||
return False
|
||||
|
||||
# if system_user.protocol != "ssh":
|
||||
# msg = _("System user protocol not ssh: {}".format(system_user))
|
||||
# logger.info(msg)
|
||||
# return False
|
||||
return True
|
||||
|
||||
|
||||
|
||||
33  apps/assets/tests/tree.py  Normal file
@@ -0,0 +1,33 @@
from assets.tree import Tree


def test():
    from orgs.models import Organization
    from assets.models import Node, Asset
    import time
    Organization.objects.get(id='1863cf22-f666-474e-94aa-935fe175203c').change_to()

    t1 = time.time()
    nodes = list(Node.objects.exclude(key__startswith='-').only('id', 'key', 'parent_key'))
    node_asset_id_pairs = Asset.nodes.through.objects.all().values_list('node_id', 'asset_id')
    t2 = time.time()
    node_asset_id_pairs = list(node_asset_id_pairs)
    tree = Tree(nodes, node_asset_id_pairs)
    tree.build_tree()
    tree.nodes = None
    tree.node_asset_id_pairs = None
    import pickle
    d = pickle.dumps(tree)
    print('------------', len(d))
    return tree
    tree.compute_tree_node_assets_amount()

    print(f'Verifying the accuracy of the algorithm ......')
    for node in nodes:
        tree_node = tree.key_tree_node_mapper[node.key]
        if tree_node.assets_amount != node.assets_amount:
            print(f'ERROR: {tree_node.assets_amount} {node.assets_amount}')
        # print(f'OK {tree_node.asset_amount} {node.assets_amount}')

    print(f'Database time: {t2 - t1}')
    return tree
@@ -2,7 +2,6 @@
|
||||
from django.urls import path, re_path
|
||||
from rest_framework_nested import routers
|
||||
from rest_framework_bulk.routes import BulkRouter
|
||||
from django.db.transaction import non_atomic_requests
|
||||
|
||||
from common import api as capi
|
||||
|
||||
@@ -57,9 +56,9 @@ urlpatterns = [
|
||||
path('nodes/children/', api.NodeChildrenApi.as_view(), name='node-children-2'),
|
||||
path('nodes/<uuid:pk>/children/add/', api.NodeAddChildrenApi.as_view(), name='node-add-children'),
|
||||
path('nodes/<uuid:pk>/assets/', api.NodeAssetsApi.as_view(), name='node-assets'),
|
||||
path('nodes/<uuid:pk>/assets/add/', non_atomic_requests(api.NodeAddAssetsApi.as_view()), name='node-add-assets'),
|
||||
path('nodes/<uuid:pk>/assets/replace/', non_atomic_requests(api.MoveAssetsToNodeApi.as_view()), name='node-replace-assets'),
|
||||
path('nodes/<uuid:pk>/assets/remove/', non_atomic_requests(api.NodeRemoveAssetsApi.as_view()), name='node-remove-assets'),
|
||||
path('nodes/<uuid:pk>/assets/add/', api.NodeAddAssetsApi.as_view(), name='node-add-assets'),
|
||||
path('nodes/<uuid:pk>/assets/replace/', api.MoveAssetsToNodeApi.as_view(), name='node-replace-assets'),
|
||||
path('nodes/<uuid:pk>/assets/remove/', api.NodeRemoveAssetsApi.as_view(), name='node-remove-assets'),
|
||||
path('nodes/<uuid:pk>/tasks/', api.NodeTaskCreateApi.as_view(), name='node-task-create'),
|
||||
|
||||
path('gateways/<uuid:pk>/test-connective/', api.GatewayTestConnectionApi.as_view(), name='test-gateway-connective'),
|
||||
|
||||
@@ -1,41 +1,47 @@
|
||||
# ~*~ coding: utf-8 ~*~
|
||||
#
|
||||
import time
|
||||
|
||||
from django.db.models import Q
|
||||
|
||||
from common.utils import get_logger, dict_get_any, is_uuid, get_object_or_none
|
||||
from common.utils.lock import DistributedLock
|
||||
from collections import defaultdict
|
||||
from common.utils import get_logger, dict_get_any, is_uuid, get_object_or_none, timeit
|
||||
from common.http import is_true
|
||||
from .models import Asset, Node
|
||||
from common.struct import Stack
|
||||
from common.db.models import output_as_string
|
||||
from orgs.utils import ensure_in_real_or_default_org, current_org
|
||||
|
||||
from .locks import NodeTreeUpdateLock
|
||||
from .models import Node, Asset
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
@DistributedLock(name="assets.node.check_node_assets_amount", blocking=False)
|
||||
@NodeTreeUpdateLock()
|
||||
@ensure_in_real_or_default_org
|
||||
def check_node_assets_amount():
|
||||
for node in Node.objects.all():
|
||||
logger.info(f'Check node assets amount: {node}')
|
||||
assets_amount = Asset.objects.filter(
|
||||
Q(nodes__key__istartswith=f'{node.key}:') | Q(nodes=node)
|
||||
).distinct().count()
|
||||
logger.info(f'Check node assets amount {current_org}')
|
||||
nodes = list(Node.objects.all().only('id', 'key', 'assets_amount'))
|
||||
nodeid_assetid_pairs = list(Asset.nodes.through.objects.all().values_list('node_id', 'asset_id'))
|
||||
|
||||
nodekey_assetids_mapper = defaultdict(set)
|
||||
nodeid_nodekey_mapper = {}
|
||||
for node in nodes:
|
||||
nodeid_nodekey_mapper[node.id] = node.key
|
||||
|
||||
for nodeid, assetid in nodeid_assetid_pairs:
|
||||
if nodeid not in nodeid_nodekey_mapper:
|
||||
continue
|
||||
nodekey = nodeid_nodekey_mapper[nodeid]
|
||||
nodekey_assetids_mapper[nodekey].add(assetid)
|
||||
|
||||
util = NodeAssetsUtil(nodes, nodekey_assetids_mapper)
|
||||
util.generate()
|
||||
|
||||
to_updates = []
|
||||
for node in nodes:
|
||||
assets_amount = util.get_assets_amount(node.key)
|
||||
if node.assets_amount != assets_amount:
|
||||
logger.warn(f'Node wrong assets amount <Node:{node.key}> '
|
||||
f'{node.assets_amount} right is {assets_amount}')
|
||||
logger.error(f'Node[{node.key}] assets amount error {node.assets_amount} != {assets_amount}')
|
||||
node.assets_amount = assets_amount
|
||||
node.save()
|
||||
# Avoid putting too much pressure on the database during the self-check
|
||||
time.sleep(0.1)
|
||||
|
||||
|
||||
def is_asset_exists_in_node(asset_pk, node_key):
|
||||
return Asset.objects.filter(
|
||||
id=asset_pk
|
||||
).filter(
|
||||
Q(nodes__key__istartswith=f'{node_key}:') | Q(nodes__key=node_key)
|
||||
).exists()
|
||||
to_updates.append(node)
|
||||
Node.objects.bulk_update(to_updates, fields=('assets_amount',))
|
||||
|
||||
|
||||
def is_query_node_all_assets(request):
|
||||
@@ -57,3 +63,77 @@ def get_node(request):
|
||||
else:
|
||||
node = get_object_or_none(Node, key=node_id)
|
||||
return node
|
||||
|
||||
|
||||
class NodeAssetsInfo:
    __slots__ = ('key', 'assets_amount', 'assets')

    def __init__(self, key, assets_amount, assets):
        self.key = key
        self.assets_amount = assets_amount
        self.assets = assets

    def __str__(self):
        return self.key


class NodeAssetsUtil:
    def __init__(self, nodes, nodekey_assetsid_mapper):
        """
        :param nodes: the nodes
        :param nodekey_assetsid_mapper: mapping from a node key to the ids of its direct assets, e.g. {"key1": set(), "key2": set()}
        """
        self.nodes = nodes
        # node_key --> set(asset_id1, asset_id2)
        self.nodekey_assetsid_mapper = nodekey_assetsid_mapper
        self.nodekey_assetsinfo_mapper = {}

    @timeit
    def generate(self):
        # Prepare the asset info entries, sorted by node key
        infos = []
        for node in self.nodes:
            assets = self.nodekey_assetsid_mapper.get(node.key, set())
            info = NodeAssetsInfo(key=node.key, assets_amount=0, assets=assets)
            infos.append(info)
        infos = sorted(infos, key=lambda i: [int(i) for i in i.key.split(':')])
        # A guard entry is appended so the last real node can still be popped
        guarder = NodeAssetsInfo(key='', assets_amount=0, assets=set())
        infos.append(guarder)

        stack = Stack()
        for info in infos:
            # If the top of the stack is not an ancestor of this node, it can be
            # popped and its assets amount computed
            while stack.top and not info.key.startswith(f'{stack.top.key}:'):
                pop_info = stack.pop()
                pop_info.assets_amount = len(pop_info.assets)
                self.nodekey_assetsinfo_mapper[pop_info.key] = pop_info
                if not stack.top:
                    continue
                stack.top.assets.update(pop_info.assets)
            stack.push(info)

    def get_assets_by_key(self, key):
        info = self.nodekey_assetsinfo_mapper[key]
        return info.assets

    def get_assets_amount(self, key):
        info = self.nodekey_assetsinfo_mapper[key]
        return info.assets_amount

    @classmethod
    def test_it(cls):
        from assets.models import Node, Asset

        nodes = list(Node.objects.all())
        nodes_assets = Asset.nodes.through.objects.all()\
            .annotate(aid=output_as_string('asset_id'))\
            .values_list('node__key', 'aid')

        mapping = defaultdict(set)
        for key, asset_id in nodes_assets:
            mapping[key].add(asset_id)

        util = cls(nodes, mapping)
        util.generate()
        return util
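generate relies on the fact that sorting keys numerically places every node immediately before its descendants, so one stack pass can roll leaf asset sets up into their ancestors. A stripped-down illustration on plain lists follows; the keys and asset ids are made up for the example.

keys = ['1', '1:1', '1:1:1', '1:2']
direct = {'1': set(), '1:1': {'a'}, '1:1:1': {'b'}, '1:2': {'c'}}

amounts = {}
stack = []  # entries of (key, accumulated asset set)
for key in sorted(keys, key=lambda k: [int(i) for i in k.split(':')]) + ['']:
    # pop everything that is not an ancestor of the current key
    while stack and not key.startswith(stack[-1][0] + ':'):
        done_key, done_assets = stack.pop()
        amounts[done_key] = len(done_assets)
        if stack:
            stack[-1][1].update(done_assets)  # roll assets up into the parent
    if key:
        stack.append((key, set(direct[key])))

print(amounts)  # {'1:1:1': 1, '1:1': 2, '1:2': 1, '1': 3}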
@@ -100,6 +100,8 @@ class CommandExecutionViewSet(ListModelMixin, OrgGenericViewSet):
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = super().get_queryset()
|
||||
if current_org.is_root():
|
||||
return queryset
|
||||
queryset = queryset.filter(run_as__org_id=current_org.org_id())
|
||||
return queryset
|
||||
|
||||
|
||||
18  apps/audits/migrations/0012_auto_20210414_1443.py  Normal file
@@ -0,0 +1,18 @@
# Generated by Django 3.1 on 2021-04-14 06:43
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('audits', '0011_userloginlog_backend'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='userloginlog',
|
||||
name='type',
|
||||
field=models.CharField(choices=[('W', 'Web'), ('T', 'Terminal'), ('U', 'Unknown')], max_length=2, verbose_name='Login type'),
|
||||
),
|
||||
]
|
||||
@@ -79,6 +79,7 @@ class UserLoginLog(models.Model):
|
||||
LOGIN_TYPE_CHOICE = (
|
||||
('W', 'Web'),
|
||||
('T', 'Terminal'),
|
||||
('U', 'Unknown'),
|
||||
)
|
||||
|
||||
MFA_DISABLED = 0
|
||||
|
||||
@@ -64,6 +64,9 @@ class SessionAuditSerializer(serializers.ModelSerializer):
|
||||
|
||||
class CommandExecutionSerializer(serializers.ModelSerializer):
|
||||
is_success = serializers.BooleanField(read_only=True, label=_('Is success'))
|
||||
hosts_display = serializers.ListSerializer(
|
||||
child=serializers.CharField(), source='hosts', read_only=True, label=_('Hosts for display')
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = CommandExecution
|
||||
@@ -72,7 +75,7 @@ class CommandExecutionSerializer(serializers.ModelSerializer):
|
||||
'run_as', 'command', 'user', 'is_finished',
|
||||
'date_start', 'result', 'is_success', 'org_id'
|
||||
]
|
||||
fields = fields_small + ['hosts', 'run_as_display', 'user_display']
|
||||
fields = fields_small + ['hosts', 'hosts_display', 'run_as_display', 'user_display']
|
||||
extra_kwargs = {
|
||||
'result': {'label': _('Result')}, # a method on the model, can only be overridden here
|
||||
'is_success': {'label': _('Is success')},
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
|
||||
from django.db.models.signals import post_save, post_delete
|
||||
from django.dispatch import receiver
|
||||
from django.conf import settings
|
||||
from django.db import transaction
|
||||
from django.utils import timezone
|
||||
from django.utils.functional import LazyObject
|
||||
from django.contrib.auth import BACKEND_SESSION_KEY
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework.renderers import JSONRenderer
|
||||
@@ -34,17 +35,23 @@ MODELS_NEED_RECORD = (
|
||||
)
|
||||
|
||||
|
||||
LOGIN_BACKEND = {
|
||||
'PublicKeyAuthBackend': _('SSH Key'),
|
||||
'RadiusBackend': User.Source.radius.label,
|
||||
'RadiusRealmBackend': User.Source.radius.label,
|
||||
'LDAPAuthorizationBackend': User.Source.ldap.label,
|
||||
'ModelBackend': _('Password'),
|
||||
'SSOAuthentication': _('SSO'),
|
||||
'CASBackend': User.Source.cas.label,
|
||||
'OIDCAuthCodeBackend': User.Source.openid.label,
|
||||
'OIDCAuthPasswordBackend': User.Source.openid.label,
|
||||
}
|
||||
class AuthBackendLabelMapping(LazyObject):
|
||||
@staticmethod
|
||||
def get_login_backends():
|
||||
backend_label_mapping = {}
|
||||
for source, backends in User.SOURCE_BACKEND_MAPPING.items():
|
||||
for backend in backends:
|
||||
backend_label_mapping[backend] = source.label
|
||||
backend_label_mapping[settings.AUTH_BACKEND_PUBKEY] = _('SSH Key')
|
||||
backend_label_mapping[settings.AUTH_BACKEND_MODEL] = _('Password')
|
||||
backend_label_mapping[settings.AUTH_BACKEND_SSO] = _('SSO')
|
||||
return backend_label_mapping
|
||||
|
||||
def _setup(self):
|
||||
self._wrapped = self.get_login_backends()
|
||||
|
||||
|
||||
AUTH_BACKEND_LABEL_MAPPING = AuthBackendLabelMapping()
|
||||
|
||||
|
||||
def create_operate_log(action, sender, resource):
|
||||
@@ -70,6 +77,7 @@ def create_operate_log(action, sender, resource):
|
||||
|
||||
@receiver(post_save)
|
||||
def on_object_created_or_update(sender, instance=None, created=False, update_fields=None, **kwargs):
|
||||
# A change to last_login only reflects the latest login time, which changes on every login
|
||||
if instance._meta.object_name == 'User' and \
|
||||
update_fields and 'last_login' in update_fields:
|
||||
return
|
||||
@@ -125,21 +133,20 @@ def on_audits_log_create(sender, instance=None, **kwargs):
|
||||
|
||||
|
||||
def get_login_backend(request):
|
||||
backend = request.session.get('auth_backend', '') or request.session.get(BACKEND_SESSION_KEY, '')
|
||||
backend = request.session.get('auth_backend', '') or \
|
||||
request.session.get(BACKEND_SESSION_KEY, '')
|
||||
|
||||
backend = backend.rsplit('.', maxsplit=1)[-1]
|
||||
if backend in LOGIN_BACKEND:
|
||||
return LOGIN_BACKEND[backend]
|
||||
else:
|
||||
logger.warn(f'LOGIN_BACKEND_NOT_FOUND: {backend}')
|
||||
return ''
|
||||
backend_label = AUTH_BACKEND_LABEL_MAPPING.get(backend, None)
|
||||
if backend_label is None:
|
||||
backend_label = ''
|
||||
return backend_label
|
||||
|
||||
|
||||
def generate_data(username, request):
|
||||
user_agent = request.META.get('HTTP_USER_AGENT', '')
|
||||
login_ip = get_request_ip(request) or '0.0.0.0'
|
||||
if isinstance(request, Request):
|
||||
login_type = request.META.get('HTTP_X_JMS_LOGIN_TYPE', '')
|
||||
login_type = request.META.get('HTTP_X_JMS_LOGIN_TYPE', 'U')
|
||||
else:
|
||||
login_type = 'W'
|
||||
|
||||
@@ -147,7 +154,7 @@ def generate_data(username, request):
|
||||
'username': username,
|
||||
'ip': login_ip,
|
||||
'type': login_type,
|
||||
'user_agent': user_agent,
|
||||
'user_agent': user_agent[0:254],
|
||||
'datetime': timezone.now(),
|
||||
'backend': get_login_backend(request)
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
|
||||
from .auth import *
|
||||
from .connection_token import *
|
||||
from .token import *
|
||||
from .mfa import *
|
||||
from .access_key import *
|
||||
|
||||
@@ -1,55 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
import uuid
|
||||
|
||||
from django.core.cache import cache
|
||||
from django.shortcuts import get_object_or_404
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
|
||||
from common.utils import get_logger
|
||||
from common.permissions import IsOrgAdminOrAppUser
|
||||
from orgs.mixins.api import RootOrgViewMixin
|
||||
from users.models import User
|
||||
from assets.models import Asset, SystemUser
|
||||
|
||||
logger = get_logger(__name__)
|
||||
__all__ = [
|
||||
'UserConnectionTokenApi',
|
||||
]
|
||||
|
||||
|
||||
class UserConnectionTokenApi(RootOrgViewMixin, APIView):
|
||||
permission_classes = (IsOrgAdminOrAppUser,)
|
||||
|
||||
def post(self, request):
|
||||
user_id = request.data.get('user', '')
|
||||
asset_id = request.data.get('asset', '')
|
||||
system_user_id = request.data.get('system_user', '')
|
||||
token = str(uuid.uuid4())
|
||||
user = get_object_or_404(User, id=user_id)
|
||||
asset = get_object_or_404(Asset, id=asset_id)
|
||||
system_user = get_object_or_404(SystemUser, id=system_user_id)
|
||||
value = {
|
||||
'user': user_id,
|
||||
'username': user.username,
|
||||
'asset': asset_id,
|
||||
'hostname': asset.hostname,
|
||||
'system_user': system_user_id,
|
||||
'system_user_name': system_user.name
|
||||
}
|
||||
cache.set(token, value, timeout=20)
|
||||
return Response({"token": token}, status=201)
|
||||
|
||||
def get(self, request):
|
||||
token = request.query_params.get('token')
|
||||
user_only = request.query_params.get('user-only', None)
|
||||
value = cache.get(token, None)
|
||||
|
||||
if not value:
|
||||
return Response('', status=404)
|
||||
|
||||
if not user_only:
|
||||
return Response(value)
|
||||
else:
|
||||
return Response({'user': value['user']})
|
||||
242  apps/authentication/api/connection_token.py  Normal file
@@ -0,0 +1,242 @@
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from django.conf import settings
|
||||
from django.core.cache import cache
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.http import HttpResponse
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.viewsets import GenericViewSet
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
|
||||
from common.utils import get_logger, random_string
|
||||
from common.drf.api import SerializerMixin2
|
||||
from common.permissions import IsSuperUserOrAppUser, IsValidUser, IsSuperUser
|
||||
|
||||
from orgs.mixins.api import RootOrgViewMixin
|
||||
|
||||
from ..serializers import (
|
||||
ConnectionTokenSerializer, ConnectionTokenSecretSerializer,
|
||||
RDPFileSerializer
|
||||
)
|
||||
|
||||
logger = get_logger(__name__)
|
||||
__all__ = ['UserConnectionTokenViewSet']
|
||||
|
||||
|
||||
class UserConnectionTokenViewSet(RootOrgViewMixin, SerializerMixin2, GenericViewSet):
|
||||
permission_classes = (IsSuperUserOrAppUser,)
|
||||
serializer_classes = {
|
||||
'default': ConnectionTokenSerializer,
|
||||
'get_secret_detail': ConnectionTokenSecretSerializer,
|
||||
'get_rdp_file': RDPFileSerializer
|
||||
}
|
||||
CACHE_KEY_PREFIX = 'CONNECTION_TOKEN_{}'
|
||||
|
||||
@staticmethod
|
||||
def check_resource_permission(user, asset, application, system_user):
|
||||
from perms.utils.asset import has_asset_system_permission
|
||||
from perms.utils.application import has_application_system_permission
|
||||
if asset and not has_asset_system_permission(user, asset, system_user):
|
||||
error = f'User not has this asset and system user permission: ' \
|
||||
f'user={user.id} system_user={system_user.id} asset={asset.id}'
|
||||
raise PermissionDenied(error)
|
||||
if application and not has_application_system_permission(user, application, system_user):
|
||||
error = f'User not has this application and system user permission: ' \
|
||||
f'user={user.id} system_user={system_user.id} application={application.id}'
|
||||
raise PermissionDenied(error)
|
||||
return True
|
||||
|
||||
def create_token(self, user, asset, application, system_user):
|
||||
if not settings.CONNECTION_TOKEN_ENABLED:
|
||||
raise PermissionDenied('Connection token disabled')
|
||||
if not user:
|
||||
user = self.request.user
|
||||
if not self.request.user.is_superuser and user != self.request.user:
|
||||
raise PermissionDenied('Only super user can create user token')
|
||||
self.check_resource_permission(user, asset, application, system_user)
|
||||
token = random_string(36)
|
||||
value = {
|
||||
'user': str(user.id),
|
||||
'username': user.username,
|
||||
'system_user': str(system_user.id),
|
||||
'system_user_name': system_user.name
|
||||
}
|
||||
|
||||
if asset:
|
||||
value.update({
|
||||
'type': 'asset',
|
||||
'asset': str(asset.id),
|
||||
'hostname': asset.hostname,
|
||||
})
|
||||
elif application:
|
||||
value.update({
|
||||
'type': 'application',
|
||||
'application': application.id,
|
||||
'application_name': str(application)
|
||||
})
|
||||
|
||||
key = self.CACHE_KEY_PREFIX.format(token)
|
||||
cache.set(key, value, timeout=20)
|
||||
return token
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
serializer = self.get_serializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
asset = serializer.validated_data.get('asset')
|
||||
application = serializer.validated_data.get('application')
|
||||
system_user = serializer.validated_data['system_user']
|
||||
user = serializer.validated_data.get('user')
|
||||
token = self.create_token(user, asset, application, system_user)
|
||||
return Response({"token": token}, status=201)
|
||||
|
||||
@action(methods=['POST', 'GET'], detail=False, url_path='rdp/file')
|
||||
def get_rdp_file(self, request, *args, **kwargs):
|
||||
options = {
|
||||
'full address:s': '',
|
||||
'username:s': '',
|
||||
'screen mode id:i': '0',
|
||||
'desktopwidth:i': '1280',
|
||||
'desktopheight:i': '800',
|
||||
'use multimon:i': '1',
|
||||
'session bpp:i': '24',
|
||||
'audiomode:i': '0',
|
||||
'disable wallpaper:i': '0',
|
||||
'disable full window drag:i': '0',
|
||||
'disable menu anims:i': '0',
|
||||
'disable themes:i': '0',
|
||||
'alternate shell:s': '',
|
||||
'shell working directory:s': '',
|
||||
'authentication level:i': '2',
|
||||
'connect to console:i': '0',
|
||||
'disable cursor setting:i': '0',
|
||||
'allow font smoothing:i': '1',
|
||||
'allow desktop composition:i': '1',
|
||||
'redirectprinters:i': '0',
|
||||
'prompt for credentials on client:i': '0',
|
||||
'autoreconnection enabled:i': '1',
|
||||
'bookmarktype:i': '3',
|
||||
'use redirection server name:i': '0',
|
||||
# 'alternate shell:s:': '||MySQLWorkbench',
|
||||
# 'remoteapplicationname:s': 'Firefox',
|
||||
# 'remoteapplicationcmdline:s': '',
|
||||
}
|
||||
if self.request.method == 'GET':
|
||||
data = self.request.query_params
|
||||
else:
|
||||
data = request.data
|
||||
serializer = self.get_serializer(data=data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
asset = serializer.validated_data.get('asset')
|
||||
application = serializer.validated_data.get('application')
|
||||
system_user = serializer.validated_data['system_user']
|
||||
user = serializer.validated_data.get('user')
|
||||
height = serializer.validated_data.get('height')
|
||||
width = serializer.validated_data.get('width')
|
||||
token = self.create_token(user, asset, application, system_user)
|
||||
|
||||
# Todo: after release the address should be JumpServerAddr:3389
|
||||
address = self.request.query_params.get('address') or '1.1.1.1'
|
||||
options['full address:s'] = address
|
||||
options['username:s'] = '{}@{}'.format(user.username, token)
|
||||
options['desktopwidth:i'] = width
|
||||
options['desktopheight:i'] = height
|
||||
data = ''
|
||||
for k, v in options.items():
|
||||
data += f'{k}:{v}\n'
|
||||
response = HttpResponse(data, content_type='text/plain')
|
||||
filename = "{}-{}-jumpserver.rdp".format(user.username, asset.hostname)
|
||||
response['Content-Disposition'] = 'attachment; filename={}'.format(filename)
|
||||
return response
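The response body is the key:type:value text that RDP clients read as a .rdp file; each option key above already carries its type suffix (:s string, :i integer), so serialising the dict line by line yields something like the sketch below. The address, username and token are made-up placeholders, not values from this code.

options = {
    'full address:s': 'jumpserver.example.com:3389',  # assumed address
    'username:s': 'alice@3f2c-example-token',         # user@connection-token placeholder
    'desktopwidth:i': '1280',
    'desktopheight:i': '800',
}
rdp_text = ''.join(f'{k}:{v}\n' for k, v in options.items())
print(rdp_text)
# full address:s:jumpserver.example.com:3389
# username:s:alice@3f2c-example-token
# desktopwidth:i:1280
# desktopheight:i:800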
|
||||
|
||||
@staticmethod
|
||||
def _get_application_secret_detail(value):
|
||||
from applications.models import Application
|
||||
from perms.models import Action
|
||||
application = get_object_or_404(Application, id=value.get('application'))
|
||||
gateway = None
|
||||
|
||||
if not application.category_remote_app:
|
||||
actions = Action.NONE
|
||||
remote_app = {}
|
||||
asset = None
|
||||
domain = application.domain
|
||||
else:
|
||||
remote_app = application.get_rdp_remote_app_setting()
|
||||
actions = Action.CONNECT
|
||||
asset = application.get_remote_app_asset()
|
||||
domain = asset.domain
|
||||
|
||||
if domain and domain.has_gateway():
|
||||
gateway = domain.random_gateway()
|
||||
|
||||
return {
|
||||
'asset': asset,
|
||||
'application': application,
|
||||
'gateway': gateway,
|
||||
'remote_app': remote_app,
|
||||
'actions': actions
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def _get_asset_secret_detail(value, user, system_user):
|
||||
from assets.models import Asset
|
||||
from perms.utils.asset import get_asset_system_user_ids_with_actions_by_user
|
||||
asset = get_object_or_404(Asset, id=value.get('asset'))
|
||||
systemuserid_actions_mapper = get_asset_system_user_ids_with_actions_by_user(user, asset)
|
||||
actions = systemuserid_actions_mapper.get(system_user.id, [])
|
||||
gateway = None
|
||||
if asset and asset.domain and asset.domain.has_gateway():
|
||||
gateway = asset.domain.random_gateway()
|
||||
return {
|
||||
'asset': asset,
|
||||
'application': None,
|
||||
'gateway': gateway,
|
||||
'remote_app': None,
|
||||
'actions': actions,
|
||||
}
|
||||
|
||||
@action(methods=['POST'], detail=False, permission_classes=[IsSuperUserOrAppUser], url_path='secret-info/detail')
|
||||
def get_secret_detail(self, request, *args, **kwargs):
|
||||
from users.models import User
|
||||
from assets.models import SystemUser
|
||||
|
||||
token = request.data.get('token', '')
|
||||
key = self.CACHE_KEY_PREFIX.format(token)
|
||||
value = cache.get(key, None)
|
||||
if not value:
|
||||
return Response(status=404)
|
||||
user = get_object_or_404(User, id=value.get('user'))
|
||||
system_user = get_object_or_404(SystemUser, id=value.get('system_user'))
|
||||
data = dict(user=user, system_user=system_user)
|
||||
|
||||
if value.get('type') == 'asset':
|
||||
asset_detail = self._get_asset_secret_detail(value, user=user, system_user=system_user)
|
||||
data['type'] = 'asset'
|
||||
data.update(asset_detail)
|
||||
else:
|
||||
app_detail = self._get_application_secret_detail(value)
|
||||
data['type'] = 'application'
|
||||
data.update(app_detail)
|
||||
|
||||
serializer = self.get_serializer(data)
|
||||
return Response(data=serializer.data, status=200)
|
||||
|
||||
def get_permissions(self):
|
||||
if self.action in ["create", "get_rdp_file"]:
|
||||
if self.request.data.get('user', None):
|
||||
self.permission_classes = (IsSuperUser,)
|
||||
else:
|
||||
self.permission_classes = (IsValidUser,)
|
||||
return super().get_permissions()
|
||||
|
||||
def get(self, request):
|
||||
token = request.query_params.get('token')
|
||||
key = self.CACHE_KEY_PREFIX.format(token)
|
||||
value = cache.get(key, None)
|
||||
|
||||
if not value:
|
||||
return Response('', status=404)
|
||||
return Response(value)
|
||||
@@ -3,10 +3,10 @@
|
||||
from rest_framework.generics import UpdateAPIView
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework.permissions import AllowAny
|
||||
from django.shortcuts import get_object_or_404
|
||||
from django.utils.translation import ugettext as _
|
||||
|
||||
from common.utils import get_logger, get_object_or_none
|
||||
from common.utils import get_logger
|
||||
from common.permissions import IsOrgAdmin
|
||||
from ..models import LoginConfirmSetting
|
||||
from ..serializers import LoginConfirmSettingSerializer
|
||||
@@ -32,7 +32,7 @@ class LoginConfirmSettingUpdateApi(UpdateAPIView):
|
||||
|
||||
|
||||
class TicketStatusApi(mixins.AuthMixin, APIView):
|
||||
permission_classes = ()
|
||||
permission_classes = (AllowAny,)
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
try:
|
||||
|
||||
@@ -29,7 +29,7 @@ class MFAChallengeApi(AuthMixin, CreateAPIView):
|
||||
if not valid:
|
||||
self.request.session['auth_mfa'] = ''
|
||||
raise errors.MFAFailedError(
|
||||
username=user.username, request=self.request
|
||||
username=user.username, request=self.request, ip=self.get_request_ip()
|
||||
)
|
||||
else:
|
||||
self.request.session['auth_mfa'] = '1'
|
||||
|
||||
@@ -7,6 +7,7 @@ from django.http.response import HttpResponseRedirect
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.permissions import AllowAny
|
||||
|
||||
from common.utils.timezone import utcnow
|
||||
from common.const.http import POST, GET
|
||||
@@ -31,6 +32,7 @@ class SSOViewSet(AuthMixin, JmsGenericViewSet):
|
||||
'login_url': SSOTokenSerializer,
|
||||
'login': EmptySerializer
|
||||
}
|
||||
permission_classes = (IsSuperUser,)
|
||||
|
||||
@action(methods=[POST], detail=False, permission_classes=[IsSuperUser], url_path='login-url')
|
||||
def login_url(self, request, *args, **kwargs):
|
||||
@@ -54,7 +56,7 @@ class SSOViewSet(AuthMixin, JmsGenericViewSet):
|
||||
login_url = '%s?%s' % (reverse('api-auth:sso-login', external=True), urlencode(query))
|
||||
return Response(data={'login_url': login_url})
|
||||
|
||||
@action(methods=[GET], detail=False, filter_backends=[AuthKeyQueryDeclaration], permission_classes=[])
|
||||
@action(methods=[GET], detail=False, filter_backends=[AuthKeyQueryDeclaration], permission_classes=[AllowAny])
|
||||
def login(self, request: Request, *args, **kwargs):
|
||||
"""
|
||||
This endpoint violates the `Restful` convention
|
||||
|
||||
@@ -27,6 +27,39 @@ class LDAPAuthorizationBackend(LDAPBackend):
|
||||
is_valid = getattr(user, 'is_valid', None)
|
||||
return is_valid or is_valid is None
|
||||
|
||||
def get_or_build_user(self, username, ldap_user):
|
||||
"""
|
||||
This must return a (User, built) 2-tuple for the given LDAP user.
|
||||
|
||||
username is the Django-friendly username of the user. ldap_user.dn is
|
||||
the user's DN and ldap_user.attrs contains all of their LDAP
|
||||
attributes.
|
||||
|
||||
The returned User object may be an unsaved model instance.
|
||||
|
||||
"""
|
||||
model = self.get_user_model()
|
||||
|
||||
if self.settings.USER_QUERY_FIELD:
|
||||
query_field = self.settings.USER_QUERY_FIELD
|
||||
query_value = ldap_user.attrs[self.settings.USER_ATTR_MAP[query_field]][0]
|
||||
query_value = query_value.strip()
|
||||
lookup = query_field
|
||||
else:
|
||||
query_field = model.USERNAME_FIELD
|
||||
query_value = username.lower()
|
||||
lookup = "{}__iexact".format(query_field)
|
||||
|
||||
try:
|
||||
user = model.objects.get(**{lookup: query_value})
|
||||
except model.DoesNotExist:
|
||||
user = model(**{query_field: query_value})
|
||||
built = True
|
||||
else:
|
||||
built = False
|
||||
|
||||
return (user, built)
|
||||
|
||||
def pre_check(self, username, password):
|
||||
if not settings.AUTH_LDAP:
|
||||
error = 'Not enabled auth ldap'
|
||||
@@ -128,6 +161,7 @@ class LDAPUser(_LDAPUser):
|
||||
for field, attr in self.settings.USER_ATTR_MAP.items():
|
||||
try:
|
||||
value = self.attrs[attr][0]
|
||||
value = value.strip()
|
||||
if attr.lower() == 'useraccountcontrol' \
|
||||
and field == 'is_active' and value:
|
||||
value = int(value) & LDAP_AD_ACCOUNT_DISABLE \
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
#
|
||||
import traceback
|
||||
|
||||
from django.contrib.auth import get_user_model, authenticate
|
||||
from django.contrib.auth import get_user_model
|
||||
from radiusauth.backends import RADIUSBackend, RADIUSRealmBackend
|
||||
from django.conf import settings
|
||||
|
||||
|
||||
@@ -6,9 +6,7 @@ from django.conf import settings
|
||||
|
||||
from common.exceptions import JMSException
|
||||
from .signals import post_auth_failed
|
||||
from users.utils import (
|
||||
increase_login_failed_count, get_login_failed_count
|
||||
)
|
||||
from users.utils import LoginBlockUtil, MFABlockUtils
|
||||
|
||||
reason_password_failed = 'password_failed'
|
||||
reason_password_decrypt_failed = 'password_decrypt_failed'
|
||||
@@ -18,6 +16,9 @@ reason_user_not_exist = 'user_not_exist'
|
||||
reason_password_expired = 'password_expired'
|
||||
reason_user_invalid = 'user_invalid'
|
||||
reason_user_inactive = 'user_inactive'
|
||||
reason_user_expired = 'user_expired'
|
||||
reason_backend_not_match = 'backend_not_match'
|
||||
reason_acl_not_allow = 'acl_not_allow'
|
||||
|
||||
reason_choices = {
|
||||
reason_password_failed: _('Username/password check failed'),
|
||||
@@ -27,7 +28,10 @@ reason_choices = {
|
||||
reason_user_not_exist: _("Username does not exist"),
|
||||
reason_password_expired: _("Password expired"),
|
||||
reason_user_invalid: _('Disabled or expired'),
|
||||
reason_user_inactive: _("This account is inactive.")
|
||||
reason_user_inactive: _("This account is inactive."),
|
||||
reason_user_expired: _("This account is expired"),
|
||||
reason_backend_not_match: _("Auth backend not match"),
|
||||
reason_acl_not_allow: _("ACL is not allowed"),
|
||||
}
|
||||
old_reason_choices = {
|
||||
'0': '-',
|
||||
@@ -48,7 +52,15 @@ block_login_msg = _(
|
||||
"The account has been locked "
|
||||
"(please contact admin to unlock it or try again after {} minutes)"
|
||||
)
|
||||
mfa_failed_msg = _("MFA code invalid, or ntp sync server time")
|
||||
block_mfa_msg = _(
|
||||
"The account has been locked "
|
||||
"(please contact admin to unlock it or try again after {} minutes)"
|
||||
)
|
||||
mfa_failed_msg = _(
|
||||
"MFA code invalid, or ntp sync server time, "
|
||||
"You can also try {times_try} times "
|
||||
"(The account will be temporarily locked for {block_time} minutes)"
|
||||
)
|
||||
|
||||
mfa_required_msg = _("MFA required")
|
||||
mfa_unset_msg = _("MFA not set, please set it first")
|
||||
@@ -76,7 +88,7 @@ class AuthFailedNeedBlockMixin:
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
increase_login_failed_count(self.username, self.ip)
|
||||
LoginBlockUtil(self.username, self.ip).incr_failed_count()
|
||||
|
||||
|
||||
class AuthFailedError(Exception):
|
||||
@@ -103,13 +115,12 @@ class AuthFailedError(Exception):
|
||||
class CredentialError(AuthFailedNeedLogMixin, AuthFailedNeedBlockMixin, AuthFailedError):
|
||||
def __init__(self, error, username, ip, request):
|
||||
super().__init__(error=error, username=username, ip=ip, request=request)
|
||||
times_up = settings.SECURITY_LOGIN_LIMIT_COUNT
|
||||
times_failed = get_login_failed_count(username, ip)
|
||||
times_try = int(times_up) - int(times_failed)
|
||||
util = LoginBlockUtil(username, ip)
|
||||
times_remainder = util.get_remainder_times()
|
||||
block_time = settings.SECURITY_LOGIN_LIMIT_TIME
|
||||
|
||||
default_msg = invalid_login_msg.format(
|
||||
times_try=times_try, block_time=block_time
|
||||
times_try=times_remainder, block_time=block_time
|
||||
)
|
||||
if error == reason_password_failed:
|
||||
self.msg = default_msg
|
||||
@@ -119,12 +130,32 @@ class CredentialError(AuthFailedNeedLogMixin, AuthFailedNeedBlockMixin, AuthFail
|
||||
|
||||
class MFAFailedError(AuthFailedNeedLogMixin, AuthFailedError):
|
||||
error = reason_mfa_failed
|
||||
msg = mfa_failed_msg
|
||||
msg: str
|
||||
|
||||
def __init__(self, username, request):
|
||||
def __init__(self, username, request, ip):
|
||||
util = MFABlockUtils(username, ip)
|
||||
util.incr_failed_count()
|
||||
|
||||
times_remainder = util.get_remainder_times()
|
||||
block_time = settings.SECURITY_LOGIN_LIMIT_TIME
|
||||
|
||||
if times_remainder:
|
||||
self.msg = mfa_failed_msg.format(
|
||||
times_try=times_remainder, block_time=block_time
|
||||
)
|
||||
else:
|
||||
self.msg = block_mfa_msg.format(settings.SECURITY_LOGIN_LIMIT_TIME)
|
||||
super().__init__(username=username, request=request)
|
||||
|
||||
|
||||
class BlockMFAError(AuthFailedNeedLogMixin, AuthFailedError):
|
||||
error = 'block_mfa'
|
||||
|
||||
def __init__(self, username, request, ip):
|
||||
self.msg = block_mfa_msg.format(settings.SECURITY_LOGIN_LIMIT_TIME)
|
||||
super().__init__(username=username, request=request, ip=ip)
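LoginBlockUtil and MFABlockUtils are project helpers that are not included in this diff. Judging only by how they are used here (incr_failed_count and get_remainder_times keyed by username and ip), a hypothetical cache-backed counter of that shape might be sketched as follows; the class name, limits and key format are assumptions.

from django.core.cache import cache


class FailedCountUtil:
    """Hypothetical sketch: count failures per username+ip and report tries left."""
    key_prefix = 'auth_failed'

    def __init__(self, username, ip, limit=7, timeout=30 * 60):
        self.key = f'{self.key_prefix}_{username}_{ip}'
        self.limit = limit
        self.timeout = timeout

    def incr_failed_count(self):
        count = cache.get(self.key, 0) + 1
        cache.set(self.key, count, self.timeout)
        return count

    def get_remainder_times(self):
        return max(self.limit - cache.get(self.key, 0), 0)

    def is_block(self):
        return cache.get(self.key, 0) >= self.limit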
|
||||
|
||||
|
||||
class MFAUnsetError(AuthFailedNeedLogMixin, AuthFailedError):
|
||||
error = reason_mfa_unset
|
||||
msg = mfa_unset_msg
|
||||
@@ -180,6 +211,28 @@ class MFARequiredError(NeedMoreInfoError):
|
||||
}
|
||||
|
||||
|
||||
class ACLError(AuthFailedNeedLogMixin, AuthFailedError):
|
||||
msg = reason_acl_not_allow
|
||||
error = 'acl_error'
|
||||
|
||||
def __init__(self, msg, **kwargs):
|
||||
self.msg = msg
|
||||
super().__init__(**kwargs)
|
||||
|
||||
def as_data(self):
|
||||
return {
|
||||
"error": reason_acl_not_allow,
|
||||
"msg": self.msg
|
||||
}
|
||||
|
||||
|
||||
class LoginIPNotAllowed(ACLError):
|
||||
def __init__(self, username, request, **kwargs):
|
||||
self.username = username
|
||||
self.request = request
|
||||
super().__init__(_("IP is not allowed"), **kwargs)
|
||||
|
||||
|
||||
class LoginConfirmBaseError(NeedMoreInfoError):
|
||||
def __init__(self, ticket_id, **kwargs):
|
||||
self.ticket_id = ticket_id
|
||||
|
||||
@@ -8,11 +8,26 @@ from captcha.fields import CaptchaField, CaptchaTextInput


class UserLoginForm(forms.Form):
    username = forms.CharField(label=_('Username'), max_length=100)
    days_auto_login = int(settings.SESSION_COOKIE_AGE / 3600 / 24)
    disable_days_auto_login = settings.SESSION_EXPIRE_AT_BROWSER_CLOSE_FORCE or days_auto_login < 1

    username = forms.CharField(
        label=_('Username'), max_length=100,
        widget=forms.TextInput(attrs={
            'placeholder': _("Username"),
            'autofocus': 'autofocus'
        })
    )
    password = forms.CharField(
        label=_('Password'), widget=forms.PasswordInput,
        max_length=1024, strip=False
    )
    auto_login = forms.BooleanField(
        label=_("{} days auto login").format(days_auto_login or 1),
        required=False, initial=False, widget=forms.CheckboxInput(
            attrs={'disabled': disable_days_auto_login}
        )
    )

    def confirm_login_allowed(self, user):
        if not user.is_staff:

@@ -35,8 +50,13 @@ class CaptchaMixin(forms.Form):


class ChallengeMixin(forms.Form):
    challenge = forms.CharField(label=_('MFA code'), max_length=6,
                                required=False)
    challenge = forms.CharField(
        label=_('MFA code'), max_length=6, required=False,
        widget=forms.TextInput(attrs={
            'placeholder': _("MFA code"),
            'style': 'width: 50%'
        })
    )


def get_user_login_form_cls(*, captcha=False):

@@ -1,19 +1,21 @@
# -*- coding: utf-8 -*-
#
import inspect
from urllib.parse import urlencode
from functools import partial
import time

from django.conf import settings
from django.contrib.auth import authenticate
from django.shortcuts import reverse
from django.contrib.auth import BACKEND_SESSION_KEY

from common.utils import get_object_or_none, get_request_ip, get_logger
from users.models import User
from users.utils import (
    is_block_login, clean_failed_count
from django.contrib import auth
from django.contrib.auth import (
    BACKEND_SESSION_KEY, _get_backends,
    PermissionDenied, user_login_failed, _clean_credentials
)
from django.shortcuts import reverse

from common.utils import get_object_or_none, get_request_ip, get_logger, bulk_get
from users.models import User
from users.utils import LoginBlockUtil, MFABlockUtils
from . import errors
from .utils import rsa_decrypt
from .signals import post_auth_success, post_auth_failed

@@ -22,8 +24,62 @@ from .const import RSA_PRIVATE_KEY

logger = get_logger(__name__)


def check_backend_can_auth(username, backend_path, allowed_auth_backends):
    if allowed_auth_backends is not None and backend_path not in allowed_auth_backends:
        logger.debug('Skip user auth backend: {}, {} not in'.format(
            username, backend_path, ','.join(allowed_auth_backends)
            )
        )
        return False
    return True


def authenticate(request=None, **credentials):
    """
    If the given credentials are valid, return a User object.
    """
    username = credentials.get('username')
    allowed_auth_backends = User.get_user_allowed_auth_backends(username)

    for backend, backend_path in _get_backends(return_tuples=True):
        # Check in advance so no authentication time is wasted
        if not check_backend_can_auth(username, backend_path, allowed_auth_backends):
            continue

        backend_signature = inspect.signature(backend.authenticate)
        try:
            backend_signature.bind(request, **credentials)
        except TypeError:
            # This backend doesn't accept these credentials as arguments. Try the next one.
            continue
        try:
            user = backend.authenticate(request, **credentials)
        except PermissionDenied:
            # This backend says to stop in our tracks - this user should not be allowed in at all.
            break
        if user is None:
            continue
        # If it is still None, the check has not been done yet, so check again
        if allowed_auth_backends is None:
            # Some authentication calls do not carry username in the credentials, so check afterwards
            allowed_auth_backends = user.get_allowed_auth_backends()
            if not check_backend_can_auth(user.username, backend_path, allowed_auth_backends):
                continue

        # Annotate the user object with the path of the backend.
        user.backend = backend_path
        return user

    # The credentials supplied are invalid to all backends, fire signal
    user_login_failed.send(sender=__name__, credentials=_clean_credentials(credentials), request=request)


auth.authenticate = authenticate

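Because the override is installed module-wide via `auth.authenticate = authenticate`, existing call sites keep working unchanged; a minimal, hedged usage sketch (credentials are placeholders):

# Illustrative only; credentials are placeholders and `request` is assumed to
# be the current HttpRequest.
from django.contrib import auth

user = auth.authenticate(request, username='alice', password='secret')
if user is not None:
    # The patched version skips backends the user is not allowed to use and
    # annotates the user with the backend that accepted the credentials.
    print(user.backend)
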
class AuthMixin:
|
||||
request = None
|
||||
partial_credential_error = None
|
||||
|
||||
def get_user_from_session(self):
|
||||
if self.request.session.is_empty():
|
||||
@@ -59,7 +115,7 @@ class AuthMixin:
|
||||
else:
|
||||
username = self.request.POST.get("username")
|
||||
ip = self.get_request_ip()
|
||||
if is_block_login(username, ip):
|
||||
if LoginBlockUtil(username, ip).is_block():
|
||||
logger.warn('Ip was blocked' + ': ' + username + ':' + ip)
|
||||
exception = errors.BlockLoginError(username=username, ip=ip)
|
||||
if raise_exception:
|
||||
@@ -75,50 +131,75 @@ class AuthMixin:
|
||||
return rsa_decrypt(raw_passwd, rsa_private_key)
|
||||
except Exception as e:
|
||||
logger.error(e, exc_info=True)
|
||||
logger.error(f'Decrypt password faild: password[{raw_passwd}] rsa_private_key[{rsa_private_key}]')
|
||||
logger.error(f'Decrypt password failed: password[{raw_passwd}] '
|
||||
f'rsa_private_key[{rsa_private_key}]')
|
||||
return None
|
||||
return raw_passwd
|
||||
|
||||
def check_user_auth(self, decrypt_passwd=False):
|
||||
self.check_is_block()
|
||||
def raise_credential_error(self, error):
|
||||
raise self.partial_credential_error(error=error)
|
||||
|
||||
def get_auth_data(self, decrypt_passwd=False):
|
||||
request = self.request
|
||||
if hasattr(request, 'data'):
|
||||
data = request.data
|
||||
else:
|
||||
data = request.POST
|
||||
username = data.get('username', '')
|
||||
password = data.get('password', '')
|
||||
challenge = data.get('challenge', '')
|
||||
public_key = data.get('public_key', '')
|
||||
ip = self.get_request_ip()
|
||||
|
||||
CredentialError = partial(errors.CredentialError, username=username, ip=ip, request=request)
|
||||
items = ['username', 'password', 'challenge', 'public_key', 'auto_login']
|
||||
username, password, challenge, public_key, auto_login = bulk_get(data, *items, default='')
|
||||
password = password + challenge.strip()
|
||||
ip = self.get_request_ip()
|
||||
self.partial_credential_error = partial(errors.CredentialError, username=username, ip=ip, request=request)
|
||||
|
||||
if decrypt_passwd:
|
||||
password = self.decrypt_passwd(password)
|
||||
if not password:
|
||||
raise CredentialError(error=errors.reason_password_decrypt_failed)
|
||||
self.raise_credential_error(errors.reason_password_decrypt_failed)
|
||||
return username, password, public_key, ip, auto_login
|
||||
|
||||
user = authenticate(request,
|
||||
username=username,
|
||||
password=password + challenge.strip(),
|
||||
public_key=public_key)
|
||||
    def _check_only_allow_exists_user_auth(self, username):
        # Only allow users that already exist to authenticate
        if settings.ONLY_ALLOW_EXIST_USER_AUTH:
            exist = User.objects.filter(username=username).exists()
            if not exist:
                logger.error(f"Only allow exist user auth, login failed: {username}")
                self.raise_credential_error(errors.reason_user_not_exist)
|
||||
def _check_auth_user_is_valid(self, username, password, public_key):
|
||||
user = authenticate(self.request, username=username, password=password, public_key=public_key)
|
||||
if not user:
|
||||
raise CredentialError(error=errors.reason_password_failed)
|
||||
self.raise_credential_error(errors.reason_password_failed)
|
||||
elif user.is_expired:
|
||||
raise CredentialError(error=errors.reason_user_inactive)
|
||||
self.raise_credential_error(errors.reason_user_expired)
|
||||
elif not user.is_active:
|
||||
raise CredentialError(error=errors.reason_user_inactive)
|
||||
self.raise_credential_error(errors.reason_user_inactive)
|
||||
return user
|
||||
|
||||
    def _check_login_acl(self, user, ip):
        # ACL restrictions on user login
        from acls.models import LoginACL
        is_allowed = LoginACL.allow_user_to_login(user, ip)
        if not is_allowed:
            raise errors.LoginIPNotAllowed(username=user.username, request=self.request)
|
||||
    def check_user_auth(self, decrypt_passwd=False):
        self.check_is_block()
        request = self.request
        username, password, public_key, ip, auto_login = self.get_auth_data(decrypt_passwd=decrypt_passwd)

        self._check_only_allow_exists_user_auth(username)
        user = self._check_auth_user_is_valid(username, password, public_key)
        # Check login-acl rules
        self._check_login_acl(user, ip)
        self._check_password_require_reset_or_not(user)
        self._check_passwd_is_too_simple(user, password)

        clean_failed_count(username, ip)
        LoginBlockUtil(username, ip).clean_failed_count()
        request.session['auth_password'] = 1
        request.session['user_id'] = str(user.id)
        auth_backend = getattr(user, 'backend', 'django.contrib.auth.backends.ModelBackend')
        request.session['auth_backend'] = auth_backend
        request.session['auto_login'] = auto_login
        request.session['auth_backend'] = getattr(user, 'backend', settings.AUTH_BACKEND_MODEL)
        return user
|
||||
@classmethod
|
||||
@@ -170,15 +251,34 @@ class AuthMixin:
|
||||
raise errors.MFAUnsetError(user, self.request, url)
|
||||
raise errors.MFARequiredError()
|
||||
|
||||
def mark_mfa_ok(self):
|
||||
self.request.session['auth_mfa'] = 1
|
||||
self.request.session['auth_mfa_time'] = time.time()
|
||||
self.request.session['auth_mfa_type'] = 'otp'
|
||||
|
||||
def check_mfa_is_block(self, username, ip, raise_exception=True):
|
||||
if MFABlockUtils(username, ip).is_block():
|
||||
logger.warn('Ip was blocked' + ': ' + username + ':' + ip)
|
||||
exception = errors.BlockMFAError(username=username, request=self.request, ip=ip)
|
||||
if raise_exception:
|
||||
raise exception
|
||||
else:
|
||||
return exception
|
||||
|
||||
def check_user_mfa(self, code):
|
||||
user = self.get_user_from_session()
|
||||
ip = self.get_request_ip()
|
||||
self.check_mfa_is_block(user.username, ip)
|
||||
ok = user.check_mfa(code)
|
||||
if ok:
|
||||
self.request.session['auth_mfa'] = 1
|
||||
self.request.session['auth_mfa_time'] = time.time()
|
||||
self.request.session['auth_mfa_type'] = 'otp'
|
||||
self.mark_mfa_ok()
|
||||
return
|
||||
raise errors.MFAFailedError(username=user.username, request=self.request)
|
||||
|
||||
raise errors.MFAFailedError(
|
||||
username=user.username,
|
||||
request=self.request,
|
||||
ip=ip
|
||||
)
|
||||
|
||||
def get_ticket(self):
|
||||
from tickets.models import Ticket
|
||||
|
||||
@@ -68,6 +68,7 @@ class LoginConfirmSetting(CommonModelMixin):
|
||||
def create_confirm_ticket(self, request=None):
|
||||
from tickets import const
|
||||
from tickets.models import Ticket
|
||||
from orgs.models import Organization
|
||||
ticket_title = _('Login confirm') + ' {}'.format(self.user)
|
||||
ticket_meta = self.construct_confirm_ticket_meta(request)
|
||||
ticket_assignees = self.reviewers.all()
|
||||
@@ -75,6 +76,7 @@ class LoginConfirmSetting(CommonModelMixin):
|
||||
'title': ticket_title,
|
||||
'type': const.TicketTypeChoices.login_confirm.value,
|
||||
'meta': ticket_meta,
|
||||
'org_id': Organization.ROOT_ID,
|
||||
}
|
||||
ticket = Ticket.objects.create(**data)
|
||||
ticket.assignees.set(ticket_assignees)
|
||||
|
||||
@@ -1,16 +1,21 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from django.utils import timezone
|
||||
from rest_framework import serializers
|
||||
|
||||
from common.utils import get_object_or_none
|
||||
from users.models import User
|
||||
from assets.models import Asset, SystemUser, Gateway
|
||||
from applications.models import Application
|
||||
from users.serializers import UserProfileSerializer
|
||||
from perms.serializers.asset.permission import ActionsField
|
||||
from .models import AccessKey, LoginConfirmSetting, SSOToken
|
||||
|
||||
|
||||
__all__ = [
|
||||
'AccessKeySerializer', 'OtpVerifySerializer', 'BearerTokenSerializer',
|
||||
'MFAChallengeSerializer', 'LoginConfirmSettingSerializer', 'SSOTokenSerializer',
|
||||
'ConnectionTokenSerializer', 'ConnectionTokenSecretSerializer', 'RDPFileSerializer'
|
||||
]
|
||||
|
||||
|
||||
@@ -40,6 +45,10 @@ class BearerTokenSerializer(serializers.Serializer):
|
||||
def get_keyword(obj):
|
||||
return 'Bearer'
|
||||
|
||||
def update_last_login(self, user):
|
||||
user.last_login = timezone.now()
|
||||
user.save(update_fields=['last_login'])
|
||||
|
||||
def create(self, validated_data):
|
||||
request = self.context.get('request')
|
||||
if request.user and not request.user.is_anonymous:
|
||||
@@ -52,6 +61,8 @@ class BearerTokenSerializer(serializers.Serializer):
|
||||
"user id {} not exist".format(user_id)
|
||||
)
|
||||
token, date_expired = user.create_bearer_token(request)
|
||||
self.update_last_login(user)
|
||||
|
||||
instance = {
|
||||
"token": token,
|
||||
"date_expired": date_expired,
|
||||
@@ -82,3 +93,103 @@ class SSOTokenSerializer(serializers.Serializer):
|
||||
username = serializers.CharField(write_only=True)
|
||||
login_url = serializers.CharField(read_only=True)
|
||||
next = serializers.CharField(write_only=True, allow_blank=True, required=False, allow_null=True)
|
||||
|
||||
|
||||
class ConnectionTokenSerializer(serializers.Serializer):
|
||||
user = serializers.CharField(max_length=128, required=False, allow_blank=True)
|
||||
system_user = serializers.CharField(max_length=128, required=True)
|
||||
asset = serializers.CharField(max_length=128, required=False)
|
||||
application = serializers.CharField(max_length=128, required=False)
|
||||
|
||||
@staticmethod
|
||||
def validate_user(user_id):
|
||||
from users.models import User
|
||||
user = User.objects.filter(id=user_id).first()
|
||||
if user is None:
|
||||
raise serializers.ValidationError('user id not exist')
|
||||
return user
|
||||
|
||||
@staticmethod
|
||||
def validate_system_user(system_user_id):
|
||||
from assets.models import SystemUser
|
||||
system_user = SystemUser.objects.filter(id=system_user_id).first()
|
||||
if system_user is None:
|
||||
raise serializers.ValidationError('system_user id not exist')
|
||||
return system_user
|
||||
|
||||
@staticmethod
|
||||
def validate_asset(asset_id):
|
||||
from assets.models import Asset
|
||||
asset = Asset.objects.filter(id=asset_id).first()
|
||||
if asset is None:
|
||||
raise serializers.ValidationError('asset id not exist')
|
||||
return asset
|
||||
|
||||
@staticmethod
|
||||
def validate_application(app_id):
|
||||
from applications.models import Application
|
||||
app = Application.objects.filter(id=app_id).first()
|
||||
if app is None:
|
||||
raise serializers.ValidationError('app id not exist')
|
||||
return app
|
||||
|
||||
def validate(self, attrs):
|
||||
asset = attrs.get('asset')
|
||||
application = attrs.get('application')
|
||||
if not asset and not application:
|
||||
raise serializers.ValidationError('asset or application required')
|
||||
if asset and application:
|
||||
raise serializers.ValidationError('asset and application should only one')
|
||||
return super().validate(attrs)
|
||||
|
||||
|
||||
class ConnectionTokenUserSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = User
|
||||
fields = ['id', 'name', 'username', 'email']
|
||||
|
||||
|
||||
class ConnectionTokenAssetSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Asset
|
||||
fields = ['id', 'hostname', 'ip', 'port', 'org_id']
|
||||
|
||||
|
||||
class ConnectionTokenSystemUserSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = SystemUser
|
||||
fields = ['id', 'name', 'username', 'password', 'private_key']
|
||||
|
||||
|
||||
class ConnectionTokenGatewaySerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Gateway
|
||||
fields = ['id', 'ip', 'port', 'username', 'password', 'private_key']
|
||||
|
||||
|
||||
class ConnectionTokenRemoteAppSerializer(serializers.Serializer):
|
||||
program = serializers.CharField()
|
||||
working_directory = serializers.CharField()
|
||||
parameters = serializers.CharField()
|
||||
|
||||
|
||||
class ConnectionTokenApplicationSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = Application
|
||||
fields = ['id', 'name', 'category', 'type']
|
||||
|
||||
|
||||
class ConnectionTokenSecretSerializer(serializers.Serializer):
|
||||
type = serializers.ChoiceField(choices=[('application', 'Application'), ('asset', 'Asset')])
|
||||
user = ConnectionTokenUserSerializer(read_only=True)
|
||||
asset = ConnectionTokenAssetSerializer(read_only=True)
|
||||
remote_app = ConnectionTokenRemoteAppSerializer(read_only=True)
|
||||
application = ConnectionTokenApplicationSerializer(read_only=True)
|
||||
system_user = ConnectionTokenSystemUserSerializer(read_only=True)
|
||||
gateway = ConnectionTokenGatewaySerializer(read_only=True)
|
||||
actions = ActionsField()
|
||||
|
||||
|
||||
class RDPFileSerializer(ConnectionTokenSerializer):
|
||||
width = serializers.IntegerField(default=1280)
|
||||
height = serializers.IntegerField(default=800)
|
||||
|
||||
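A brief, hedged usage sketch of the connection-token serializer above: the field-level validators resolve ids against the database, and `validate()` then requires exactly one of `asset` or `application` (the ids below are placeholders):

# Illustrative only; the UUIDs are placeholders taken from a request payload.
data = {
    'user': '<user-uuid>',
    'system_user': '<system-user-uuid>',
    'asset': '<asset-uuid>',
}
serializer = ConnectionTokenSerializer(data=data)
if serializer.is_valid():
    # validated_data now holds the resolved model instances.
    asset = serializer.validated_data['asset']
else:
    # Unknown ids, or supplying both asset and application, land here.
    print(serializer.errors)
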
@@ -1,6 +1,5 @@
from importlib import import_module

from django.contrib.auth import BACKEND_SESSION_KEY
from django.conf import settings
from django.contrib.auth import user_logged_in
from django.core.cache import cache

@@ -24,18 +23,18 @@ def on_user_auth_login_success(sender, user, request, **kwargs):


@receiver(openid_user_login_success)
def on_oidc_user_login_success(sender, request, user, **kwargs):
    request.session[BACKEND_SESSION_KEY] = 'OIDCAuthCodeBackend'
def on_oidc_user_login_success(sender, request, user, create=False, **kwargs):
    request.session['auth_backend'] = settings.AUTH_BACKEND_OIDC_CODE
    post_auth_success.send(sender, user=user, request=request)


@receiver(openid_user_login_failed)
def on_oidc_user_login_failed(sender, username, request, reason, **kwargs):
    request.session[BACKEND_SESSION_KEY] = 'OIDCAuthCodeBackend'
    request.session['auth_backend'] = settings.AUTH_BACKEND_OIDC_CODE
    post_auth_failed.send(sender, username=username, request=request, reason=reason)


@receiver(cas_user_authenticated)
def on_cas_user_login_success(sender, request, user, **kwargs):
    request.session[BACKEND_SESSION_KEY] = 'CASBackend'
    request.session['auth_backend'] = settings.AUTH_BACKEND_CAS
    post_auth_success.send(sender, user=user, request=request)

@@ -1,12 +1,11 @@
|
||||
{% load static %}
|
||||
{% load i18n %}
|
||||
{% load bootstrap3 %}
|
||||
{% load static %}
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<!--/*@thymesVar id="LoginConstants" type="com.fit2cloud.support.common.constants.LoginConstants"*/-->
|
||||
<!--/*@thymesVar id="message" type="java.lang.String"*/-->
|
||||
<head>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
|
||||
<link rel="shortcut icon" href="{{ FAVICON_URL }}" type="image/x-icon">
|
||||
<link rel="shortcut icon" href="{{ FAVICON_URL }}" type="image/x-icon">
|
||||
<title>
|
||||
{{ JMS_TITLE }}
|
||||
</title>
|
||||
@@ -16,6 +15,8 @@
|
||||
<link href="{% static 'css/font-awesome.min.css' %}" rel="stylesheet">
|
||||
<link href="{% static 'css/bootstrap-style.css' %}" rel="stylesheet">
|
||||
<link href="{% static 'css/login-style.css' %}" rel="stylesheet">
|
||||
<link href="{% static 'css/style.css' %}" rel="stylesheet">
|
||||
<link href="{% static 'css/jumpserver.css' %}" rel="stylesheet">
|
||||
|
||||
<!-- scripts -->
|
||||
<script src="{% static 'js/jquery-3.1.1.min.js' %}"></script>
|
||||
@@ -24,26 +25,54 @@
|
||||
<script src="{% static 'js/plugins/datatables/datatables.min.js' %}"></script>
|
||||
|
||||
<style>
|
||||
.login-content {
|
||||
box-shadow: 0 5px 5px -3px rgb(0 0 0 / 20%), 0 8px 10px 1px rgb(0 0 0 / 14%), 0 3px 14px 2px rgb(0 0 0 / 12%);
|
||||
}
|
||||
|
||||
.box-1{
|
||||
.help-block {
|
||||
margin: 0;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
form label {
|
||||
color: #737373;
|
||||
font-size: 13px;
|
||||
font-weight: normal;
|
||||
}
|
||||
|
||||
.hr-line-dashed {
|
||||
border-top: 1px dashed #e7eaec;
|
||||
color: #ffffff;
|
||||
background-color: #ffffff;
|
||||
height: 1px;
|
||||
margin: 20px 0;
|
||||
}
|
||||
|
||||
.login-content {
|
||||
height: 472px;
|
||||
width: 984px;
|
||||
margin-right: auto;
|
||||
margin-left: auto;
|
||||
margin-top: calc((100vh - 470px)/2);
|
||||
|
||||
margin-top: calc((100vh - 470px) / 3);
|
||||
}
|
||||
.box-2{
|
||||
body {
|
||||
background-color: #f2f2f2;
|
||||
height: calc(100vh - (100vh - 470px) / 3);
|
||||
}
|
||||
|
||||
.right-image-box {
|
||||
height: 100%;
|
||||
width: 50%;
|
||||
float: right;
|
||||
}
|
||||
.box-3{
|
||||
|
||||
.left-form-box {
|
||||
text-align: center;
|
||||
background-color: white;
|
||||
height: 100%;
|
||||
width: 50%;
|
||||
}
|
||||
|
||||
.captcha {
|
||||
float: right;
|
||||
}
|
||||
@@ -56,136 +85,144 @@
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.form-group.has-error {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
.captch-field .has-error .help-block {
|
||||
margin-top: -8px !important;
|
||||
}
|
||||
|
||||
.no-captcha-challenge .form-group {
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
.jms-title {
|
||||
padding: 40px 10px 10px;
|
||||
}
|
||||
|
||||
.no-captcha-challenge .jms-title {
|
||||
padding: 60px 10px 10px;
|
||||
}
|
||||
|
||||
.no-captcha-challenge .welcome-message {
|
||||
padding-top: 10px;
|
||||
}
|
||||
|
||||
.radio, .checkbox {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
#github_star {
|
||||
float: right;
|
||||
margin: 10px 10px 0 0;
|
||||
}
|
||||
|
||||
</style>
|
||||
</head>
|
||||
|
||||
<body style="height: 100%;font-size: 13px">
|
||||
<div>
|
||||
<div class="box-1">
|
||||
<div class="box-2">
|
||||
<img src="{{ LOGIN_IMAGE_URL }}" style="height: 100%; width: 100%"/>
|
||||
<body>
|
||||
<div class="login-content ">
|
||||
<div class="right-image-box">
|
||||
<a href="{% if not XPACK_ENABLED %}https://github.com/jumpserver/jumpserver{% endif %}">
|
||||
<img src="{{ LOGIN_IMAGE_URL }}" style="height: 100%; width: 100%"/>
|
||||
</a>
|
||||
</div>
|
||||
<div class="left-form-box {% if not form.challenge and not form.captcha %} no-captcha-challenge {% endif %}">
|
||||
<div style="background-color: white">
|
||||
<div class="jms-title">
|
||||
<span style="font-size: 21px;font-weight:400;color: #151515;letter-spacing: 0;">{{ JMS_TITLE }}</span>
|
||||
</div>
|
||||
<div class="box-3">
|
||||
<div style="background-color: white">
|
||||
{% if form.challenge %}
|
||||
<div style="margin-top: 20px;padding-top: 30px;padding-left: 20px;padding-right: 20px;height: 60px">
|
||||
<div class="contact-form col-md-10 col-md-offset-1">
|
||||
<form id="login-form" action="" method="post" role="form" novalidate="novalidate">
|
||||
{% csrf_token %}
|
||||
<div style="line-height: 17px;margin-bottom: 20px;color: #999999;">
|
||||
{% if form.errors %}
|
||||
<p class="red-fonts" style="color: red">
|
||||
{% if form.non_field_errors %}
|
||||
{{ form.non_field_errors.as_text }}
|
||||
{% endif %}
|
||||
</p>
|
||||
{% else %}
|
||||
<div style="margin-top: 20px;padding-top: 40px;padding-left: 20px;padding-right: 20px;height: 80px">
|
||||
<p class="welcome-message">
|
||||
{% trans 'Welcome back, please enter username and password to login' %}
|
||||
</p>
|
||||
{% endif %}
|
||||
<span style="font-size: 21px;font-weight:400;color: #151515;letter-spacing: 0;">{{ JMS_TITLE }}</span>
|
||||
</div>
|
||||
<div style="font-size: 12px;color: #999999;letter-spacing: 0;line-height: 18px;margin-top: 18px">
|
||||
{% trans 'Welcome back, please enter username and password to login' %}
|
||||
</div>
|
||||
<div style="margin-bottom: 0px">
|
||||
<div>
|
||||
<div class="col-md-1"></div>
|
||||
<div class="contact-form col-md-10" style="margin-top: 0px;height: 35px">
|
||||
<form id="contact-form" action="" method="post" role="form" novalidate="novalidate">
|
||||
{% csrf_token %}
|
||||
{% if form.non_field_errors %}
|
||||
{% if form.challenge %}
|
||||
<div style="height: 50px;color: red;line-height: 17px;">
|
||||
{% else %}
|
||||
<div style="height: 70px;color: red;line-height: 17px;">
|
||||
{% endif %}
|
||||
<p class="red-fonts">{{ form.non_field_errors.as_text }}</p>
|
||||
</div>
|
||||
{% elif form.errors.captcha %}
|
||||
<p class="red-fonts">{% trans 'Captcha invalid' %}</p>
|
||||
{% else %}
|
||||
<div style="height: 50px"></div>
|
||||
{% endif %}
|
||||
|
||||
<div class="form-group">
|
||||
<input type="text" class="form-control" name="{{ form.username.html_name }}" placeholder="{% trans 'Username' %}" required="" value="{% if form.username.value %}{{ form.username.value }}{% endif %}" style="height: 35px">
|
||||
{% if form.errors.username %}
|
||||
<div class="help-block field-error">
|
||||
<p class="red-fonts">{{ form.errors.username.as_text }}</p>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<input type="password" class="form-control" id="password" placeholder="{% trans 'Password' %}" required="">
|
||||
<input id="password-hidden" type="text" style="display:none" name="{{ form.password.html_name }}">
|
||||
{% if form.errors.password %}
|
||||
<div class="help-block field-error">
|
||||
<p class="red-fonts">{{ form.errors.password.as_text }}</p>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% if form.challenge %}
|
||||
<div class="form-group">
|
||||
<input type="challenge" class="form-control" id="challenge" name="{{ form.challenge.html_name }}" placeholder="{% trans 'MFA code' %}" >
|
||||
{% if form.errors.challenge %}
|
||||
<div class="help-block field-error">
|
||||
<p class="red-fonts">{{ form.errors.challenge.as_text }}</p>
|
||||
</div>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% endif %}
|
||||
{% if form.captcha %}
|
||||
<div class="form-group" style="height: 50px;margin-bottom: 0;font-size: 13px">
|
||||
{{ form.captcha }}
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="form-group" style="height: 25px;margin-bottom: 0;font-size: 13px"></div>
|
||||
{% endif %}
|
||||
<div class="form-group" style="margin-top: 10px">
|
||||
<button type="submit" class="btn btn-transparent" onclick="doLogin();return false;">{% trans 'Login' %}</button>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
{% if AUTH_OPENID or AUTH_CAS %}
|
||||
<div class="hr-line-dashed"></div>
|
||||
<div style="display: inline-block; float: left">
|
||||
<b class="text-muted text-left" style="margin-right: 10px">{% trans "More login options" %}</b>
|
||||
{% if AUTH_OPENID %}
|
||||
<a href="{% url 'authentication:openid:login' %}">
|
||||
<i class="fa fa-openid"></i> {% trans 'OpenID' %}
|
||||
</a>
|
||||
{% endif %}
|
||||
{% if AUTH_CAS %}
|
||||
<a href="{% url 'authentication:cas:cas-login' %}">
|
||||
<i class="fa"><img src="{{ LOGIN_CAS_LOGO_URL }}" height="13" width="13"></i> {% trans 'CAS' %}
|
||||
</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="text-center" style="display: inline-block; float: right">
|
||||
{% else %}
|
||||
<div class="text-center" style="display: inline-block;">
|
||||
{% endif %}
|
||||
<a id="forgot_password" href="{% url 'authentication:forgot-password' %}">
|
||||
<small>{% trans 'Forgot password' %}?</small>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
{% bootstrap_field form.username show_label=False %}
|
||||
<div class="form-group">
|
||||
<input type="password" class="form-control" id="password" placeholder="{% trans 'Password' %}" required="">
|
||||
<input id="password-hidden" type="text" style="display:none" name="{{ form.password.html_name }}">
|
||||
</div>
|
||||
{% if form.challenge %}
|
||||
{% bootstrap_field form.challenge show_label=False %}
|
||||
{% elif form.captcha %}
|
||||
<div class="captch-field">
|
||||
{% bootstrap_field form.captcha show_label=False %}
|
||||
</div>
|
||||
{% endif %}
|
||||
<div class="form-group" style="padding-top: 5px; margin-bottom: 10px">
|
||||
<div class="row">
|
||||
<div class="col-md-6" style="text-align: left">
|
||||
{% if form.auto_login %}
|
||||
{% bootstrap_field form.auto_login form_group_class='' %}
|
||||
{% endif %}
|
||||
</div>
|
||||
<div class="col-md-6">
|
||||
<a id="forgot_password" href="{{ forgot_password_url }}" style="float: right">
|
||||
<small>{% trans 'Forgot password' %}?</small>
|
||||
</a>
|
||||
</div>
|
||||
<div class="col-md-1"></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="form-group" style="">
|
||||
<button type="submit" class="btn btn-transparent" onclick="doLogin();return false;">{% trans 'Login' %}</button>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
{% if AUTH_OPENID or AUTH_CAS %}
|
||||
<div class="hr-line-dashed"></div>
|
||||
<div style="display: inline-block; float: left">
|
||||
<b class="text-muted text-left" style="margin-right: 10px">{% trans "More login options" %}</b>
|
||||
{% if AUTH_OPENID %}
|
||||
<a href="{% url 'authentication:openid:login' %}" class="more-login-item">
|
||||
<i class="fa fa-openid"></i> {% trans 'OpenID' %}
|
||||
</a>
|
||||
{% endif %}
|
||||
{% if AUTH_CAS %}
|
||||
<a href="{% url 'authentication:cas:cas-login' %}" class="more-login-item">
|
||||
<i class="fa"><img src="{{ LOGIN_CAS_LOGO_URL }}" height="13" width="13"></i> {% trans 'CAS' %}
|
||||
</a>
|
||||
{% endif %}
|
||||
</div>
|
||||
{% else %}
|
||||
<div class="text-center" style="display: inline-block;">
|
||||
{% endif %}
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</body>
|
||||
<script type="text/javascript" src="/static/js/plugins/jsencrypt/jsencrypt.min.js"></script>
|
||||
<script>
|
||||
function encryptLoginPassword(password, rsaPublicKey){
|
||||
function encryptLoginPassword(password, rsaPublicKey) {
|
||||
var jsencrypt = new JSEncrypt(); //加密对象
|
||||
jsencrypt.setPublicKey(rsaPublicKey); // 设置密钥
|
||||
return jsencrypt.encrypt(password); //加密
|
||||
}
|
||||
|
||||
function doLogin() {
|
||||
//公钥加密
|
||||
var rsaPublicKey = "{{ rsa_public_key }}"
|
||||
var password =$('#password').val(); //明文密码
|
||||
var password = $('#password').val(); //明文密码
|
||||
var passwordEncrypted = encryptLoginPassword(password, rsaPublicKey)
|
||||
$('#password-hidden').val(passwordEncrypted); //返回给密码输入input
|
||||
$('#contact-form').submit();//post提交
|
||||
$('#login-form').submit();//post提交
|
||||
}
|
||||
|
||||
$(document).ready(function () {
|
||||
|
||||
@@ -9,6 +9,7 @@ app_name = 'authentication'
|
||||
router = DefaultRouter()
|
||||
router.register('access-keys', api.AccessKeyViewSet, 'access-key')
|
||||
router.register('sso', api.SSOViewSet, 'sso')
|
||||
router.register('connection-token', api.UserConnectionTokenViewSet, 'connection-token')
|
||||
|
||||
|
||||
urlpatterns = [
|
||||
@@ -16,12 +17,9 @@ urlpatterns = [
|
||||
path('auth/', api.TokenCreateApi.as_view(), name='user-auth'),
|
||||
path('tokens/', api.TokenCreateApi.as_view(), name='auth-token'),
|
||||
path('mfa/challenge/', api.MFAChallengeApi.as_view(), name='mfa-challenge'),
|
||||
path('connection-token/',
|
||||
api.UserConnectionTokenApi.as_view(), name='connection-token'),
|
||||
path('otp/verify/', api.UserOtpVerifyApi.as_view(), name='user-otp-verify'),
|
||||
path('login-confirm-ticket/status/', api.TicketStatusApi.as_view(), name='login-confirm-ticket-status'),
|
||||
path('login-confirm-settings/<uuid:user_id>/', api.LoginConfirmSettingUpdateApi.as_view(), name='login-confirm-setting-update')
|
||||
]
|
||||
|
||||
urlpatterns += router.urls
|
||||
|
||||
|
||||
@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
#
import base64
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_v1_5
from Crypto import Random
from Cryptodome.PublicKey import RSA
from Cryptodome.Cipher import PKCS1_v1_5
from Cryptodome import Random

from common.utils import get_logger

@@ -45,15 +45,19 @@ class UserLoginView(mixins.AuthMixin, FormView):
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
if request.user.is_staff:
|
||||
return redirect(redirect_user_first_login_or_index(
|
||||
request, self.redirect_field_name)
|
||||
first_login_url = redirect_user_first_login_or_index(
|
||||
request, self.redirect_field_name
|
||||
)
|
||||
return redirect(first_login_url)
|
||||
request.session.set_test_cookie()
|
||||
return super().get(request, *args, **kwargs)
|
||||
|
||||
def form_valid(self, form):
|
||||
if not self.request.session.test_cookie_worked():
|
||||
return HttpResponse(_("Please enable cookies and try again."))
|
||||
# https://docs.djangoproject.com/en/3.1/topics/http/sessions/#setting-test-cookies
|
||||
self.request.session.delete_test_cookie()
|
||||
|
||||
try:
|
||||
self.check_user_auth(decrypt_passwd=True)
|
||||
except errors.AuthFailedError as e:
|
||||
@@ -65,6 +69,7 @@ class UserLoginView(mixins.AuthMixin, FormView):
|
||||
new_form = form_cls(data=form.data)
|
||||
new_form._errors = form.errors
|
||||
context = self.get_context_data(form=new_form)
|
||||
self.request.session.set_test_cookie()
|
||||
return self.render_to_response(context)
|
||||
except (errors.PasswdTooSimple, errors.PasswordRequireResetError) as e:
|
||||
return redirect(e.url)
|
||||
@@ -99,11 +104,17 @@ class UserLoginView(mixins.AuthMixin, FormView):
|
||||
self.request.session[RSA_PRIVATE_KEY] = rsa_private_key
|
||||
self.request.session[RSA_PUBLIC_KEY] = rsa_public_key
|
||||
|
||||
forgot_password_url = reverse('authentication:forgot-password')
|
||||
has_other_auth_backend = settings.AUTHENTICATION_BACKENDS[0] != settings.AUTH_BACKEND_MODEL
|
||||
if has_other_auth_backend and settings.FORGOT_PASSWORD_URL:
|
||||
forgot_password_url = settings.FORGOT_PASSWORD_URL
|
||||
|
||||
context = {
|
||||
'demo_mode': os.environ.get("DEMO_MODE"),
|
||||
'AUTH_OPENID': settings.AUTH_OPENID,
|
||||
'AUTH_CAS': settings.AUTH_CAS,
|
||||
'rsa_public_key': rsa_public_key,
|
||||
'forgot_password_url': forgot_password_url
|
||||
}
|
||||
kwargs.update(context)
|
||||
return super().get_context_data(**kwargs)
|
||||
@@ -121,6 +132,13 @@ class UserLoginGuardView(mixins.AuthMixin, RedirectView):
|
||||
url = "%s?%s" % (url, args)
|
||||
return url
|
||||
|
||||
def login_it(self, user):
|
||||
auth_login(self.request, user)
|
||||
# 如果设置了自动登录,那需要设置 session_id cookie 的有效期
|
||||
if self.request.session.get('auto_login'):
|
||||
age = self.request.session.get_expiry_age()
|
||||
self.request.session.set_expiry(age)
|
||||
|
||||
def get_redirect_url(self, *args, **kwargs):
|
||||
try:
|
||||
user = self.check_user_auth_if_need()
|
||||
@@ -137,7 +155,7 @@ class UserLoginGuardView(mixins.AuthMixin, RedirectView):
|
||||
except errors.PasswdTooSimple as e:
|
||||
return e.url
|
||||
else:
|
||||
auth_login(self.request, user)
|
||||
self.login_it(user)
|
||||
self.send_auth_signal(success=True, user=user)
|
||||
self.clear_auth_mark()
|
||||
url = redirect_user_first_login_or_index(
|
||||
@@ -156,7 +174,7 @@ class UserLoginWaitConfirmView(TemplateView):
|
||||
if not ticket_id:
|
||||
ticket = None
|
||||
else:
|
||||
ticket = get_object_or_none(Ticket, pk=ticket_id)
|
||||
ticket = Ticket.all().filter(pk=ticket_id).first()
|
||||
context = super().get_context_data(**kwargs)
|
||||
if ticket:
|
||||
timestamp_created = datetime.datetime.timestamp(ticket.date_created)
|
||||
|
||||
@@ -22,10 +22,12 @@ class UserLoginOtpView(mixins.AuthMixin, FormView):
        try:
            self.check_user_mfa(otp_code)
            return redirect_to_guard_view()
        except errors.MFAFailedError as e:
        except (errors.MFAFailedError, errors.BlockMFAError) as e:
            form.add_error('otp_code', e.msg)
            return super().form_invalid(form)
        except Exception as e:
            logger.error(e)
            import traceback
            traceback.print_exc()
            return redirect_to_guard_view()
|
||||
|
||||
@@ -13,7 +13,7 @@ from rest_framework.viewsets import GenericViewSet
|
||||
|
||||
from common.permissions import IsValidUser
|
||||
from .http import HttpResponseTemporaryRedirect
|
||||
from .const import KEY_CACHE_RESOURCES_ID
|
||||
from .const import KEY_CACHE_RESOURCE_IDS
|
||||
from .utils import get_logger
|
||||
from .mixins import CommonApiMixin
|
||||
|
||||
@@ -93,7 +93,7 @@ class ResourcesIDCacheApi(APIView):
|
||||
spm = str(uuid.uuid4())
|
||||
resources = request.data.get('resources')
|
||||
if resources is not None:
|
||||
cache_key = KEY_CACHE_RESOURCES_ID.format(spm)
|
||||
cache_key = KEY_CACHE_RESOURCE_IDS.format(spm)
|
||||
cache.set(cache_key, resources, 300)
|
||||
return Response({'spm': spm})
|
||||
|
||||
|
||||
@@ -1,13 +1,24 @@
import json
from django.core.cache import cache
import time

from redis import Redis

from common.utils.lock import DistributedLock
from common.utils import lazyproperty
from common.utils import get_logger
from jumpserver.const import CONFIG

logger = get_logger(__file__)


class ComputeLock(DistributedLock):
    """
    Take this lock when the cache has to be rebuilt, to avoid duplicated computation
    """
    def __init__(self, key):
        name = f'compute:{key}'
        super().__init__(name=name)


class CacheFieldBase:
    field_type = str

@@ -25,7 +36,7 @@ class IntegerField(CacheFieldBase):
|
||||
field_type = int
|
||||
|
||||
|
||||
class CacheBase(type):
|
||||
class CacheType(type):
|
||||
def __new__(cls, name, bases, attrs: dict):
|
||||
to_update = {}
|
||||
field_desc_mapper = {}
|
||||
@@ -41,12 +52,31 @@ class CacheBase(type):
|
||||
return type.__new__(cls, name, bases, attrs)
|
||||
|
||||
|
||||
class Cache(metaclass=CacheBase):
|
||||
class Cache(metaclass=CacheType):
|
||||
field_desc_mapper: dict
|
||||
timeout = None
|
||||
|
||||
def __init__(self):
|
||||
self._data = None
|
||||
self.redis = Redis(host=CONFIG.REDIS_HOST, port=CONFIG.REDIS_PORT, password=CONFIG.REDIS_PASSWORD)
|
||||
|
||||
def __getitem__(self, item):
|
||||
return self.field_desc_mapper[item]
|
||||
|
||||
def __contains__(self, item):
|
||||
return item in self.field_desc_mapper
|
||||
|
||||
def get_field(self, name):
|
||||
return self.field_desc_mapper[name]
|
||||
|
||||
@property
|
||||
def fields(self):
|
||||
return self.field_desc_mapper.values()
|
||||
|
||||
@property
|
||||
def field_names(self):
|
||||
names = self.field_desc_mapper.keys()
|
||||
return names
|
||||
|
||||
@lazyproperty
|
||||
def key_suffix(self):
|
||||
@@ -64,81 +94,75 @@ class Cache(metaclass=CacheBase):
|
||||
@property
|
||||
def data(self):
|
||||
if self._data is None:
|
||||
data = self.get_data()
|
||||
if data is None:
|
||||
# 缓存中没有数据时,去数据库获取
|
||||
self.compute_and_set_all_data()
|
||||
data = self.load_data_from_db()
|
||||
if not data:
|
||||
with ComputeLock(self.key):
|
||||
data = self.load_data_from_db()
|
||||
if not data:
|
||||
# 缓存中没有数据时,去数据库获取
|
||||
self.init_all_values()
|
||||
return self._data
|
||||
|
||||
def get_data(self) -> dict:
|
||||
data = cache.get(self.key)
|
||||
logger.debug(f'CACHE: get {self.key} = {data}')
|
||||
if data is not None:
|
||||
data = json.loads(data)
|
||||
def to_internal_value(self, data: dict):
|
||||
internal_data = {}
|
||||
for k, v in data.items():
|
||||
field = k.decode()
|
||||
if field in self:
|
||||
value = self[field].to_internal_value(v.decode())
|
||||
internal_data[field] = value
|
||||
else:
|
||||
logger.warn(f'Cache got invalid field: '
|
||||
f'key={self.key} '
|
||||
f'invalid_field={field} '
|
||||
f'valid_fields={self.field_names}')
|
||||
return internal_data
|
||||
|
||||
def load_data_from_db(self) -> dict:
|
||||
data = self.redis.hgetall(self.key)
|
||||
logger.debug(f'Get data from cache: key={self.key} data={data}')
|
||||
if data:
|
||||
data = self.to_internal_value(data)
|
||||
self._data = data
|
||||
return data
|
||||
|
||||
def set_data(self, data):
|
||||
self._data = data
|
||||
to_json = json.dumps(data)
|
||||
logger.info(f'CACHE: set {self.key} = {to_json}, timeout={self.timeout}')
|
||||
cache.set(self.key, to_json, timeout=self.timeout)
|
||||
def save_data_to_db(self, data):
|
||||
logger.info(f'Set data to cache: key={self.key} data={data}')
|
||||
self.redis.hset(self.key, mapping=data)
|
||||
self.load_data_from_db()
|
||||
|
||||
def compute_values(self, *fields):
|
||||
field_objs = []
|
||||
for field in fields:
|
||||
field_objs.append(self[field])
|
||||
|
||||
def compute_data(self, *fields):
|
||||
field_descs = []
|
||||
if not fields:
|
||||
field_descs = self.field_desc_mapper.values()
|
||||
else:
|
||||
for field in fields:
|
||||
assert field in self.field_desc_mapper, f'{field} is not a valid field'
|
||||
field_descs.append(self.field_desc_mapper[field])
|
||||
data = {
|
||||
field_desc.field_name: field_desc.compute_value(self)
|
||||
for field_desc in field_descs
|
||||
field_obj.field_name: field_obj.compute_value(self)
|
||||
for field_obj in field_objs
|
||||
}
|
||||
return data
|
||||
|
||||
def compute_and_set_all_data(self, computed_data: dict = None):
|
||||
"""
|
||||
TODO 怎样防止并发更新全部数据,浪费数据库资源
|
||||
"""
|
||||
uncomputed_keys = ()
|
||||
if computed_data:
|
||||
computed_keys = computed_data.keys()
|
||||
all_keys = self.field_desc_mapper.keys()
|
||||
uncomputed_keys = all_keys - computed_keys
|
||||
else:
|
||||
computed_data = {}
|
||||
data = self.compute_data(*uncomputed_keys)
|
||||
data.update(computed_data)
|
||||
self.set_data(data)
|
||||
def init_all_values(self):
|
||||
t_start = time.time()
|
||||
logger.info(f'Start init cache: key={self.key}')
|
||||
data = self.compute_values(*self.field_names)
|
||||
self.save_data_to_db(data)
|
||||
logger.info(f'End init cache: cost={time.time()-t_start} key={self.key}')
|
||||
return data
|
||||
|
||||
def refresh_part_data_with_lock(self, refresh_data):
|
||||
with DistributedLock(name=f'{self.key}.refresh'):
|
||||
data = self.get_data()
|
||||
if data is not None:
|
||||
data.update(refresh_data)
|
||||
self.set_data(data)
|
||||
return data
|
||||
|
||||
def refresh(self, *fields):
|
||||
if not fields:
|
||||
# 没有指定 field 要刷新所有的值
|
||||
self.compute_and_set_all_data()
|
||||
self.init_all_values()
|
||||
return
|
||||
|
||||
data = self.get_data()
|
||||
if data is None:
|
||||
data = self.load_data_from_db()
|
||||
if not data:
|
||||
# 缓存中没有数据,设置所有的值
|
||||
self.compute_and_set_all_data()
|
||||
self.init_all_values()
|
||||
return
|
||||
|
||||
refresh_data = self.compute_data(*fields)
|
||||
if not self.refresh_part_data_with_lock(refresh_data):
|
||||
# 刷新部分失败,缓存中没有数据,更新所有的值
|
||||
self.compute_and_set_all_data(refresh_data)
|
||||
return
|
||||
refresh_values = self.compute_values(*fields)
|
||||
self.save_data_to_db(refresh_values)
|
||||
|
||||
def get_key_suffix(self):
|
||||
raise NotImplementedError
|
||||
@@ -146,10 +170,14 @@ class Cache(metaclass=CacheBase):
|
||||
def reload(self):
|
||||
self._data = None
|
||||
|
||||
def delete(self):
|
||||
def expire(self, *fields):
|
||||
self._data = None
|
||||
logger.info(f'CACHE: delete {self.key}')
|
||||
cache.delete(self.key)
|
||||
if not fields:
|
||||
logger.info(f'Delete cached key: key={self.key}')
|
||||
self.redis.delete(self.key)
|
||||
else:
|
||||
self.redis.hdel(self.key, *fields)
|
||||
logger.info(f'Expire cached fields: key={self.key} fields={fields}')
|
||||
|
||||
|
||||
class CacheValueDesc:
|
||||
@@ -167,10 +195,13 @@ class CacheValueDesc:
|
||||
return self
|
||||
if self.field_name not in instance.data:
|
||||
instance.refresh(self.field_name)
|
||||
value = instance.data[self.field_name]
|
||||
# 防止边界情况没有值,报错
|
||||
value = instance.data.get(self.field_name)
|
||||
return value
|
||||
|
||||
def compute_value(self, instance: Cache):
|
||||
t_start = time.time()
|
||||
logger.info(f'Start compute cache field: field={self.field_name} key={instance.key}')
|
||||
if self.field_type.queryset is not None:
|
||||
new_value = self.field_type.queryset.count()
|
||||
else:
|
||||
@@ -183,5 +214,8 @@ class CacheValueDesc:
|
||||
new_value = compute_func()
|
||||
|
||||
new_value = self.field_type.field_type(new_value)
|
||||
logger.info(f'CACHE: compute {instance.key}.{self.field_name} = {new_value}')
|
||||
logger.info(f'End compute cache field: cost={time.time()-t_start} field={self.field_name} value={new_value} key={instance.key}')
|
||||
return new_value
|
||||
|
||||
def to_internal_value(self, value):
|
||||
return self.field_type.field_type(value)
|
||||
|
||||
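To show how the reworked cache is meant to be consumed, a hedged sketch of a subclass follows; the no-argument `IntegerField()` construction and the `compute_<field>` hook naming are assumptions for illustration, not taken from this diff:

# Hedged sketch; field construction and compute hook names are assumptions.
class DashboardCache(Cache):
    users_amount = IntegerField()
    assets_amount = IntegerField()

    def get_key_suffix(self):
        return 'dashboard'

    def compute_users_amount(self):
        from users.models import User
        return User.objects.count()

    def compute_assets_amount(self):
        from assets.models import Asset
        return Asset.objects.count()


cache = DashboardCache()
print(cache.users_amount)       # reads the Redis hash, computing the field on a miss
cache.refresh('users_amount')   # recompute and rewrite a single hash field
cache.expire('assets_amount')   # drop one field; expire() with no args drops the whole key
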
@@ -7,7 +7,7 @@ create_success_msg = _("%(name)s was created successfully")
|
||||
update_success_msg = _("%(name)s was updated successfully")
|
||||
FILE_END_GUARD = ">>> Content End <<<"
|
||||
celery_task_pre_key = "CELERY_"
|
||||
KEY_CACHE_RESOURCES_ID = "RESOURCES_ID_{}"
|
||||
KEY_CACHE_RESOURCE_IDS = "RESOURCE_IDS_{}"
|
||||
|
||||
# AD User AccountDisable
|
||||
# https://blog.csdn.net/bytxl/article/details/17763975
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
UPDATE_NODE_TREE_LOCK_KEY = 'org_level_transaction_lock_{org_id}_assets_update_node_tree'
|
||||
UPDATE_MAPPING_NODE_TASK_LOCK_KEY = 'org_level_transaction_lock_{user_id}_update_mapping_node_task'
|
||||
@@ -10,8 +10,11 @@
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from functools import reduce, partial
|
||||
import inspect
|
||||
|
||||
from django.db.models import *
|
||||
from django.db.models import QuerySet
|
||||
from django.db.models.functions import Concat
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
@@ -82,3 +85,88 @@ class JMSModel(JMSBaseModel):
|
||||
|
||||
def concated_display(name1, name2):
|
||||
return Concat(F(name1), Value('('), F(name2), Value(')'))
|
||||
|
||||
|
||||
def output_as_string(field_name):
|
||||
return ExpressionWrapper(F(field_name), output_field=CharField())
|
||||
|
||||
|
||||
class UnionQuerySet(QuerySet):
|
||||
after_union = ['order_by']
|
||||
not_return_qs = [
|
||||
'query', 'get', 'create', 'get_or_create',
|
||||
'update_or_create', 'bulk_create', 'count',
|
||||
'latest', 'earliest', 'first', 'last', 'aggregate',
|
||||
'exists', 'update', 'delete', 'as_manager', 'explain',
|
||||
]
|
||||
|
||||
def __init__(self, *queryset_list):
|
||||
self.queryset_list = queryset_list
|
||||
self.after_union_items = []
|
||||
self.before_union_items = []
|
||||
|
||||
def __execute(self):
|
||||
queryset_list = []
|
||||
for qs in self.queryset_list:
|
||||
for attr, args, kwargs in self.before_union_items:
|
||||
qs = getattr(qs, attr)(*args, **kwargs)
|
||||
queryset_list.append(qs)
|
||||
union_qs = reduce(lambda x, y: x.union(y), queryset_list)
|
||||
for attr, args, kwargs in self.after_union_items:
|
||||
union_qs = getattr(union_qs, attr)(*args, **kwargs)
|
||||
return union_qs
|
||||
|
||||
def __before_union_perform(self, item, *args, **kwargs):
|
||||
self.before_union_items.append((item, args, kwargs))
|
||||
return self.__clone(*self.queryset_list)
|
||||
|
||||
def __after_union_perform(self, item, *args, **kwargs):
|
||||
self.after_union_items.append((item, args, kwargs))
|
||||
return self.__clone(*self.queryset_list)
|
||||
|
||||
def __clone(self, *queryset_list):
|
||||
uqs = UnionQuerySet(*queryset_list)
|
||||
uqs.after_union_items = self.after_union_items
|
||||
uqs.before_union_items = self.before_union_items
|
||||
return uqs
|
||||
|
||||
def __getattribute__(self, item):
|
||||
if item.startswith('__') or item in UnionQuerySet.__dict__ or item in [
|
||||
'queryset_list', 'after_union_items', 'before_union_items'
|
||||
]:
|
||||
return object.__getattribute__(self, item)
|
||||
|
||||
if item in UnionQuerySet.not_return_qs:
|
||||
return getattr(self.__execute(), item)
|
||||
|
||||
origin_item = object.__getattribute__(self, 'queryset_list')[0]
|
||||
origin_attr = getattr(origin_item, item, None)
|
||||
if not inspect.ismethod(origin_attr):
|
||||
return getattr(self.__execute(), item)
|
||||
|
||||
if item in UnionQuerySet.after_union:
|
||||
attr = partial(self.__after_union_perform, item)
|
||||
else:
|
||||
attr = partial(self.__before_union_perform, item)
|
||||
return attr
|
||||
|
||||
def __getitem__(self, item):
|
||||
return self.__execute()[item]
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.__execute())
|
||||
|
||||
def __str__(self):
|
||||
return str(self.__execute())
|
||||
|
||||
def __repr__(self):
|
||||
return repr(self.__execute())
|
||||
|
||||
@classmethod
|
||||
def test_it(cls):
|
||||
from assets.models import Asset
|
||||
assets1 = Asset.objects.filter(hostname__startswith='a')
|
||||
assets2 = Asset.objects.filter(hostname__startswith='b')
|
||||
|
||||
qs = cls(assets1, assets2)
|
||||
return qs
|
||||
|
||||
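Chaining behaviour of the union helper above, as a short hedged sketch (the field names are examples): filters queue up and run on each queryset before the union, `order_by` applies to the unioned queryset, and terminal calls such as `count()` trigger evaluation.

# Illustrative only; hostname prefixes and is_active are example filters.
from assets.models import Asset

qs = UnionQuerySet(
    Asset.objects.filter(hostname__startswith='a'),
    Asset.objects.filter(hostname__startswith='b'),
)
qs = qs.filter(is_active=True)   # queued: applied to each queryset before the union
qs = qs.order_by('hostname')     # queued: applied to the unioned queryset
total = qs.count()               # terminal call from not_return_qs: evaluates now
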
@@ -3,39 +3,35 @@ from rest_framework_bulk import BulkModelViewSet
|
||||
|
||||
from ..mixins.api import (
|
||||
SerializerMixin2, QuerySetMixin, ExtraFilterFieldsMixin, PaginatedResponseMixin,
|
||||
RelationMixin, AllowBulkDestoryMixin
|
||||
RelationMixin, AllowBulkDestoryMixin, RenderToJsonMixin,
|
||||
)
|
||||
|
||||
|
||||
class JmsGenericViewSet(SerializerMixin2,
|
||||
QuerySetMixin,
|
||||
ExtraFilterFieldsMixin,
|
||||
PaginatedResponseMixin,
|
||||
class CommonMixin(SerializerMixin2,
|
||||
QuerySetMixin,
|
||||
ExtraFilterFieldsMixin,
|
||||
PaginatedResponseMixin,
|
||||
RenderToJsonMixin):
|
||||
pass
|
||||
|
||||
|
||||
class JmsGenericViewSet(CommonMixin,
|
||||
GenericViewSet):
|
||||
pass
|
||||
|
||||
|
||||
class JMSModelViewSet(SerializerMixin2,
|
||||
QuerySetMixin,
|
||||
ExtraFilterFieldsMixin,
|
||||
PaginatedResponseMixin,
|
||||
class JMSModelViewSet(CommonMixin,
|
||||
ModelViewSet):
|
||||
pass
|
||||
|
||||
|
||||
class JMSBulkModelViewSet(SerializerMixin2,
|
||||
QuerySetMixin,
|
||||
ExtraFilterFieldsMixin,
|
||||
PaginatedResponseMixin,
|
||||
class JMSBulkModelViewSet(CommonMixin,
|
||||
AllowBulkDestoryMixin,
|
||||
BulkModelViewSet):
|
||||
pass
|
||||
|
||||
|
||||
class JMSBulkRelationModelViewSet(SerializerMixin2,
|
||||
QuerySetMixin,
|
||||
ExtraFilterFieldsMixin,
|
||||
PaginatedResponseMixin,
|
||||
class JMSBulkRelationModelViewSet(CommonMixin,
|
||||
RelationMixin,
|
||||
AllowBulkDestoryMixin,
|
||||
BulkModelViewSet):
|
||||
|
||||
@@ -19,7 +19,10 @@ def extract_object_name(exc, index=0):
    `No User matches the given query.`
    to extract `User`, use `index=1`
    """
    (msg, *_) = exc.args
    if exc.args:
        (msg, *others) = exc.args
    else:
        return gettext('Object')
    return gettext(msg.split(sep=' ', maxsplit=index + 1)[index])

|
||||
|
||||
|
||||
@@ -6,11 +6,25 @@ from rest_framework.serializers import ValidationError
|
||||
from rest_framework.compat import coreapi, coreschema
|
||||
from django.core.cache import cache
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django_filters import rest_framework as drf_filters
|
||||
import logging
|
||||
|
||||
from common import const
|
||||
|
||||
__all__ = ["DatetimeRangeFilter", "IDSpmFilter", 'IDInFilter', "CustomFilter"]
|
||||
__all__ = [
|
||||
"DatetimeRangeFilter", "IDSpmFilter", 'IDInFilter', "CustomFilter",
|
||||
"BaseFilterSet"
|
||||
]
|
||||
|
||||
|
||||
class BaseFilterSet(drf_filters.FilterSet):
|
||||
def do_nothing(self, queryset, name, value):
|
||||
return queryset
|
||||
|
||||
def get_query_param(self, k, default=None):
|
||||
if k in self.form.data:
|
||||
return self.form.cleaned_data[k]
|
||||
return default
|
||||
|
||||
|
||||
class DatetimeRangeFilter(filters.BaseFilterBackend):
|
||||
@@ -94,11 +108,11 @@ class IDSpmFilter(filters.BaseFilterBackend):
|
||||
spm = request.query_params.get('spm')
|
||||
if not spm:
|
||||
return queryset
|
||||
cache_key = const.KEY_CACHE_RESOURCES_ID.format(spm)
|
||||
resources_id = cache.get(cache_key)
|
||||
if resources_id is None or not isinstance(resources_id, list):
|
||||
cache_key = const.KEY_CACHE_RESOURCE_IDS.format(spm)
|
||||
resource_ids = cache.get(cache_key)
|
||||
if resource_ids is None or not isinstance(resource_ids, list):
|
||||
return queryset
|
||||
queryset = queryset.filter(id__in=resources_id)
|
||||
queryset = queryset.filter(id__in=resource_ids)
|
||||
return queryset
|
||||
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from collections import OrderedDict
|
||||
import datetime
|
||||
|
||||
from django.core.exceptions import PermissionDenied
|
||||
from django.http import Http404
|
||||
@@ -21,7 +22,7 @@ class SimpleMetadataWithFilters(SimpleMetadata):
|
||||
attrs = [
|
||||
'read_only', 'label', 'help_text',
|
||||
'min_length', 'max_length',
|
||||
'min_value', 'max_value', "write_only"
|
||||
'min_value', 'max_value', "write_only",
|
||||
]
|
||||
|
||||
def determine_actions(self, request, view):
|
||||
@@ -59,9 +60,10 @@ class SimpleMetadataWithFilters(SimpleMetadata):
|
||||
field_info['type'] = self.label_lookup[field]
|
||||
field_info['required'] = getattr(field, 'required', False)
|
||||
|
||||
default = getattr(field, 'default', False)
|
||||
if default and isinstance(default, (str, int)):
|
||||
field_info['default'] = default
|
||||
default = getattr(field, 'default', None)
|
||||
if default is not None and default != empty:
|
||||
if isinstance(default, (str, int, bool, datetime.datetime, list)):
|
||||
field_info['default'] = default
|
||||
|
||||
for attr in self.attrs:
|
||||
value = getattr(field, attr, None)
|
||||
@@ -95,6 +97,8 @@ class SimpleMetadataWithFilters(SimpleMetadata):
|
||||
fields = view.filterset_fields
|
||||
elif hasattr(view, 'get_filterset_fields'):
|
||||
fields = view.get_filterset_fields(request)
|
||||
elif hasattr(view, 'filterset_class'):
|
||||
fields = view.filterset_class.Meta.fields
|
||||
|
||||
if isinstance(fields, dict):
|
||||
fields = list(fields.keys())
|
||||
|
||||