Mirror of https://github.com/jumpserver/jumpserver.git (synced 2025-12-16 00:52:41 +00:00)
Compare commits
240 Commits
Commit SHA1s:
e9652d56ff, 97262645be, 82077a3ae1, d2760b98f6, 5f47a072b6, 25201e295d, a802fb792f, 1646f9b27b, 3f4877f26b, 0e4d778335,
52d20080ff, ed8d72c06b, 5e9e3ec6f6, 4f5f92deb8, d2a15ee702, a3a591da4b, 3f2925116e, c3e2e536e0, 8c133d5fdb, 89d8efe0f1,
54303ea33f, 4dcd8dd8dd, 4f04a7d258, bf308e24b6, b3642f3ff4, 3aed4955c8, 9a5f9a9c92, 6d5bec1ef2, e93fd1fd44, 7bf37611bd,
b8ec4bfaa5, 58b6293b76, 8e12eebceb, 72d6ea43fa, deedd49dc5, a36e6fbf84, b57453cc3c, 62f2909d59, 0d469ff95b, ca883f1fb4,
6e0fbd78e7, 0813cff74f, ff428b84f9, d0c9aa2c55, 1d5e603c0d, ddbbc8df17, 90df404931, b9cbff1a5f, b9717eece3, f9cf2a243b,
e056430fce, 2b2821c0a1, 213221beae, 2db9c90a74, 8ced6f1168, 6703ab9a77, 2fc6e6cd54, 2176fd8fac, 856e7c16e5, d4feaf1e08,
5aee2ce3db, 4424c4bde2, 5863e3e008, 79a371eb6c, 7c7de96158, 80b03e73f6, 32dbab2e34, b189e363cc, 4c3a655239, 5533114db5,
4c469afa95, 2ccc5beeda, 4b67d6925e, dd979f582a, 042ea5e137, 2a6f68c7ba, 43b5e97b95, 619b521ea1, 3447eeda68, 75ef413ea5,
662c9092dc, c8d54b28e2, 96cd307d1f, 6385cb3f86, 36e9d8101a, 3354ab8ce9, 89ec6ba6ef, af40e46a75, 86fcd3c251, c2d5928273,
e656ba70ec, bb807e6251, bbd6cae3d7, c3b09dd800, 6d427b9834, 610aaf5244, df2f1b3e6e, f26b7a470a, a4667f3312, 91081d9423,
3041697edc, 75d7530ea5, 975cc41bce, 439999381d, 39ab5978be, 7be7c8cee1, 68b22cbdec, a7c704bea3, 21993b0d89, 73ccf3be5f,
bf3056abc4, f2fd9f5990, 6d39a51c36, 7fa94008c9, 9685a25dc6, 1af4fcd381, 177055acdc, 6ec0b3ad54, 49dd611292, f557c1dace,
6e87b94789, b0dba35e5a, d0b19f20c3, 3e78d627f8, 0763404235, 31cd441a34, 40c0aac5a9, 83099dcd16, 0db72bd00f, 732b8cc0b8,
a9f90b4e31, cf1fbabca1, cbcefe8bd3, 133a2e4714, b4b9149d5d, 9af4d5f76f, 96d26cc96f, 18d8f59bb5, 841f707b6d, 448c5db3bb,
75b886675e, 24e22115de, b18ead0ffa, 6e8922da1c, dcb38ef534, 8a693d9fa7, 487932590b, 79b5aa68c8, 50a4735b07, 1183109354,
202e619c4b, 179cb7531c, 987f840431, f04544e8df, cd6dc6a722, 150552d734, 388314ca5a, 26d00329e7, e93be8f828, 2690092faf,
eabaae81ac, 6df331cbed, 0390e37fd5, 44d9aff573, 2b4f8bd11c, 231332585d, 531de188d6, 0c1f717fb2, 9d9177ed05, ab77d5db4b,
eadecb83ed, 5d6088abd3, 38f7c123e5, d7daf7071a, 795245d7f4, 7ea2a0d6a5, c90b9d70dc, f6c24f809c, e369a8d51f, c74c9f51f0,
57bf9ca8b1, ddc2d1106b, 15992c636a, 36cd18ab9a, 676ee93837, c02f8e499b, 4ebb4d1b6d, 5e7650d719, bf302f47e5, 1ddc228449,
c9065fd96e, 4a09dc6e3e, 55bfb942e2, 9aed51ffe9, a98816462f, abe32e6c79, 77c8ca5863, 8fa15b3378, a3507975fb, 76ca6d587d,
038582a8c1, ca2fc3cb5e, cc30b766f8, b7bd88b8a0, 5518e1e00f, 0632e88f5d, 9dc2255894, 1baf35004d, 5acff310f7, fdded8b90f,
1d550cbe64, 4847b7a680, 1c551b4fe8, 6ffba739f2, 0282346945, f6d9af8beb, ba4e6e9a9f, 874a3eeebf, dd793a4eca, f7e6c14bc5,
f6031d6f5d, 5e779e6542, 7031b7f28b, e2f540a1f4, 108a1da212, b4a8cb768b, 6b2f606430, 70a8db895d, 0043dc6110, 87d2798612,
e2d8eee629, 8404db8cef, fd7f379b10, 111c63ee6a, 4eb5d51840, 7f53a80855, 90afabdcb2, de405be753, f84b845385, b1ac3fa94f
.github/ISSUE_TEMPLATE/----.md (vendored, 5 lines changed)
@@ -2,8 +2,9 @@
name: 需求建议
about: 提出针对本项目的想法和建议
title: "[Feature] "
labels: 待处理, 需求
assignees: 'ibuler'
labels: 类型:需求
assignees: ibuler

---

**请描述您的需求或者改进建议.**

.github/ISSUE_TEMPLATE/bug---.md (vendored, 2 lines changed)
@@ -2,7 +2,7 @@
name: Bug 提交
about: 提交产品缺陷帮助我们更好的改进
title: "[Bug] "
labels: bug, 待处理
labels: 类型:bug
assignees: wojiushixiaobai

---

.github/ISSUE_TEMPLATE/question.md (vendored, 2 lines changed)
@@ -2,7 +2,7 @@
name: 问题咨询
about: 提出针对本项目安装部署、使用及其他方面的相关问题
title: "[Question] "
labels: 提问, 待处理
labels: 类型:提问
assignees: wojiushixiaobai

---
Dockerfile (38 lines changed)
@@ -1,5 +1,6 @@
FROM registry.fit2cloud.com/public/python:v3 as stage-build
MAINTAINER Jumpserver Team <ibuler@qq.com>
# 编译代码
FROM python:3.8.6-slim as stage-build
MAINTAINER JumpServer Team <ibuler@qq.com>
ARG VERSION
ENV VERSION=$VERSION

@@ -8,33 +9,38 @@ ADD . .
RUN cd utils && bash -ixeu build.sh

FROM registry.fit2cloud.com/public/python:v3
# 构建运行时环境
FROM python:3.8.6-slim
ARG PIP_MIRROR=https://pypi.douban.com/simple
ENV PIP_MIRROR=$PIP_MIRROR
ARG MYSQL_MIRROR=https://mirrors.tuna.tsinghua.edu.cn/mysql/yum/mysql57-community-el6/
ENV MYSQL_MIRROR=$MYSQL_MIRROR
ARG PIP_JMS_MIRROR=https://pypi.douban.com/simple
ENV PIP_JMS_MIRROR=$PIP_JMS_MIRROR

WORKDIR /opt/jumpserver

COPY ./requirements ./requirements
RUN useradd jumpserver
RUN yum -y install epel-release && \
    echo -e "[mysql]\nname=mysql\nbaseurl=${MYSQL_MIRROR}\ngpgcheck=0\nenabled=1" > /etc/yum.repos.d/mysql.repo
RUN yum -y install $(cat requirements/rpm_requirements.txt)
RUN pip install --upgrade pip setuptools==49.6.0 wheel -i ${PIP_MIRROR} && \
    pip config set global.index-url ${PIP_MIRROR}
RUN pip install $(grep 'jms' requirements/requirements.txt) -i https://pypi.org/simple
RUN pip install -r requirements/requirements.txt
COPY ./requirements/deb_buster_requirements.txt ./requirements/deb_buster_requirements.txt
RUN sed -i 's/deb.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list \
    && sed -i 's/security.debian.org/mirrors.aliyun.com/g' /etc/apt/sources.list \
    && apt update \
    && grep -v '^#' ./requirements/deb_buster_requirements.txt | xargs apt -y install \
    && localedef -c -f UTF-8 -i zh_CN zh_CN.UTF-8 \
    && cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime

COPY ./requirements/requirements.txt ./requirements/requirements.txt
RUN pip install --upgrade pip==20.2.4 setuptools==49.6.0 wheel==0.34.2 -i ${PIP_MIRROR} \
    && pip config set global.index-url ${PIP_MIRROR} \
    && pip install --no-cache-dir $(grep 'jms' requirements/requirements.txt) -i ${PIP_JMS_MIRROR} \
    && pip install --no-cache-dir -r requirements/requirements.txt

COPY --from=stage-build /opt/jumpserver/release/jumpserver /opt/jumpserver
RUN mkdir -p /root/.ssh/ && echo -e "Host *\n\tStrictHostKeyChecking no\n\tUserKnownHostsFile /dev/null" > /root/.ssh/config
RUN mkdir -p /root/.ssh/ \
    && echo "Host *\n\tStrictHostKeyChecking no\n\tUserKnownHostsFile /dev/null" > /root/.ssh/config

RUN echo > config.yml
VOLUME /opt/jumpserver/data
VOLUME /opt/jumpserver/logs

ENV LANG=zh_CN.UTF-8
ENV LC_ALL=zh_CN.UTF-8

EXPOSE 8070
EXPOSE 8080
README.md (58 lines changed)
@@ -25,7 +25,8 @@ JumpServer 采纳分布式架构,支持多机房跨区域部署,支持横向
- 无插件: 仅需浏览器,极致的 Web Terminal 使用体验;
- 多云支持: 一套系统,同时管理不同云上面的资产;
- 云端存储: 审计录像云端存储,永不丢失;
- 多租户: 一套系统,多个子公司和部门同时使用。
- 多租户: 一套系统,多个子公司和部门同时使用;
- 多应用支持: 数据库,Windows远程应用,Kubernetes。

## 版本说明

@@ -198,13 +199,61 @@ v2.1.0 是 v2.0.0 之后的功能版本。
<td>文件传输</td>
<td>可对文件的上传、下载记录进行审计</td>
</tr>
<tr>
<td rowspan="20">数据库审计<br>Database</td>
<td rowspan="2">连接方式</td>
<td>命令方式</td>
</tr>
<tr>
<td>Web UI方式 (X-PACK)</td>
</tr>

<tr>
<td rowspan="4">支持的数据库</td>
<td>MySQL</td>
</tr>
<tr>
<td>Oracle (X-PACK)</td>
</tr>
<tr>
<td>MariaDB (X-PACK)</td>
</tr>
<tr>
<td>PostgreSQL (X-PACK)</td>
</tr>
<tr>
<td rowspan="6">功能亮点</td>
<td>语法高亮</td>
</tr>
<tr>
<td>SQL格式化</td>
</tr>
<tr>
<td>支持快捷键</td>
</tr>
<tr>
<td>支持选中执行</td>
</tr>
<tr>
<td>SQL历史查询</td>
</tr>
<tr>
<td>支持页面创建 DB, TABLE</td>
</tr>
<tr>
<td rowspan="2">会话审计</td>
<td>命令记录</td>
</tr>
<tr>
<td>录像回放</td>
</tr>
</table>

## 快速开始

- [极速安装](https://docs.jumpserver.org/zh/master/install/setup_by_fast/)
- [完整文档](https://docs.jumpserver.org)
- [演示视频](https://jumpserver.oss-cn-hangzhou.aliyuncs.com/jms-media/%E3%80%90%E6%BC%94%E7%A4%BA%E8%A7%86%E9%A2%91%E3%80%91Jumpserver%20%E5%A0%A1%E5%9E%92%E6%9C%BA%20V1.5.0%20%E6%BC%94%E7%A4%BA%E8%A7%86%E9%A2%91%20-%20final.mp4)
- [演示视频](https://www.bilibili.com/video/BV1ZV41127GB)

## 组件项目
- [Lina](https://github.com/jumpserver/lina) JumpServer Web UI 项目

@@ -212,6 +261,11 @@ v2.1.0 是 v2.0.0 之后的功能版本。
- [Koko](https://github.com/jumpserver/koko) JumpServer 字符协议 Connector 项目,替代原来 Python 版本的 [Coco](https://github.com/jumpserver/coco)
- [Guacamole](https://github.com/jumpserver/docker-guacamole) JumpServer 图形协议 Connector 项目,依赖 [Apache Guacamole](https://guacamole.apache.org/)

## 致谢
- [Apache Guacamole](https://guacamole.apache.org/) Web页面连接 RDP, SSH, VNC协议设备,JumpServer 图形化连接依赖
- [OmniDB](https://omnidb.org/) Web页面连接使用数据库,JumpServer Web数据库依赖

## JumpServer 企业版
- [申请企业版试用](https://jinshuju.net/f/kyOYpi)
> 注:企业版支持离线安装,申请通过后会提供高速下载链接。
apps/.gitattributes (new file, vendored, 2 lines)
@@ -0,0 +1,2 @@
*.js linguist-language=python
*.html linguist-language=python

@@ -1,3 +1,5 @@
from .application import *
from .mixin import *
from .remote_app import *
from .database_app import *
from .k8s_app import *

apps/applications/api/application.py (new file, 20 lines)
@@ -0,0 +1,20 @@
# coding: utf-8
#

from orgs.mixins.api import OrgBulkModelViewSet

from .mixin import ApplicationAttrsSerializerViewMixin
from ..hands import IsOrgAdminOrAppUser
from .. import models, serializers

__all__ = [
    'ApplicationViewSet',
]


class ApplicationViewSet(ApplicationAttrsSerializerViewMixin, OrgBulkModelViewSet):
    model = models.Application
    filter_fields = ('name', 'type', 'category')
    search_fields = filter_fields
    permission_classes = (IsOrgAdminOrAppUser,)
    serializer_class = serializers.ApplicationSerializer
apps/applications/api/mixin.py (new file, 139 lines)
@@ -0,0 +1,139 @@
import uuid

from common.exceptions import JMSException
from orgs.models import Organization
from .. import models


class ApplicationAttrsSerializerViewMixin:

    def get_serializer_class(self):
        serializer_class = super().get_serializer_class()
        if getattr(self, 'swagger_fake_view', False):
            return serializer_class
        app_type = self.request.query_params.get('type')
        app_category = self.request.query_params.get('category')
        type_options = list(dict(models.Category.get_all_type_serializer_mapper()).keys())
        category_options = list(dict(models.Category.get_category_serializer_mapper()).keys())

        # ListAPIView 没有 action 属性
        # 不使用method属性,因为options请求时为method为post
        action = getattr(self, 'action', 'list')

        if app_type and app_type not in type_options:
            raise JMSException(
                'Invalid query parameter `type`, select from the following options: {}'
                ''.format(type_options)
            )
        if app_category and app_category not in category_options:
            raise JMSException(
                'Invalid query parameter `category`, select from the following options: {}'
                ''.format(category_options)
            )

        if action in [
            'create', 'update', 'partial_update', 'bulk_update', 'partial_bulk_update'
        ] and not app_type:
            # action: create / update
            raise JMSException(
                'The `{}` action must take the `type` query parameter'.format(action)
            )

        if app_type:
            # action: create / update / list / retrieve / metadata
            attrs_cls = models.Category.get_type_serializer_cls(app_type)
            class_name = 'ApplicationDynamicSerializer{}'.format(app_type.title())
        elif app_category:
            # action: list / retrieve / metadata
            attrs_cls = models.Category.get_category_serializer_cls(app_category)
            class_name = 'ApplicationDynamicSerializer{}'.format(app_category.title())
        else:
            attrs_cls = models.Category.get_no_password_serializer_cls()
            class_name = 'ApplicationDynamicSerializer'
        cls = type(class_name, (serializer_class,), {'attrs': attrs_cls()})
        return cls


class SerializeApplicationToTreeNodeMixin:

    @staticmethod
    def _serialize_db(db):
        return {
            'id': db.id,
            'name': db.name,
            'title': db.name,
            'pId': '',
            'open': False,
            'iconSkin': 'database',
            'meta': {'type': 'database_app'}
        }

    @staticmethod
    def _serialize_remote_app(remote_app):
        return {
            'id': remote_app.id,
            'name': remote_app.name,
            'title': remote_app.name,
            'pId': '',
            'open': False,
            'isParent': False,
            'iconSkin': 'chrome',
            'meta': {'type': 'remote_app'}
        }

    @staticmethod
    def _serialize_cloud(cloud):
        return {
            'id': cloud.id,
            'name': cloud.name,
            'title': cloud.name,
            'pId': '',
            'open': False,
            'isParent': False,
            'iconSkin': 'k8s',
            'meta': {'type': 'k8s_app'}
        }

    def _serialize_application(self, application):
        method_name = f'_serialize_{application.category}'
        data = getattr(self, method_name)(application)
        data.update({
            'pId': application.org.id,
            'org_name': application.org_name
        })
        return data

    def serialize_applications(self, applications):
        data = [self._serialize_application(application) for application in applications]
        return data

    @staticmethod
    def _serialize_organization(org):
        return {
            'id': org.id,
            'name': org.name,
            'title': org.name,
            'pId': '',
            'open': True,
            'isParent': True,
            'meta': {
                'type': 'node'
            }
        }

    def serialize_organizations(self, organizations):
        data = [self._serialize_organization(org) for org in organizations]
        return data

    @staticmethod
    def filter_organizations(applications):
        organizations_id = set(applications.values_list('org_id', flat=True))
        organizations = [Organization.get_instance(org_id) for org_id in organizations_id]
        return organizations

    def serialize_applications_with_org(self, applications):
        organizations = self.filter_organizations(applications)
        data_organizations = self.serialize_organizations(organizations)
        data_applications = self.serialize_applications(applications)
        data = data_organizations + data_applications
        return data
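A minimal sketch of what the mixin above effectively builds for a request carrying `?type=mysql` (illustrative only; the request path and variable bindings are assumptions, the class lookups follow the code above):

    # Illustrative sketch, not repository code.
    app_type = 'mysql'
    attrs_cls = models.Category.get_type_serializer_cls(app_type)            # MySQLAttrsSerializer
    class_name = 'ApplicationDynamicSerializer{}'.format(app_type.title())   # 'ApplicationDynamicSerializerMysql'
    cls = type(class_name, (serializer_class,), {'attrs': attrs_cls()})
    # `cls` is the base ApplicationSerializer plus an `attrs` field that
    # validates the MySQL-specific attributes (host, port, database).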
@@ -3,8 +3,9 @@

from orgs.mixins.api import OrgBulkModelViewSet
from orgs.mixins import generics
from common.exceptions import JMSException
from ..hands import IsOrgAdmin, IsAppUser
from ..models import RemoteApp
from .. import models
from ..serializers import RemoteAppSerializer, RemoteAppConnectionInfoSerializer

@@ -14,7 +15,7 @@ __all__ = [

class RemoteAppViewSet(OrgBulkModelViewSet):
    model = RemoteApp
    model = models.RemoteApp
    filter_fields = ('name', 'type', 'comment')
    search_fields = filter_fields
    permission_classes = (IsOrgAdmin,)

@@ -22,6 +23,18 @@ class RemoteAppViewSet(OrgBulkModelViewSet):

class RemoteAppConnectionInfoApi(generics.RetrieveAPIView):
    model = RemoteApp
    model = models.Application
    permission_classes = (IsAppUser, )
    serializer_class = RemoteAppConnectionInfoSerializer

    @staticmethod
    def check_category_allowed(obj):
        if not obj.category_is_remote_app:
            raise JMSException(
                'The request instance(`{}`) is not of category `remote_app`'.format(obj.category)
            )

    def get_object(self):
        obj = super().get_object()
        self.check_category_allowed(obj)
        return obj
apps/applications/migrations/0006_application.py (new file, 140 lines)
@@ -0,0 +1,140 @@
# Generated by Django 2.2.13 on 2020-10-19 12:01

from django.db import migrations, models
import django.db.models.deletion
import django_mysql.models
import uuid


CATEGORY_DB_LIST = ['mysql', 'oracle', 'postgresql', 'mariadb']
CATEGORY_REMOTE_LIST = ['chrome', 'mysql_workbench', 'vmware_client', 'custom']
CATEGORY_CLOUD_LIST = ['k8s']

CATEGORY_DB = 'db'
CATEGORY_REMOTE = 'remote_app'
CATEGORY_CLOUD = 'cloud'
CATEGORY_LIST = [CATEGORY_DB, CATEGORY_REMOTE, CATEGORY_CLOUD]


def get_application_category(old_app):
    _type = old_app.type
    if _type in CATEGORY_DB_LIST:
        category = CATEGORY_DB
    elif _type in CATEGORY_REMOTE_LIST:
        category = CATEGORY_REMOTE
    elif _type in CATEGORY_CLOUD_LIST:
        category = CATEGORY_CLOUD
    else:
        category = None
    return category


def common_to_application_json(old_app):
    category = get_application_category(old_app)
    date_updated = old_app.date_updated if hasattr(old_app, 'date_updated') else old_app.date_created
    return {
        'id': old_app.id,
        'name': old_app.name,
        'type': old_app.type,
        'category': category,
        'comment': old_app.comment,
        'created_by': old_app.created_by,
        'date_created': old_app.date_created,
        'date_updated': date_updated,
        'org_id': old_app.org_id
    }


def db_to_application_json(database):
    app_json = common_to_application_json(database)
    app_json.update({
        'attrs': {
            'host': database.host,
            'port': database.port,
            'database': database.database
        }
    })
    return app_json


def remote_to_application_json(remote):
    app_json = common_to_application_json(remote)
    attrs = {
        'asset': str(remote.asset.id),
        'path': remote.path,
    }
    attrs.update(remote.params)
    app_json.update({
        'attrs': attrs
    })
    return app_json


def k8s_to_application_json(k8s):
    app_json = common_to_application_json(k8s)
    app_json.update({
        'attrs': {
            'cluster': k8s.cluster
        }
    })
    return app_json


def migrate_and_integrate_applications(apps, schema_editor):
    db_alias = schema_editor.connection.alias

    database_app_model = apps.get_model("applications", "DatabaseApp")
    remote_app_model = apps.get_model("applications", "RemoteApp")
    k8s_app_model = apps.get_model("applications", "K8sApp")

    database_apps = database_app_model.objects.using(db_alias).all()
    remote_apps = remote_app_model.objects.using(db_alias).all()
    k8s_apps = k8s_app_model.objects.using(db_alias).all()

    database_applications = [db_to_application_json(db_app) for db_app in database_apps]
    remote_applications = [remote_to_application_json(remote_app) for remote_app in remote_apps]
    k8s_applications = [k8s_to_application_json(k8s_app) for k8s_app in k8s_apps]

    applications_json = database_applications + remote_applications + k8s_applications
    application_model = apps.get_model("applications", "Application")
    applications = [
        application_model(**application_json)
        for application_json in applications_json
        if application_json['category'] in CATEGORY_LIST
    ]
    for application in applications:
        if application_model.objects.using(db_alias).filter(name=application.name).exists():
            application.name = '{}-{}'.format(application.name, application.type)
        application.save()


class Migration(migrations.Migration):

    dependencies = [
        ('assets', '0057_fill_node_value_assets_amount_and_parent_key'),
        ('applications', '0005_k8sapp'),
    ]

    operations = [
        migrations.CreateModel(
            name='Application',
            fields=[
                ('org_id', models.CharField(blank=True, db_index=True, default='', max_length=36, verbose_name='Organization')),
                ('id', models.UUIDField(default=uuid.uuid4, primary_key=True, serialize=False)),
                ('created_by', models.CharField(blank=True, max_length=32, null=True, verbose_name='Created by')),
                ('date_created', models.DateTimeField(auto_now_add=True, null=True, verbose_name='Date created')),
                ('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date updated')),
                ('name', models.CharField(max_length=128, verbose_name='Name')),
                ('category', models.CharField(choices=[('db', 'Database'), ('remote_app', 'Remote app'), ('cloud', 'Cloud')], max_length=16, verbose_name='Category')),
                ('type', models.CharField(choices=[('mysql', 'MySQL'), ('oracle', 'Oracle'), ('postgresql', 'PostgreSQL'), ('mariadb', 'MariaDB'), ('chrome', 'Chrome'), ('mysql_workbench', 'MySQL Workbench'), ('vmware_client', 'vSphere Client'), ('custom', 'Custom'), ('k8s', 'Kubernetes')], max_length=16, verbose_name='Type')),
                ('attrs', django_mysql.models.JSONField(default=dict)),
                ('comment', models.TextField(blank=True, default='', max_length=128, verbose_name='Comment')),
                ('domain', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='applications', to='assets.Domain', verbose_name='Domain')),
            ],
            options={
                'ordering': ('name',),
                'unique_together': {('org_id', 'name')},
            },
        ),
        migrations.RunPython(migrate_and_integrate_applications),
    ]
apps/applications/migrations/0007_auto_20201119_1110.py (new file, 18 lines)
@@ -0,0 +1,18 @@
# Generated by Django 3.1 on 2020-11-19 03:10

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('applications', '0006_application'),
    ]

    operations = [
        migrations.AlterField(
            model_name='application',
            name='attrs',
            field=models.JSONField(),
        ),
    ]
@@ -1,3 +1,4 @@
from .application import *
from .remote_app import *
from .database_app import *
from .k8s_app import *

apps/applications/models/application.py (new file, 140 lines)
@@ -0,0 +1,140 @@
from itertools import chain

from django.db import models
from django.utils.translation import ugettext_lazy as _

from orgs.mixins.models import OrgModelMixin
from common.mixins import CommonModelMixin
from common.db.models import ChoiceSet


class DBType(ChoiceSet):
    mysql = 'mysql', 'MySQL'
    oracle = 'oracle', 'Oracle'
    pgsql = 'postgresql', 'PostgreSQL'
    mariadb = 'mariadb', 'MariaDB'

    @classmethod
    def get_type_serializer_cls_mapper(cls):
        from ..serializers import database_app
        mapper = {
            cls.mysql: database_app.MySQLAttrsSerializer,
            cls.oracle: database_app.OracleAttrsSerializer,
            cls.pgsql: database_app.PostgreAttrsSerializer,
            cls.mariadb: database_app.MariaDBAttrsSerializer,
        }
        return mapper


class RemoteAppType(ChoiceSet):
    chrome = 'chrome', 'Chrome'
    mysql_workbench = 'mysql_workbench', 'MySQL Workbench'
    vmware_client = 'vmware_client', 'vSphere Client'
    custom = 'custom', _('Custom')

    @classmethod
    def get_type_serializer_cls_mapper(cls):
        from ..serializers import remote_app
        mapper = {
            cls.chrome: remote_app.ChromeAttrsSerializer,
            cls.mysql_workbench: remote_app.MySQLWorkbenchAttrsSerializer,
            cls.vmware_client: remote_app.VMwareClientAttrsSerializer,
            cls.custom: remote_app.CustomRemoteAppAttrsSeralizers,
        }
        return mapper


class CloudType(ChoiceSet):
    k8s = 'k8s', 'Kubernetes'

    @classmethod
    def get_type_serializer_cls_mapper(cls):
        from ..serializers import k8s_app
        mapper = {
            cls.k8s: k8s_app.K8sAttrsSerializer,
        }
        return mapper


class Category(ChoiceSet):
    db = 'db', _('Database')
    remote_app = 'remote_app', _('Remote app')
    cloud = 'cloud', 'Cloud'

    @classmethod
    def get_category_type_mapper(cls):
        return {
            cls.db: DBType,
            cls.remote_app: RemoteAppType,
            cls.cloud: CloudType
        }

    @classmethod
    def get_category_type_choices_mapper(cls):
        return {
            name: tp.choices
            for name, tp in cls.get_category_type_mapper().items()
        }

    @classmethod
    def get_type_choices(cls, category):
        return cls.get_category_type_choices_mapper().get(category, [])

    @classmethod
    def get_all_type_choices(cls):
        all_grouped_choices = tuple(cls.get_category_type_choices_mapper().values())
        return tuple(chain(*all_grouped_choices))

    @classmethod
    def get_all_type_serializer_mapper(cls):
        mapper = {}
        for tp in cls.get_category_type_mapper().values():
            mapper.update(tp.get_type_serializer_cls_mapper())
        return mapper

    @classmethod
    def get_type_serializer_cls(cls, tp):
        mapper = cls.get_all_type_serializer_mapper()
        return mapper.get(tp, None)

    @classmethod
    def get_category_serializer_mapper(cls):
        from ..serializers import remote_app, database_app, k8s_app
        return {
            cls.db: database_app.DBAttrsSerializer,
            cls.remote_app: remote_app.RemoteAppAttrsSerializer,
            cls.cloud: k8s_app.CloudAttrsSerializer,
        }

    @classmethod
    def get_category_serializer_cls(cls, cg):
        mapper = cls.get_category_serializer_mapper()
        return mapper.get(cg, None)

    @classmethod
    def get_no_password_serializer_cls(cls):
        from ..serializers import common
        return common.NoPasswordSerializer


class Application(CommonModelMixin, OrgModelMixin):
    name = models.CharField(max_length=128, verbose_name=_('Name'))
    domain = models.ForeignKey('assets.Domain', null=True, blank=True, related_name='applications', verbose_name=_("Domain"), on_delete=models.SET_NULL)
    category = models.CharField(max_length=16, choices=Category.choices, verbose_name=_('Category'))
    type = models.CharField(max_length=16, choices=Category.get_all_type_choices(), verbose_name=_('Type'))
    attrs = models.JSONField()
    comment = models.TextField(
        max_length=128, default='', blank=True, verbose_name=_('Comment')
    )

    class Meta:
        unique_together = [('org_id', 'name')]
        ordering = ('name',)

    def __str__(self):
        category_display = self.get_category_display()
        type_display = self.get_type_display()
        return f'{self.name}({type_display})[{category_display}]'

    def category_is_remote_app(self):
        return self.category == Category.remote_app
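A quick illustration of the Category helpers defined above (the snippet is not repository code, and the exact choices representation depends on the project's ChoiceSet implementation):

    # Illustrative only.
    Category.get_type_choices('db')
    # -> roughly [('mysql', 'MySQL'), ('oracle', 'Oracle'), ('postgresql', 'PostgreSQL'), ('mariadb', 'MariaDB')]
    Category.get_type_serializer_cls('k8s')              # -> K8sAttrsSerializer
    Category.get_category_serializer_cls('remote_app')   # -> RemoteAppAttrsSerializer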
@@ -1,3 +1,5 @@
from .application import *
from .remote_app import *
from .database_app import *
from .k8s_app import *
from .common import *

apps/applications/serializers/application.py (new file, 42 lines)
@@ -0,0 +1,42 @@
# coding: utf-8
#

from rest_framework import serializers
from django.utils.translation import ugettext_lazy as _
from orgs.mixins.serializers import BulkOrgResourceModelSerializer

from .. import models

__all__ = [
    'ApplicationSerializer',
]


class ApplicationSerializer(BulkOrgResourceModelSerializer):
    category_display = serializers.ReadOnlyField(source='get_category_display', label=_('Category'))
    type_display = serializers.ReadOnlyField(source='get_type_display', label=_('Type'))

    class Meta:
        model = models.Application
        fields = [
            'id', 'name', 'category', 'category_display', 'type', 'type_display', 'attrs',
            'domain', 'created_by', 'date_created', 'date_updated', 'comment'
        ]
        read_only_fields = [
            'created_by', 'date_created', 'date_updated', 'get_type_display',
        ]

    def create(self, validated_data):
        validated_data['attrs'] = validated_data.pop('attrs', {})
        instance = super().create(validated_data)
        return instance

    def update(self, instance, validated_data):
        new_attrs = validated_data.pop('attrs', {})
        instance = super().update(instance, validated_data)
        attrs = instance.attrs
        attrs.update(new_attrs)
        instance.attrs = attrs
        instance.save()
        return instance
apps/applications/serializers/common.py (new file, 11 lines)
@@ -0,0 +1,11 @@
from rest_framework import serializers


class NoPasswordSerializer(serializers.JSONField):
    def to_representation(self, value):
        new_value = {}
        for k, v in value.items():
            if 'password' not in k:
                new_value[k] = v
        return new_value
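A small usage illustration of the field above (the attrs values are hypothetical): any key containing "password" is dropped from the serialized output.

    # Illustrative only.
    field = NoPasswordSerializer()
    field.to_representation({
        'chrome_username': 'admin',
        'chrome_password': 'secret',          # filtered out
        'chrome_target': 'https://example.com',
    })
    # -> {'chrome_username': 'admin', 'chrome_target': 'https://example.com'}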
@@ -1,14 +1,35 @@
# coding: utf-8
#
from rest_framework import serializers
from django.utils.translation import ugettext_lazy as _

from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from common.serializers import AdaptedBulkListSerializer

from .. import models

__all__ = [
    'DatabaseAppSerializer',
]

class DBAttrsSerializer(serializers.Serializer):
    host = serializers.CharField(max_length=128, label=_('Host'))
    port = serializers.IntegerField(label=_('Port'))
    # 添加allow_null=True,兼容之前数据库中database字段为None的情况
    database = serializers.CharField(max_length=128, required=True, allow_null=True, label=_('Database'))


class MySQLAttrsSerializer(DBAttrsSerializer):
    port = serializers.IntegerField(default=3306, label=_('Port'))


class PostgreAttrsSerializer(DBAttrsSerializer):
    port = serializers.IntegerField(default=5432, label=_('Port'))


class OracleAttrsSerializer(DBAttrsSerializer):
    port = serializers.IntegerField(default=1521, label=_('Port'))


class MariaDBAttrsSerializer(MySQLAttrsSerializer):
    pass


class DatabaseAppSerializer(BulkOrgResourceModelSerializer):

@@ -24,3 +45,6 @@ class DatabaseAppSerializer(BulkOrgResourceModelSerializer):
            'created_by', 'date_created', 'date_updated'
            'get_type_display',
        ]
        extra_kwargs = {
            'get_type_display': {'label': _('Type for display')},
        }
@@ -1,15 +1,20 @@
from rest_framework import serializers
from django.utils.translation import ugettext_lazy as _

from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from .. import models

__all__ = [
    'K8sAppSerializer',
]

class CloudAttrsSerializer(serializers.Serializer):
    cluster = serializers.CharField(max_length=1024, label=_('Cluster'))


class K8sAttrsSerializer(CloudAttrsSerializer):
    pass


class K8sAppSerializer(BulkOrgResourceModelSerializer):
    type_display = serializers.CharField(source='get_type_display', read_only=True)
    type_display = serializers.CharField(source='get_type_display', read_only=True, label=_('Type for display'))

    class Meta:
        model = models.K8sApp
@@ -2,21 +2,138 @@
#

import copy
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ObjectDoesNotExist
from rest_framework import serializers

from common.serializers import AdaptedBulkListSerializer
from common.fields.serializer import CustomMetaDictField
from common.utils import get_logger
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from assets.models import Asset

from .. import const
from ..models import RemoteApp
from ..models import RemoteApp, Category, Application

logger = get_logger(__file__)


__all__ = [
    'RemoteAppSerializer', 'RemoteAppConnectionInfoSerializer',
]
class CharPrimaryKeyRelatedField(serializers.PrimaryKeyRelatedField):

    def to_internal_value(self, data):
        instance = super().to_internal_value(data)
        return str(instance.id)

    def to_representation(self, value):
        # value is instance.id
        if self.pk_field is not None:
            return self.pk_field.to_representation(value)
        return value


class RemoteAppAttrsSerializer(serializers.Serializer):
    asset_info = serializers.SerializerMethodField()
    asset = CharPrimaryKeyRelatedField(queryset=Asset.objects, required=False, label=_("Asset"))
    path = serializers.CharField(max_length=128, label=_('Application path'))

    @staticmethod
    def get_asset_info(obj):
        asset_info = {}
        asset_id = obj.get('asset')
        if not asset_id:
            return asset_info
        try:
            asset = Asset.objects.get(id=asset_id)
            asset_info.update({
                'id': str(asset.id),
                'hostname': asset.hostname
            })
        except ObjectDoesNotExist as e:
            logger.error(e)
        return asset_info


class ChromeAttrsSerializer(RemoteAppAttrsSerializer):
    REMOTE_APP_PATH = 'C:\Program Files (x86)\Google\Chrome\Application\chrome.exe'
    path = serializers.CharField(max_length=128, label=_('Application path'), default=REMOTE_APP_PATH)
    chrome_target = serializers.CharField(max_length=128, allow_blank=True, required=False, label=_('Target URL'))
    chrome_username = serializers.CharField(max_length=128, allow_blank=True, required=False, label=_('Username'))
    chrome_password = serializers.CharField(max_length=128, allow_blank=True, required=False, write_only=True, label=_('Password'))


class MySQLWorkbenchAttrsSerializer(RemoteAppAttrsSerializer):
    REMOTE_APP_PATH = 'C:\Program Files\MySQL\MySQL Workbench 8.0 CE\MySQLWorkbench.exe'
    path = serializers.CharField(max_length=128, label=_('Application path'), default=REMOTE_APP_PATH)
    mysql_workbench_ip = serializers.CharField(max_length=128, allow_blank=True, required=False, label=_('IP'))
    mysql_workbench_port = serializers.IntegerField(required=False, label=_('Port'))
    mysql_workbench_name = serializers.CharField(max_length=128, allow_blank=True, required=False, label=_('Database'))
    mysql_workbench_username = serializers.CharField(max_length=128, allow_blank=True, required=False, label=_('Username'))
    mysql_workbench_password = serializers.CharField(max_length=128, allow_blank=True, required=False, write_only=True, label=_('Password'))


class VMwareClientAttrsSerializer(RemoteAppAttrsSerializer):
    REMOTE_APP_PATH = 'C:\Program Files (x86)\VMware\Infrastructure\Virtual Infrastructure Client\Launcher\VpxClient.exe'
    path = serializers.CharField(max_length=128, label=_('Application path'), default=REMOTE_APP_PATH)
    vmware_target = serializers.CharField(max_length=128, allow_blank=True, required=False, label=_('Target URL'))
    vmware_username = serializers.CharField(max_length=128, allow_blank=True, required=False, label=_('Username'))
    vmware_password = serializers.CharField(max_length=128, allow_blank=True, required=False, write_only=True, label=_('Password'))


class CustomRemoteAppAttrsSeralizers(RemoteAppAttrsSerializer):
    custom_cmdline = serializers.CharField(max_length=128, allow_blank=True, required=False, label=_('Operating parameter'))
    custom_target = serializers.CharField(max_length=128, allow_blank=True, required=False, label=_('Target url'))
    custom_username = serializers.CharField(max_length=128, allow_blank=True, required=False, label=_('Username'))
    custom_password = serializers.CharField(max_length=128, allow_blank=True, required=False, write_only=True, label=_('Password'))


class RemoteAppConnectionInfoSerializer(serializers.ModelSerializer):
    parameter_remote_app = serializers.SerializerMethodField()
    asset = serializers.SerializerMethodField()

    class Meta:
        model = Application
        fields = [
            'id', 'name', 'asset', 'parameter_remote_app',
        ]
        read_only_fields = ['parameter_remote_app']

    @staticmethod
    def get_parameters(obj):
        """
        返回Guacamole需要的RemoteApp配置参数信息中的parameters参数
        """
        serializer_cls = Category.get_type_serializer_cls(obj.type)
        fields = serializer_cls().get_fields()
        fields.pop('asset', None)
        fields_name = list(fields.keys())
        attrs = obj.attrs
        _parameters = list()
        _parameters.append(obj.type)
        for field_name in list(fields_name):
            value = attrs.get(field_name, None)
            if not value:
                continue
            if field_name == 'path':
                value = '\"%s\"' % value
            _parameters.append(str(value))
        _parameters = ' '.join(_parameters)
        return _parameters

    def get_parameter_remote_app(self, obj):
        parameters = self.get_parameters(obj)
        parameter = {
            'program': const.REMOTE_APP_BOOT_PROGRAM_NAME,
            'working_directory': '',
            'parameters': parameters,
        }
        return parameter

    @staticmethod
    def get_asset(obj):
        return obj.attrs.get('asset')


# TODO: DELETE
class RemoteAppParamsDictField(CustomMetaDictField):
    type_fields_map = const.REMOTE_APP_TYPE_FIELDS_MAP
    default_type = const.REMOTE_APP_TYPE_CHROME

@@ -24,8 +141,9 @@ class RemoteAppParamsDictField(CustomMetaDictField):
    convert_key_to_upper = False


# TODO: DELETE
class RemoteAppSerializer(BulkOrgResourceModelSerializer):
    params = RemoteAppParamsDictField()
    params = RemoteAppParamsDictField(label=_('Parameters'))
    type_fields_map = const.REMOTE_APP_TYPE_FIELDS_MAP

    class Meta:

@@ -39,6 +157,10 @@ class RemoteAppSerializer(BulkOrgResourceModelSerializer):
            'created_by', 'date_created', 'asset_info',
            'get_type_display'
        ]
        extra_kwargs = {
            'asset_info': {'label': _('Asset info')},
            'get_type_display': {'label': _('Type for display')},
        }

    def process_params(self, instance, validated_data):
        new_params = copy.deepcopy(validated_data.get('params', {}))

@@ -66,21 +188,3 @@ class RemoteAppSerializer(BulkOrgResourceModelSerializer):
        return super().update(instance, validated_data)


class RemoteAppConnectionInfoSerializer(serializers.ModelSerializer):
    parameter_remote_app = serializers.SerializerMethodField()

    class Meta:
        model = RemoteApp
        fields = [
            'id', 'name', 'asset', 'parameter_remote_app',
        ]
        read_only_fields = ['parameter_remote_app']

    @staticmethod
    def get_parameter_remote_app(obj):
        parameter = {
            'program': const.REMOTE_APP_BOOT_PROGRAM_NAME,
            'working_directory': '',
            'parameters': obj.parameters,
        }
        return parameter
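For reference, get_parameters() above joins the application type, the quoted path, and the remaining non-empty attr values into one space-separated string. With a hypothetical chrome-type application (attrs values invented for illustration) it would look roughly like this:

    # Illustrative only — hypothetical attrs for a chrome-type application.
    attrs = {
        'path': 'C:\\Program Files (x86)\\Google\\Chrome\\Application\\chrome.exe',
        'chrome_target': 'https://192.168.1.1',
        'chrome_username': 'admin',
        'chrome_password': 'secret',
    }
    # Resulting `parameters` string handed to Guacamole:
    # chrome "C:\Program Files (x86)\Google\Chrome\Application\chrome.exe" https://192.168.1.1 admin secret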
@@ -10,6 +10,7 @@ from .. import api
app_name = 'applications'

router = BulkRouter()
router.register(r'applications', api.ApplicationViewSet, 'application')
router.register(r'remote-apps', api.RemoteAppViewSet, 'remote-app')
router.register(r'database-apps', api.DatabaseAppViewSet, 'database-app')
router.register(r'k8s-apps', api.K8sAppViewSet, 'k8s-app')
@@ -94,7 +94,6 @@ class AdminUserAssetsListView(generics.ListAPIView):
    permission_classes = (IsOrgAdmin,)
    serializer_class = serializers.AssetSimpleSerializer
    filter_fields = ("hostname", "ip")
    http_method_names = ['get']
    search_fields = filter_fields

    def get_object(self):

@@ -32,7 +32,7 @@ class AssetViewSet(FilterAssetByNodeMixin, OrgBulkModelViewSet):
    model = Asset
    filter_fields = (
        "hostname", "ip", "systemuser__id", "admin_user__id", "platform__base",
        "is_active", 'ip'
        "is_active"
    )
    search_fields = ("hostname", "ip")
    ordering_fields = ("hostname", "ip", "port", "cpu_cores")

@@ -115,7 +115,6 @@ class AssetTaskCreateApi(generics.CreateAPIView):
class AssetGatewayListApi(generics.ListAPIView):
    permission_classes = (IsOrgAdminOrAppUser,)
    serializer_class = serializers.GatewayWithAuthSerializer
    model = Asset

    def get_queryset(self):
        asset_id = self.kwargs.get('pk')

@@ -1,7 +1,9 @@
# ~*~ coding: utf-8 ~*~

from rest_framework.views import APIView, Response
from django.views.generic.detail import SingleObjectMixin
from django.utils.translation import ugettext as _
from rest_framework.views import APIView, Response
from rest_framework.serializers import ValidationError

from common.utils import get_logger
from common.permissions import IsOrgAdmin, IsOrgAdminOrAppUser

@@ -42,6 +44,10 @@ class GatewayTestConnectionApi(SingleObjectMixin, APIView):
    def post(self, request, *args, **kwargs):
        self.object = self.get_object(Gateway.objects.all())
        local_port = self.request.data.get('port') or self.object.port
        try:
            local_port = int(local_port)
        except ValueError:
            raise ValidationError({'port': _('Number required')})
        ok, e = self.object.test_connective(local_port=local_port)
        if ok:
            return Response("ok")

@@ -69,6 +69,7 @@ class SerializeToTreeNodeMixin:
                'ip': asset.ip,
                'protocols': asset.protocols_as_list,
                'platform': asset.platform_base,
                'org_name': asset.org_name
            },
        }
    }
@@ -5,11 +5,13 @@ from collections import namedtuple, defaultdict
from rest_framework import status
from rest_framework.serializers import ValidationError
from rest_framework.response import Response
from rest_framework.decorators import action
from django.utils.translation import ugettext_lazy as _
from django.shortcuts import get_object_or_404, Http404
from django.utils.decorators import method_decorator
from django.db.models.signals import m2m_changed

from common.const.http import POST
from common.exceptions import SomeoneIsDoingThis
from common.const.signals import PRE_REMOVE, POST_REMOVE
from assets.models import Asset

@@ -19,6 +21,8 @@ from common.const.distributed_lock_key import UPDATE_NODE_TREE_LOCK_KEY
from orgs.mixins.api import OrgModelViewSet
from orgs.mixins import generics
from orgs.lock import org_level_transaction_lock
from orgs.utils import current_org
from assets.tasks import check_node_assets_amount_task
from ..hands import IsOrgAdmin
from ..models import Node
from ..tasks import (

@@ -46,6 +50,11 @@ class NodeViewSet(OrgModelViewSet):
    permission_classes = (IsOrgAdmin,)
    serializer_class = serializers.NodeSerializer

    @action(methods=[POST], detail=False, url_name='launch-check-assets-amount-task')
    def launch_check_assets_amount_task(self, request):
        task = check_node_assets_amount_task.delay(current_org.id)
        return Response(data={'task': task.id})

    # 仅支持根节点指直接创建,子节点下的节点需要通过children接口创建
    def perform_create(self, serializer):
        child_key = Node.org_root().get_next_child_key()

@@ -61,6 +70,9 @@ class NodeViewSet(OrgModelViewSet):

    def destroy(self, request, *args, **kwargs):
        node = self.get_object()
        if node.is_org_root():
            error = _("You can't delete the root node ({})".format(node.value))
            return Response(data={'error': error}, status=status.HTTP_403_FORBIDDEN)
        if node.has_children_or_has_assets():
            error = _("Deletion failed and the node contains children or assets")
            return Response(data={'error': error}, status=status.HTTP_403_FORBIDDEN)

@@ -173,7 +185,7 @@ class NodeChildrenAsTreeApi(SerializeToTreeNodeMixin, NodeChildrenApi):
            return []
        assets = self.instance.get_assets().only(
            "id", "hostname", "ip", "os",
            "org_id", "protocols",
            "org_id", "protocols", "is_active"
        )
        return self.serialize_assets(assets, self.instance.key)

@@ -201,10 +213,8 @@ class NodeAddChildrenApi(generics.UpdateAPIView):
    def put(self, request, *args, **kwargs):
        instance = self.get_object()
        nodes_id = request.data.get("nodes")
        children = [get_object_or_none(Node, id=pk) for pk in nodes_id]
        children = Node.objects.filter(id__in=nodes_id)
        for node in children:
            if not node:
                continue
            node.parent = instance
        return Response("OK")
@@ -3,7 +3,8 @@ from django.shortcuts import get_object_or_404
from rest_framework.response import Response

from common.utils import get_logger
from common.permissions import IsOrgAdmin, IsOrgAdminOrAppUser, IsAppUser
from common.permissions import IsOrgAdmin, IsOrgAdminOrAppUser
from common.drf.filters import CustomFilter
from orgs.mixins.api import OrgBulkModelViewSet
from orgs.mixins import generics
from orgs.utils import tmp_to_org

@@ -12,14 +13,14 @@ from .. import serializers
from ..serializers import SystemUserWithAuthInfoSerializer
from ..tasks import (
    push_system_user_to_assets_manual, test_system_user_connectivity_manual,
    push_system_user_a_asset_manual,
    push_system_user_to_assets
)


logger = get_logger(__file__)
__all__ = [
    'SystemUserViewSet', 'SystemUserAuthInfoApi', 'SystemUserAssetAuthInfoApi',
    'SystemUserCommandFilterRuleListApi', 'SystemUserTaskApi',
    'SystemUserCommandFilterRuleListApi', 'SystemUserTaskApi', 'SystemUserAssetsListView',
]

@@ -82,18 +83,18 @@ class SystemUserTaskApi(generics.CreateAPIView):
    permission_classes = (IsOrgAdmin,)
    serializer_class = serializers.SystemUserTaskSerializer

    def do_push(self, system_user, asset=None):
        if asset is None:
    def do_push(self, system_user, assets_id=None):
        if assets_id is None:
            task = push_system_user_to_assets_manual.delay(system_user)
        else:
            username = self.request.query_params.get('username')
            task = push_system_user_a_asset_manual.delay(
                system_user, asset, username=username
            task = push_system_user_to_assets.delay(
                system_user.id, assets_id, username=username
            )
        return task

    @staticmethod
    def do_test(system_user, asset=None):
    def do_test(system_user):
        task = test_system_user_connectivity_manual.delay(system_user)
        return task

@@ -104,11 +105,16 @@ class SystemUserTaskApi(generics.CreateAPIView):
    def perform_create(self, serializer):
        action = serializer.validated_data["action"]
        asset = serializer.validated_data.get('asset')
        assets = serializer.validated_data.get('assets') or []

        system_user = self.get_object()
        if action == 'push':
            task = self.do_push(system_user, asset)
            assets = [asset] if asset else assets
            assets_id = [asset.id for asset in assets]
            assets_id = assets_id if assets_id else None
            task = self.do_push(system_user, assets_id)
        else:
            task = self.do_test(system_user, asset)
            task = self.do_test(system_user)
        data = getattr(serializer, '_data', {})
        data["task"] = task.id
        setattr(serializer, '_data', data)

@@ -125,3 +131,18 @@ class SystemUserCommandFilterRuleListApi(generics.ListAPIView):
        pk = self.kwargs.get('pk', None)
        system_user = get_object_or_404(SystemUser, pk=pk)
        return system_user.cmd_filter_rules


class SystemUserAssetsListView(generics.ListAPIView):
    permission_classes = (IsOrgAdmin,)
    serializer_class = serializers.AssetSimpleSerializer
    filter_fields = ("hostname", "ip")
    search_fields = filter_fields

    def get_object(self):
        pk = self.kwargs.get('pk')
        return get_object_or_404(SystemUser, pk=pk)

    def get_queryset(self):
        system_user = self.get_object()
        return system_user.get_all_assets()

@@ -95,7 +95,7 @@ class SystemUserNodeRelationViewSet(BaseRelationViewSet):
        'id', 'node', 'systemuser',
    ]
    search_fields = [
        "node__value", "systemuser__name", "systemuser_username"
        "node__value", "systemuser__name", "systemuser__username"
    ]

    def get_objects_attr(self):
27
apps/assets/migrations/0058_auto_20201023_1115.py
Normal file
27
apps/assets/migrations/0058_auto_20201023_1115.py
Normal file
@@ -0,0 +1,27 @@
|
||||
# Generated by Django 2.2.13 on 2020-10-23 03:15
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('assets', '0057_fill_node_value_assets_amount_and_parent_key'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='asset',
|
||||
options={'ordering': ['-date_created'], 'verbose_name': 'Asset'},
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='asset',
|
||||
name='comment',
|
||||
field=models.TextField(blank=True, default='', verbose_name='Comment'),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='commandfilterrule',
|
||||
name='content',
|
||||
field=models.TextField(help_text='One line one command', verbose_name='Content'),
|
||||
),
|
||||
]
|
||||
28
apps/assets/migrations/0059_auto_20201027_1905.py
Normal file
28
apps/assets/migrations/0059_auto_20201027_1905.py
Normal file
@@ -0,0 +1,28 @@
|
||||
# Generated by Django 2.2.13 on 2020-10-27 11:05
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('assets', '0058_auto_20201023_1115'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='systemuser',
|
||||
name='protocol',
|
||||
field=models.CharField(choices=[('ssh', 'ssh'), ('rdp', 'rdp'), ('telnet', 'telnet'), ('vnc', 'vnc'), ('mysql', 'mysql'), ('oracle', 'oracle'), ('mariadb', 'mariadb'), ('postgresql', 'postgresql'), ('k8s', 'k8s')], default='ssh', max_length=16, verbose_name='Protocol'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='systemuser',
|
||||
name='ad_domain',
|
||||
field=models.CharField(default='', max_length=256),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='gateway',
|
||||
name='ip',
|
||||
field=models.CharField(db_index=True, max_length=128, verbose_name='IP'),
|
||||
),
|
||||
]
|
||||
58
apps/assets/migrations/0060_node_full_value.py
Normal file
58
apps/assets/migrations/0060_node_full_value.py
Normal file
@@ -0,0 +1,58 @@
|
||||
# Generated by Django 2.2.13 on 2020-10-26 11:31
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
def get_node_ancestor_keys(key, with_self=False):
|
||||
parent_keys = []
|
||||
key_list = key.split(":")
|
||||
if not with_self:
|
||||
key_list.pop()
|
||||
for i in range(len(key_list)):
|
||||
parent_keys.append(":".join(key_list))
|
||||
key_list.pop()
|
||||
return parent_keys
|
||||
|
||||
|
||||
def migrate_nodes_value_with_slash(apps, schema_editor):
|
||||
model = apps.get_model("assets", "Node")
|
||||
db_alias = schema_editor.connection.alias
|
||||
nodes = model.objects.using(db_alias).filter(value__contains='/')
|
||||
print('')
|
||||
print("- Start migrate node value if has /")
|
||||
for i, node in enumerate(list(nodes)):
|
||||
new_value = node.value.replace('/', '|')
|
||||
print("{} start migrate node value: {} => {}".format(i, node.value, new_value))
|
||||
node.value = new_value
|
||||
node.save()
|
||||
|
||||
|
||||
def migrate_nodes_full_value(apps, schema_editor):
|
||||
model = apps.get_model("assets", "Node")
|
||||
db_alias = schema_editor.connection.alias
|
||||
nodes = model.objects.using(db_alias).all()
|
||||
print("- Start migrate node full value")
|
||||
for i, node in enumerate(list(nodes)):
|
||||
print("{} start migrate {} node full value".format(i, node.value))
|
||||
ancestor_keys = get_node_ancestor_keys(node.key, True)
|
||||
values = model.objects.filter(key__in=ancestor_keys).values_list('key', 'value')
|
||||
values = [v for k, v in sorted(values, key=lambda x: len(x[0]))]
|
||||
node.full_value = '/' + '/'.join(values)
|
||||
node.save()
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('assets', '0059_auto_20201027_1905'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='node',
|
||||
name='full_value',
|
||||
field=models.CharField(default='', max_length=4096, verbose_name='Full value'),
|
||||
),
|
||||
migrations.RunPython(migrate_nodes_value_with_slash),
|
||||
migrations.RunPython(migrate_nodes_full_value)
|
||||
]
|
||||
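For reference, here is a minimal standalone sketch of how the ancestor-key helper in the 0060 migration behaves, plus the join that produces the full value (pure Python, no Django required; the sample key '1:2:3' and the node names are illustrative only):

def get_node_ancestor_keys(key, with_self=False):
    # Copied from the migration above: drop the last segment unless with_self,
    # then emit progressively shorter prefixes.
    parent_keys = []
    key_list = key.split(":")
    if not with_self:
        key_list.pop()
    for _ in range(len(key_list)):
        parent_keys.append(":".join(key_list))
        key_list.pop()
    return parent_keys

assert get_node_ancestor_keys("1:2:3") == ["1:2", "1"]
assert get_node_ancestor_keys("1:2:3", with_self=True) == ["1:2:3", "1:2", "1"]

# The migration then sorts ancestor values by key length and joins them,
# so a chain Default -> Web -> App becomes '/Default/Web/App'.
values = ["Default", "Web", "App"]
assert "/" + "/".join(values) == "/Default/Web/App"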
17  apps/assets/migrations/0061_auto_20201116_1757.py  Normal file
@@ -0,0 +1,17 @@
# Generated by Django 2.2.13 on 2020-11-16 09:57

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('assets', '0060_node_full_value'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='node',
            options={'ordering': ['value'], 'verbose_name': 'Node'},
        ),
    ]
17  apps/assets/migrations/0062_auto_20201117_1938.py  Normal file
@@ -0,0 +1,17 @@
# Generated by Django 2.2.13 on 2020-11-17 11:38

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('assets', '0061_auto_20201116_1757'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='asset',
            options={'ordering': ['hostname', 'ip'], 'verbose_name': 'Asset'},
        ),
    ]
72  apps/assets/migrations/0063_migrate_default_node_key.py  Normal file
@@ -0,0 +1,72 @@
|
||||
# Generated by Jiangjie.Bai on 2020-12-01 10:47
|
||||
|
||||
from django.db import migrations
|
||||
from django.db.models import Q
|
||||
|
||||
default_node_value = 'Default' # Always
|
||||
old_default_node_key = '0' # Version <= 1.4.3
|
||||
new_default_node_key = '1' # Version >= 1.4.4
|
||||
|
||||
|
||||
def compute_parent_key(key):
|
||||
try:
|
||||
return key[:key.rindex(':')]
|
||||
except ValueError:
|
||||
return ''
|
||||
|
||||
|
||||
def migrate_default_node_key(apps, schema_editor):
|
||||
""" 将已经存在的Default节点的key从0修改为1 """
|
||||
# In version 1.4.3 the Default node's key was 0
|
||||
print('')
|
||||
Node = apps.get_model('assets', 'Node')
|
||||
Asset = apps.get_model('assets', 'Asset')
|
||||
|
||||
# The node whose key is 0
|
||||
old_default_node = Node.objects.filter(key=old_default_node_key, value=default_node_value).first()
|
||||
if not old_default_node:
|
||||
print(f'Check old default node `key={old_default_node_key} value={default_node_value}` does not exist')
|
||||
return
|
||||
print(f'Check old default node `key={old_default_node_key} value={default_node_value}` exists')
|
||||
# The node whose key is 1
|
||||
new_default_node = Node.objects.filter(key=new_default_node_key, value=default_node_value).first()
|
||||
if new_default_node:
|
||||
print(f'Check new default node `key={new_default_node_key} value={default_node_value}` exists')
|
||||
all_assets = Asset.objects.filter(
|
||||
Q(nodes__key__startswith=f'{new_default_node_key}:') | Q(nodes__key=new_default_node_key)
|
||||
).distinct()
|
||||
if all_assets:
|
||||
print(f'Check new default node has assets (count: {len(all_assets)})')
|
||||
return
|
||||
all_children = Node.objects.filter(key__startswith=f'{new_default_node_key}:')
|
||||
if all_children:
|
||||
print(f'Check new default node has children nodes (count: {len(all_children)})')
|
||||
return
|
||||
print('Check new default node has no assets and no children nodes, delete it.')
|
||||
new_default_node.delete()
|
||||
# Apply the key change
|
||||
print(f'Modify old default node `key` from `{old_default_node_key}` to `{new_default_node_key}`')
|
||||
nodes = Node.objects.filter(
|
||||
Q(key__istartswith=f'{old_default_node_key}:') | Q(key=old_default_node_key)
|
||||
)
|
||||
for node in nodes:
|
||||
old_key = node.key
|
||||
key_list = old_key.split(':', maxsplit=1)
|
||||
key_list[0] = new_default_node_key
|
||||
new_key = ':'.join(key_list)
|
||||
node.key = new_key
|
||||
node.parent_key = compute_parent_key(node.key)
|
||||
# Bulk update
|
||||
print(f'Bulk update nodes `key` and `parent_key`, (count: {len(nodes)})')
|
||||
Node.objects.bulk_update(nodes, ['key', 'parent_key'])
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('assets', '0062_auto_20201117_1938'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(migrate_default_node_key)
|
||||
]
|
||||
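A minimal sketch of the re-keying step performed by this migration, outside of Django: the first key segment is swapped from the old default key to the new one and the parent key is recomputed (the sample keys are illustrative):

OLD_DEFAULT_KEY = '0'
NEW_DEFAULT_KEY = '1'

def compute_parent_key(key):
    # Everything before the last ':', or '' for a root key.
    try:
        return key[:key.rindex(':')]
    except ValueError:
        return ''

def rekey(old_key):
    key_list = old_key.split(':', maxsplit=1)
    key_list[0] = NEW_DEFAULT_KEY
    return ':'.join(key_list)

assert rekey('0') == '1'
assert rekey('0:3:7') == '1:3:7'
assert compute_parent_key('1:3:7') == '1:3'
assert compute_parent_key('1') == ''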
17  apps/assets/migrations/0064_auto_20201203_1100.py  Normal file
@@ -0,0 +1,17 @@
# Generated by Django 3.1 on 2020-12-03 03:00

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('assets', '0063_migrate_default_node_key'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='node',
            options={'ordering': ['parent_key', 'value'], 'verbose_name': 'Node'},
        ),
    ]
@@ -227,7 +227,7 @@ class Asset(ProtocolsMixin, NodesRelationMixin, OrgModelMixin):
|
||||
labels = models.ManyToManyField('assets.Label', blank=True, related_name='assets', verbose_name=_("Labels"))
|
||||
created_by = models.CharField(max_length=128, null=True, blank=True, verbose_name=_('Created by'))
|
||||
date_created = models.DateTimeField(auto_now_add=True, null=True, blank=True, verbose_name=_('Date created'))
|
||||
comment = models.TextField(max_length=128, default='', blank=True, verbose_name=_('Comment'))
|
||||
comment = models.TextField(default='', blank=True, verbose_name=_('Comment'))
|
||||
|
||||
objects = AssetManager.from_queryset(AssetQuerySet)()
|
||||
org_objects = AssetOrgManager.from_queryset(AssetQuerySet)()
|
||||
@@ -313,6 +313,12 @@ class Asset(ProtocolsMixin, NodesRelationMixin, OrgModelMixin):
|
||||
}
|
||||
return info
|
||||
|
||||
def nodes_display(self):
|
||||
names = []
|
||||
for n in self.nodes.all():
|
||||
names.append(n.full_value)
|
||||
return names
|
||||
|
||||
def as_node(self):
|
||||
from .node import Node
|
||||
fake_node = Node()
|
||||
@@ -355,3 +361,4 @@ class Asset(ProtocolsMixin, NodesRelationMixin, OrgModelMixin):
|
||||
class Meta:
|
||||
unique_together = [('org_id', 'hostname')]
|
||||
verbose_name = _("Asset")
|
||||
ordering = ["hostname", "ip"]
|
||||
|
||||
@@ -158,9 +158,11 @@ class AuthMixin:
|
||||
if update_fields:
|
||||
self.save(update_fields=update_fields)
|
||||
|
||||
def has_special_auth(self, asset=None):
|
||||
def has_special_auth(self, asset=None, username=None):
|
||||
from .authbook import AuthBook
|
||||
queryset = AuthBook.objects.filter(username=self.username)
|
||||
if username is None:
|
||||
username = self.username
|
||||
queryset = AuthBook.objects.filter(username=username)
|
||||
if asset:
|
||||
queryset = queryset.filter(asset=asset)
|
||||
return queryset.exists()
|
||||
|
||||
@@ -52,7 +52,7 @@ class CommandFilterRule(OrgModelMixin):
|
||||
type = models.CharField(max_length=16, default=TYPE_COMMAND, choices=TYPE_CHOICES, verbose_name=_("Type"))
|
||||
priority = models.IntegerField(default=50, verbose_name=_("Priority"), help_text=_("1-100, the higher will be match first"),
|
||||
validators=[MinValueValidator(1), MaxValueValidator(100)])
|
||||
content = models.TextField(max_length=1024, verbose_name=_("Content"), help_text=_("One line one command"))
|
||||
content = models.TextField(verbose_name=_("Content"), help_text=_("One line one command"))
|
||||
action = models.IntegerField(default=ACTION_DENY, choices=ACTION_CHOICES, verbose_name=_("Action"))
|
||||
comment = models.CharField(max_length=64, blank=True, default='', verbose_name=_("Comment"))
|
||||
date_created = models.DateTimeField(auto_now_add=True)
|
||||
|
||||
@@ -9,6 +9,7 @@ import paramiko
|
||||
from django.db import models
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from common.utils.strings import no_special_chars
|
||||
from orgs.mixins.models import OrgModelMixin
|
||||
from .base import BaseUser
|
||||
|
||||
@@ -47,7 +48,7 @@ class Gateway(BaseUser):
|
||||
(PROTOCOL_SSH, 'ssh'),
|
||||
(PROTOCOL_RDP, 'rdp'),
|
||||
)
|
||||
ip = models.GenericIPAddressField(max_length=32, verbose_name=_('IP'), db_index=True)
|
||||
ip = models.CharField(max_length=128, verbose_name=_('IP'), db_index=True)
|
||||
port = models.IntegerField(default=22, verbose_name=_('Port'))
|
||||
protocol = models.CharField(choices=PROTOCOL_CHOICES, max_length=16, default=PROTOCOL_SSH, verbose_name=_("Protocol"))
|
||||
domain = models.ForeignKey(Domain, on_delete=models.CASCADE, verbose_name=_("Domain"))
|
||||
@@ -64,8 +65,8 @@ class Gateway(BaseUser):
|
||||
def test_connective(self, local_port=None):
|
||||
if local_port is None:
|
||||
local_port = self.port
|
||||
if self.password and not re.match(r'\w+$', self.password):
|
||||
return False, _("Password should not contain special characters")
|
||||
if self.password and not no_special_chars(self.password):
|
||||
return False, _("Password should not contains special characters")
|
||||
|
||||
client = paramiko.SSHClient()
|
||||
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
|
||||
|
||||
@@ -20,9 +20,13 @@ class FavoriteAsset(CommonModelMixin):
|
||||
return cls.objects.filter(user=user).values_list('asset', flat=True)
|
||||
|
||||
@classmethod
|
||||
def get_user_favorite_assets(cls, user):
|
||||
def get_user_favorite_assets(cls, user, asset_perms_id=None):
|
||||
from assets.models import Asset
|
||||
from perms.utils.user_asset_permission import get_user_granted_all_assets
|
||||
asset_ids = get_user_granted_all_assets(user).values_list('id', flat=True)
|
||||
from perms.utils.asset.user_permission import get_user_granted_all_assets
|
||||
asset_ids = get_user_granted_all_assets(
|
||||
user,
|
||||
via_mapping_node=False,
|
||||
asset_perms_id=asset_perms_id
|
||||
).values_list('id', flat=True)
|
||||
query_name = cls.asset.field.related_query_name()
|
||||
return Asset.org_objects.filter(**{f'{query_name}__user_id': user.id}, id__in=asset_ids).distinct()
|
||||
|
||||
@@ -13,7 +13,7 @@ from django.db.transaction import atomic
|
||||
from common.utils import get_logger
|
||||
from common.utils.common import lazyproperty
|
||||
from orgs.mixins.models import OrgModelMixin, OrgManager
|
||||
from orgs.utils import get_current_org, tmp_to_org, current_org
|
||||
from orgs.utils import get_current_org, tmp_to_org
|
||||
from orgs.models import Organization
|
||||
|
||||
|
||||
@@ -103,7 +103,7 @@ class FamilyMixin:
|
||||
if value is None:
|
||||
value = child_key
|
||||
child = self.__class__.objects.create(
|
||||
id=_id, key=child_key, value=value, parent_key=self.key,
|
||||
id=_id, key=child_key, value=value
|
||||
)
|
||||
return child
|
||||
|
||||
@@ -205,6 +205,30 @@ class FamilyMixin:
|
||||
sibling = sibling.exclude(key=self.key)
|
||||
return sibling
|
||||
|
||||
@classmethod
|
||||
def create_node_by_full_value(cls, full_value):
|
||||
if not full_value:
|
||||
return []
|
||||
nodes_family = full_value.split('/')
|
||||
nodes_family = [v for v in nodes_family if v]
|
||||
org_root = cls.org_root()
|
||||
if nodes_family[0] == org_root.value:
|
||||
nodes_family = nodes_family[1:]
|
||||
return cls.create_nodes_recurse(nodes_family, org_root)
|
||||
|
||||
@classmethod
|
||||
def create_nodes_recurse(cls, values, parent=None):
|
||||
values = [v for v in values if v]
|
||||
if not values:
|
||||
return None
|
||||
if parent is None:
|
||||
parent = cls.org_root()
|
||||
value = values[0]
|
||||
child, created = parent.get_or_create_child(value=value)
|
||||
if len(values) == 1:
|
||||
return child
|
||||
return cls.create_nodes_recurse(values[1:], child)
|
||||
|
||||
def get_family(self):
|
||||
ancestors = self.get_ancestors()
|
||||
children = self.get_all_children()
|
||||
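The recursive creation added above can be pictured with a plain-dict stand-in for the node tree (a sketch only; the real method goes through get_or_create_child and the org root):

# Sketch: walk '/Default/Web/App' one segment at a time, creating missing
# children in a nested dict that stands in for the node tree.
def create_nodes_recurse(values, parent):
    values = [v for v in values if v]
    if not values:
        return None
    child = parent.setdefault(values[0], {})   # stand-in for get_or_create_child
    if len(values) == 1:
        return child
    return create_nodes_recurse(values[1:], child)

tree = {'Default': {}}                         # org root value is 'Default'
parts = [v for v in '/Default/Web/App'.split('/') if v]
if parts and parts[0] == 'Default':            # skip the org root segment
    parts = parts[1:]
create_nodes_recurse(parts, tree['Default'])
assert tree == {'Default': {'Web': {'App': {}}}}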
@@ -328,7 +352,10 @@ class SomeNodesMixin:
|
||||
|
||||
@classmethod
|
||||
def org_root(cls):
|
||||
root = cls.objects.filter(parent_key='').exclude(key__startswith='-')
|
||||
root = cls.objects.filter(parent_key='')\
|
||||
.filter(key__regex=r'^[0-9]+$')\
|
||||
.exclude(key__startswith='-')\
|
||||
.order_by('key')
|
||||
if root:
|
||||
return root[0]
|
||||
else:
|
||||
@@ -372,6 +399,7 @@ class Node(OrgModelMixin, SomeNodesMixin, FamilyMixin, NodeAssetsMixin):
|
||||
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
|
||||
key = models.CharField(unique=True, max_length=64, verbose_name=_("Key")) # '1:1:1:1'
|
||||
value = models.CharField(max_length=128, verbose_name=_("Value"))
|
||||
full_value = models.CharField(max_length=4096, verbose_name=_('Full value'), default='')
|
||||
child_mark = models.IntegerField(default=0)
|
||||
date_create = models.DateTimeField(auto_now_add=True)
|
||||
parent_key = models.CharField(max_length=64, verbose_name=_("Parent key"),
|
||||
@@ -384,10 +412,10 @@ class Node(OrgModelMixin, SomeNodesMixin, FamilyMixin, NodeAssetsMixin):
|
||||
|
||||
class Meta:
|
||||
verbose_name = _("Node")
|
||||
ordering = ['key']
|
||||
ordering = ['parent_key', 'value']
|
||||
|
||||
def __str__(self):
|
||||
return self.value
|
||||
return self.full_value
|
||||
|
||||
# def __eq__(self, other):
|
||||
# if not other:
|
||||
@@ -411,15 +439,14 @@ class Node(OrgModelMixin, SomeNodesMixin, FamilyMixin, NodeAssetsMixin):
|
||||
def name(self):
|
||||
return self.value
|
||||
|
||||
@lazyproperty
|
||||
def full_value(self):
|
||||
def computed_full_value(self):
|
||||
# Do not call this property in list contexts
|
||||
values = self.__class__.objects.filter(
|
||||
key__in=self.get_ancestor_keys()
|
||||
).values_list('key', 'value')
|
||||
values = [v for k, v in sorted(values, key=lambda x: len(x[0]))]
|
||||
values.append(self.value)
|
||||
return ' / '.join(values)
|
||||
values.append(str(self.value))
|
||||
return '/' + '/'.join(values)
|
||||
|
||||
@property
|
||||
def level(self):
|
||||
@@ -458,3 +485,27 @@ class Node(OrgModelMixin, SomeNodesMixin, FamilyMixin, NodeAssetsMixin):
|
||||
if self.has_children_or_has_assets():
|
||||
return
|
||||
return super().delete(using=using, keep_parents=keep_parents)
|
||||
|
||||
def update_child_full_value(self):
|
||||
nodes = self.get_all_children(with_self=True)
|
||||
sort_key_func = lambda n: [int(i) for i in n.key.split(':')]
|
||||
nodes_sorted = sorted(list(nodes), key=sort_key_func)
|
||||
nodes_mapper = {n.key: n for n in nodes_sorted}
|
||||
if not self.is_org_root():
|
||||
# If this is the org root, parent_key is '' and the parent is itself, so skip this case
|
||||
# When updating this node itself, its own parent_key is not available in the children set
|
||||
nodes_mapper.update({self.parent_key: self.parent})
|
||||
for node in nodes_sorted:
|
||||
parent = nodes_mapper.get(node.parent_key)
|
||||
if not parent:
|
||||
if node.parent_key:
|
||||
logger.error(f'Node parent not found in mapper: {node.parent_key} {node.value}')
|
||||
continue
|
||||
node.full_value = parent.full_value + '/' + node.value
|
||||
self.__class__.objects.bulk_update(nodes, ['full_value'])
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
self.full_value = self.computed_full_value()
|
||||
instance = super().save(*args, **kwargs)
|
||||
self.update_child_full_value()
|
||||
return instance
|
||||
|
||||
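The propagation done by update_child_full_value can be sketched without the ORM: sort keys by their numeric segments so parents are processed before children, then build each full value from the parent's (the data below is illustrative):

# Sketch: recompute full_value for a subtree, parents before children.
nodes = {
    '1':     {'value': 'Default', 'parent_key': '',    'full_value': '/Default'},
    '1:2':   {'value': 'Web',     'parent_key': '1'},
    '1:2:5': {'value': 'App',     'parent_key': '1:2'},
    '1:10':  {'value': 'DB',      'parent_key': '1'},
}
sort_key = lambda k: [int(i) for i in k.split(':')]
for key in sorted(nodes, key=sort_key):
    node = nodes[key]
    parent = nodes.get(node['parent_key'])
    if parent:
        node['full_value'] = parent['full_value'] + '/' + node['value']

assert nodes['1:2:5']['full_value'] == '/Default/Web/App'
assert nodes['1:10']['full_value'] == '/Default/DB'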
@@ -72,6 +72,9 @@ class SystemUser(BaseUser):
|
||||
PROTOCOL_TELNET = 'telnet'
|
||||
PROTOCOL_VNC = 'vnc'
|
||||
PROTOCOL_MYSQL = 'mysql'
|
||||
PROTOCOL_ORACLE = 'oracle'
|
||||
PROTOCOL_MARIADB = 'mariadb'
|
||||
PROTOCOL_POSTGRESQL = 'postgresql'
|
||||
PROTOCOL_K8S = 'k8s'
|
||||
PROTOCOL_CHOICES = (
|
||||
(PROTOCOL_SSH, 'ssh'),
|
||||
@@ -79,6 +82,9 @@ class SystemUser(BaseUser):
|
||||
(PROTOCOL_TELNET, 'telnet'),
|
||||
(PROTOCOL_VNC, 'vnc'),
|
||||
(PROTOCOL_MYSQL, 'mysql'),
|
||||
(PROTOCOL_ORACLE, 'oracle'),
|
||||
(PROTOCOL_MARIADB, 'mariadb'),
|
||||
(PROTOCOL_POSTGRESQL, 'postgresql'),
|
||||
(PROTOCOL_K8S, 'k8s'),
|
||||
)
|
||||
|
||||
@@ -104,6 +110,7 @@ class SystemUser(BaseUser):
|
||||
token = models.TextField(default='', verbose_name=_('Token'))
|
||||
home = models.CharField(max_length=4096, default='', verbose_name=_('Home'), blank=True)
|
||||
system_groups = models.CharField(default='', max_length=4096, verbose_name=_('System groups'), blank=True)
|
||||
ad_domain = models.CharField(default='', max_length=256)
|
||||
_prefer = 'system_user'
|
||||
|
||||
def __str__(self):
|
||||
@@ -126,6 +133,24 @@ class SystemUser(BaseUser):
|
||||
def login_mode_display(self):
|
||||
return self.get_login_mode_display()
|
||||
|
||||
@property
|
||||
def db_application_protocols(self):
|
||||
return [
|
||||
self.PROTOCOL_MYSQL, self.PROTOCOL_ORACLE, self.PROTOCOL_MARIADB,
|
||||
self.PROTOCOL_POSTGRESQL
|
||||
]
|
||||
|
||||
@property
|
||||
def cloud_application_protocols(self):
|
||||
return [self.PROTOCOL_K8S]
|
||||
|
||||
@property
|
||||
def application_category_protocols(self):
|
||||
protocols = []
|
||||
protocols.extend(self.db_application_protocols)
|
||||
protocols.extend(self.cloud_application_protocols)
|
||||
return protocols
|
||||
|
||||
def is_need_push(self):
|
||||
if self.auto_push and self.protocol in [self.PROTOCOL_SSH, self.PROTOCOL_RDP]:
|
||||
return True
|
||||
@@ -138,11 +163,16 @@ class SystemUser(BaseUser):
|
||||
|
||||
@property
|
||||
def is_need_test_asset_connective(self):
|
||||
return self.protocol not in [self.PROTOCOL_MYSQL]
|
||||
return self.protocol not in self.application_category_protocols
|
||||
|
||||
def has_special_auth(self, asset=None, username=None):
|
||||
if username is None and self.username_same_with_user:
|
||||
raise TypeError('System user is dynamic, username should be passed')
|
||||
return super().has_special_auth(asset=asset, username=username)
|
||||
|
||||
@property
|
||||
def can_perm_to_asset(self):
|
||||
return self.protocol not in [self.PROTOCOL_MYSQL]
|
||||
return self.protocol not in self.application_category_protocols
|
||||
|
||||
def _merge_auth(self, other):
|
||||
super()._merge_auth(other)
|
||||
|
||||
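A compact sketch of the protocol gating introduced above: database and cloud protocols are grouped into application-category protocols, and system users using them are excluded from asset connectivity tests and asset permissions (pure Python, mirroring the properties shown in the diff):

DB_PROTOCOLS = ['mysql', 'oracle', 'mariadb', 'postgresql']
CLOUD_PROTOCOLS = ['k8s']
APPLICATION_PROTOCOLS = DB_PROTOCOLS + CLOUD_PROTOCOLS

def can_perm_to_asset(protocol):
    # Application-category system users are not granted directly to assets.
    return protocol not in APPLICATION_PROTOCOLS

assert can_perm_to_asset('ssh')
assert not can_perm_to_asset('postgresql')
assert not can_perm_to_asset('k8s')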
@@ -1,13 +1,12 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from rest_framework import serializers
|
||||
from django.db.models import Prefetch, F, Count
|
||||
from django.db.models import F
|
||||
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
from common.serializers import AdaptedBulkListSerializer
|
||||
from ..models import Asset, Node, Label, Platform
|
||||
from ..models import Asset, Node, Platform
|
||||
from .base import ConnectivitySerializer
|
||||
|
||||
__all__ = [
|
||||
@@ -67,8 +66,9 @@ class AssetSerializer(BulkOrgResourceModelSerializer):
|
||||
slug_field='name', queryset=Platform.objects.all(), label=_("Platform")
|
||||
)
|
||||
protocols = ProtocolsField(label=_('Protocols'), required=False)
|
||||
domain_display = serializers.ReadOnlyField(source='domain.name')
|
||||
admin_user_display = serializers.ReadOnlyField(source='admin_user.name')
|
||||
domain_display = serializers.ReadOnlyField(source='domain.name', label=_('Domain name'))
|
||||
admin_user_display = serializers.ReadOnlyField(source='admin_user.name', label=_('Admin user name'))
|
||||
nodes_display = serializers.ListField(child=serializers.CharField(), label=_('Nodes name'), required=False)
|
||||
|
||||
"""
|
||||
Data structure of an asset
|
||||
@@ -90,7 +90,7 @@ class AssetSerializer(BulkOrgResourceModelSerializer):
|
||||
'platform': ['name']
|
||||
}
|
||||
fields_m2m = [
|
||||
'nodes', 'labels',
|
||||
'nodes', 'nodes_display', 'labels',
|
||||
]
|
||||
annotates_fields = {
|
||||
# 'admin_user_display': 'admin_user__name'
|
||||
@@ -98,9 +98,6 @@ class AssetSerializer(BulkOrgResourceModelSerializer):
|
||||
fields_as = list(annotates_fields.keys())
|
||||
fields = fields_small + fields_fk + fields_m2m + fields_as
|
||||
read_only_fields = [
|
||||
'vendor', 'model', 'sn', 'cpu_model', 'cpu_count',
|
||||
'cpu_cores', 'cpu_vcpus', 'memory', 'disk_total', 'disk_info',
|
||||
'os', 'os_version', 'os_arch', 'hostname_raw',
|
||||
'created_by', 'date_created',
|
||||
] + fields_as
|
||||
|
||||
@@ -133,14 +130,32 @@ class AssetSerializer(BulkOrgResourceModelSerializer):
|
||||
if protocols_data:
|
||||
validated_data["protocols"] = ' '.join(protocols_data)
|
||||
|
||||
def perform_nodes_display_create(self, instance, nodes_display):
|
||||
if not nodes_display:
|
||||
return
|
||||
nodes_to_set = []
|
||||
for full_value in nodes_display:
|
||||
node = Node.objects.filter(full_value=full_value).first()
|
||||
if node:
|
||||
nodes_to_set.append(node)
|
||||
else:
|
||||
node = Node.create_node_by_full_value(full_value)
|
||||
nodes_to_set.append(node)
|
||||
instance.nodes.set(nodes_to_set)
|
||||
|
||||
def create(self, validated_data):
|
||||
self.compatible_with_old_protocol(validated_data)
|
||||
nodes_display = validated_data.pop('nodes_display', '')
|
||||
instance = super().create(validated_data)
|
||||
self.perform_nodes_display_create(instance, nodes_display)
|
||||
return instance
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
nodes_display = validated_data.pop('nodes_display', '')
|
||||
self.compatible_with_old_protocol(validated_data)
|
||||
return super().update(instance, validated_data)
|
||||
instance = super().update(instance, validated_data)
|
||||
self.perform_nodes_display_create(instance, nodes_display)
|
||||
return instance
|
||||
|
||||
|
||||
class AssetDisplaySerializer(AssetSerializer):
|
||||
|
||||
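As an illustration of the new nodes_display field (a hypothetical request body, not taken from the API docs; the field formats shown are illustrative): existing full paths are matched by full_value, and missing ones are created on the fly via create_node_by_full_value.

# Hypothetical asset payload using nodes_display instead of node ids.
payload = {
    "hostname": "web-01",
    "ip": "10.0.0.11",
    "platform": "Linux",
    "nodes_display": [
        "/Default/Web",        # existing node: looked up by full_value
        "/Default/Web/App",    # missing node: created on the fly
    ],
}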
@@ -1,17 +1,19 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from rest_framework import serializers
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from common.serializers import AdaptedBulkListSerializer
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
|
||||
from common.validators import NoSpecialChars
|
||||
from ..models import Domain, Gateway
|
||||
from .base import AuthSerializerMixin
|
||||
|
||||
|
||||
class DomainSerializer(BulkOrgResourceModelSerializer):
|
||||
asset_count = serializers.SerializerMethodField()
|
||||
gateway_count = serializers.SerializerMethodField()
|
||||
asset_count = serializers.SerializerMethodField(label=_('Assets count'))
|
||||
application_count = serializers.SerializerMethodField(label=_('Applications count'))
|
||||
gateway_count = serializers.SerializerMethodField(label=_('Gateways count'))
|
||||
|
||||
class Meta:
|
||||
model = Domain
|
||||
@@ -20,12 +22,12 @@ class DomainSerializer(BulkOrgResourceModelSerializer):
|
||||
'comment', 'date_created'
|
||||
]
|
||||
fields_m2m = [
|
||||
'asset_count', 'assets', 'gateway_count',
|
||||
'asset_count', 'assets', 'application_count', 'gateway_count',
|
||||
]
|
||||
fields = fields_small + fields_m2m
|
||||
read_only_fields = ('asset_count', 'gateway_count', 'date_created')
|
||||
extra_kwargs = {
|
||||
'assets': {'required': False}
|
||||
'assets': {'required': False, 'label': _('Assets')},
|
||||
}
|
||||
list_serializer_class = AdaptedBulkListSerializer
|
||||
|
||||
@@ -33,6 +35,10 @@ class DomainSerializer(BulkOrgResourceModelSerializer):
|
||||
def get_asset_count(obj):
|
||||
return obj.assets.count()
|
||||
|
||||
@staticmethod
|
||||
def get_application_count(obj):
|
||||
return obj.applications.count()
|
||||
|
||||
@staticmethod
|
||||
def get_gateway_count(obj):
|
||||
return obj.gateway_set.all().count()
|
||||
@@ -47,6 +53,9 @@ class GatewaySerializer(AuthSerializerMixin, BulkOrgResourceModelSerializer):
|
||||
'private_key', 'public_key', 'domain', 'is_active', 'date_created',
|
||||
'date_updated', 'created_by', 'comment',
|
||||
]
|
||||
extra_kwargs = {
|
||||
'password': {'validators': [NoSpecialChars()]}
|
||||
}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
@@ -25,6 +25,9 @@ class NodeSerializer(BulkOrgResourceModelSerializer):
|
||||
read_only_fields = ['key', 'org_id']
|
||||
|
||||
def validate_value(self, data):
|
||||
if '/' in data:
|
||||
error = _("Can't contains: " + "/")
|
||||
raise serializers.ValidationError(error)
|
||||
if self.instance:
|
||||
instance = self.instance
|
||||
siblings = instance.get_siblings()
|
||||
|
||||
@@ -6,7 +6,6 @@ from common.serializers import AdaptedBulkListSerializer
|
||||
from common.mixins.serializers import BulkSerializerMixin
|
||||
from common.utils import ssh_pubkey_gen
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
from assets.models import Node
|
||||
from ..models import SystemUser, Asset
|
||||
from .base import AuthSerializerMixin
|
||||
|
||||
@@ -35,17 +34,18 @@ class SystemUserSerializer(AuthSerializerMixin, BulkOrgResourceModelSerializer):
|
||||
'auto_push', 'cmd_filters', 'sudo', 'shell', 'comment',
|
||||
'auto_generate_key', 'sftp_root', 'token',
|
||||
'assets_amount', 'date_created', 'created_by',
|
||||
'home', 'system_groups'
|
||||
'home', 'system_groups', 'ad_domain'
|
||||
]
|
||||
extra_kwargs = {
|
||||
'password': {"write_only": True},
|
||||
'public_key': {"write_only": True},
|
||||
'private_key': {"write_only": True},
|
||||
'token': {"write_only": True},
|
||||
'nodes_amount': {'label': _('Node')},
|
||||
'assets_amount': {'label': _('Asset')},
|
||||
'nodes_amount': {'label': _('Nodes amount')},
|
||||
'assets_amount': {'label': _('Assets amount')},
|
||||
'login_mode_display': {'label': _('Login mode display')},
|
||||
'created_by': {'read_only': True},
|
||||
'ad_domain': {'required': False, 'allow_blank': True, 'label': _('Ad domain')},
|
||||
}
|
||||
|
||||
def validate_auto_push(self, value):
|
||||
@@ -154,14 +154,18 @@ class SystemUserListSerializer(SystemUserSerializer):
|
||||
'priority', "username_same_with_user",
|
||||
'auto_push', 'sudo', 'shell', 'comment',
|
||||
"assets_amount", 'home', 'system_groups',
|
||||
'auto_generate_key',
|
||||
'auto_generate_key', 'ad_domain',
|
||||
'sftp_root',
|
||||
]
|
||||
|
||||
extra_kwargs = {
|
||||
'password': {"write_only": True},
|
||||
'public_key': {"write_only": True},
|
||||
'private_key': {"write_only": True},
|
||||
'nodes_amount': {'label': _('Nodes amount')},
|
||||
'assets_amount': {'label': _('Assets amount')},
|
||||
'login_mode_display': {'label': _('Login mode display')},
|
||||
'created_by': {'read_only': True},
|
||||
'ad_domain': {'label': _('Ad domain')},
|
||||
}
|
||||
|
||||
@classmethod
|
||||
@@ -179,7 +183,8 @@ class SystemUserWithAuthInfoSerializer(SystemUserSerializer):
|
||||
'login_mode', 'login_mode_display',
|
||||
'priority', 'username_same_with_user',
|
||||
'auto_push', 'sudo', 'shell', 'comment',
|
||||
'auto_generate_key', 'sftp_root', 'token'
|
||||
'auto_generate_key', 'sftp_root', 'token',
|
||||
'ad_domain',
|
||||
]
|
||||
extra_kwargs = {
|
||||
'nodes_amount': {'label': _('Node')},
|
||||
@@ -252,4 +257,8 @@ class SystemUserTaskSerializer(serializers.Serializer):
|
||||
asset = serializers.PrimaryKeyRelatedField(
|
||||
queryset=Asset.objects, allow_null=True, required=False, write_only=True
|
||||
)
|
||||
assets = serializers.PrimaryKeyRelatedField(
|
||||
queryset=Asset.objects, allow_null=True, required=False, write_only=True,
|
||||
many=True
|
||||
)
|
||||
task = serializers.CharField(read_only=True)
|
||||
|
||||
@@ -4,7 +4,7 @@ from operator import add, sub
|
||||
|
||||
from assets.utils import is_asset_exists_in_node
|
||||
from django.db.models.signals import (
|
||||
post_save, m2m_changed, pre_delete, post_delete
|
||||
post_save, m2m_changed, pre_delete, post_delete, pre_save
|
||||
)
|
||||
from django.db.models import Q, F
|
||||
from django.dispatch import receiver
|
||||
@@ -37,6 +37,11 @@ def test_asset_conn_on_created(asset):
|
||||
test_asset_connectivity_util.delay([asset])
|
||||
|
||||
|
||||
@receiver(pre_save, sender=Node)
|
||||
def on_node_pre_save(sender, instance: Node, **kwargs):
|
||||
instance.parent_key = instance.compute_parent_key()
|
||||
|
||||
|
||||
@receiver(post_save, sender=Asset)
|
||||
@on_transaction_commit
|
||||
def on_asset_created_or_update(sender, instance=None, created=False, **kwargs):
|
||||
@@ -58,7 +63,8 @@ def on_asset_created_or_update(sender, instance=None, created=False, **kwargs):
|
||||
|
||||
|
||||
@receiver(post_save, sender=SystemUser, dispatch_uid="jms")
|
||||
def on_system_user_update(sender, instance=None, created=True, **kwargs):
|
||||
@on_transaction_commit
|
||||
def on_system_user_update(instance: SystemUser, created, **kwargs):
|
||||
"""
|
||||
When a system user is updated, its key or username may have changed, so the system user should be pushed to its assets automatically.
|
||||
Strictly this should only happen when the username, password, key or sudo settings change; this is a shortcut.
|
||||
@@ -68,48 +74,52 @@ def on_system_user_update(sender, instance=None, created=True, **kwargs):
|
||||
if instance and not created:
|
||||
logger.info("System user update signal recv: {}".format(instance))
|
||||
assets = instance.assets.all().valid()
|
||||
push_system_user_to_assets.delay(instance, assets)
|
||||
push_system_user_to_assets.delay(instance.id, [_asset.id for _asset in assets])
|
||||
|
||||
|
||||
@receiver(m2m_changed, sender=SystemUser.assets.through)
|
||||
def on_system_user_assets_change(sender, instance=None, action='', model=None, pk_set=None, **kwargs):
|
||||
@on_transaction_commit
|
||||
def on_system_user_assets_change(instance, action, model, pk_set, **kwargs):
|
||||
"""
|
||||
When the relation between a system user and assets changes, re-push the system user to the newly added assets
|
||||
"""
|
||||
if action != POST_ADD:
|
||||
return
|
||||
logger.debug("System user assets change signal recv: {}".format(instance))
|
||||
queryset = model.objects.filter(pk__in=pk_set)
|
||||
if model == Asset:
|
||||
system_users = [instance]
|
||||
assets = queryset
|
||||
system_users_id = [instance.id]
|
||||
assets_id = pk_set
|
||||
else:
|
||||
system_users = queryset
|
||||
assets = [instance]
|
||||
for system_user in system_users:
|
||||
push_system_user_to_assets.delay(system_user, assets)
|
||||
system_users_id = pk_set
|
||||
assets_id = [instance.id]
|
||||
for system_user_id in system_users_id:
|
||||
push_system_user_to_assets.delay(system_user_id, assets_id)
|
||||
|
||||
|
||||
@receiver(m2m_changed, sender=SystemUser.users.through)
|
||||
def on_system_user_users_change(sender, instance=None, action='', model=None, pk_set=None, **kwargs):
|
||||
@on_transaction_commit
|
||||
def on_system_user_users_change(sender, instance: SystemUser, action, model, pk_set, reverse, **kwargs):
|
||||
"""
|
||||
When the relation between a system user and users changes, re-push the system user to its assets
|
||||
"""
|
||||
if action != POST_ADD:
|
||||
return
|
||||
|
||||
if reverse:
|
||||
raise M2MReverseNotAllowed
|
||||
|
||||
if not instance.username_same_with_user:
|
||||
return
|
||||
|
||||
logger.debug("System user users change signal recv: {}".format(instance))
|
||||
queryset = model.objects.filter(pk__in=pk_set)
|
||||
if model == SystemUser:
|
||||
system_users = queryset
|
||||
else:
|
||||
system_users = [instance]
|
||||
for s in system_users:
|
||||
push_system_user_to_assets_manual.delay(s)
|
||||
usernames = model.objects.filter(pk__in=pk_set).values_list('username', flat=True)
|
||||
|
||||
for username in usernames:
|
||||
push_system_user_to_assets_manual.delay(instance, username)
|
||||
|
||||
|
||||
@receiver(m2m_changed, sender=SystemUser.nodes.through)
|
||||
@on_transaction_commit
|
||||
def on_system_user_nodes_change(sender, instance=None, action=None, model=None, pk_set=None, **kwargs):
|
||||
"""
|
||||
When the relation between a system user and nodes changes, the assets under those nodes should be associated with the system user
|
||||
@@ -140,7 +150,7 @@ def on_system_user_groups_change(instance, action, pk_set, reverse, **kwargs):
|
||||
logger.info("System user groups update signal recv: {}".format(instance))
|
||||
|
||||
users = User.objects.filter(groups__id__in=pk_set).distinct()
|
||||
instance.users.add(users)
|
||||
instance.users.add(*users)
|
||||
|
||||
|
||||
@receiver(m2m_changed, sender=Asset.nodes.through)
|
||||
|
||||
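The signal changes above switch from passing model instances into delay() to passing primary keys, so the Celery arguments stay JSON-serializable and the objects are re-fetched inside the task. A sketch of the general pattern (the names here are illustrative, not the project's actual task):

from celery import shared_task

@shared_task
def push_to_assets(system_user_id, asset_ids):
    # Re-fetch inside the task; the ORM calls are shown as comments only.
    # system_user = SystemUser.objects.get(id=system_user_id)
    # assets = Asset.objects.filter(id__in=asset_ids)
    ...

# Caller side:
# push_to_assets.delay(str(system_user.id), [str(a.id) for a in assets])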
@@ -3,10 +3,13 @@
|
||||
|
||||
from celery import shared_task
|
||||
|
||||
from orgs.utils import tmp_to_root_org
|
||||
|
||||
__all__ = ['add_nodes_assets_to_system_users']
|
||||
|
||||
|
||||
@shared_task
|
||||
@tmp_to_root_org()
|
||||
def add_nodes_assets_to_system_users(nodes_keys, system_users):
|
||||
from ..models import Node
|
||||
assets = Node.get_nodes_all_assets(nodes_keys).values_list('id', flat=True)
|
||||
|
||||
@@ -1,14 +1,27 @@
|
||||
from celery import shared_task
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from orgs.models import Organization
|
||||
from orgs.utils import tmp_to_org
|
||||
from ops.celery.decorator import register_as_period_task
|
||||
from assets.utils import check_node_assets_amount
|
||||
|
||||
from common.utils.lock import AcquireFailed
|
||||
from common.utils import get_logger
|
||||
from common.utils.timezone import now
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
@shared_task()
|
||||
def check_node_assets_amount_celery_task():
|
||||
logger.info(f'>>> {now()} begin check_node_assets_amount_celery_task ...')
|
||||
check_node_assets_amount()
|
||||
logger.info(f'>>> {now()} end check_node_assets_amount_celery_task ...')
|
||||
@shared_task(queue='celery_heavy_tasks')
|
||||
def check_node_assets_amount_task(org_id=Organization.ROOT_ID):
|
||||
try:
|
||||
with tmp_to_org(Organization.get_instance(org_id)):
|
||||
check_node_assets_amount()
|
||||
except AcquireFailed:
|
||||
logger.error(_('The task of self-checking is already running and cannot be started repeatedly'))
|
||||
|
||||
|
||||
@register_as_period_task(crontab='0 2 * * *')
|
||||
@shared_task(queue='celery_heavy_tasks')
|
||||
def check_node_assets_amount_period_task():
|
||||
check_node_assets_amount_task()
|
||||
|
||||
@@ -2,13 +2,13 @@
|
||||
|
||||
from itertools import groupby
|
||||
from celery import shared_task
|
||||
from common.db.utils import get_object_if_need, get_objects_if_need
|
||||
from common.db.utils import get_object_if_need, get_objects
|
||||
from django.utils.translation import ugettext as _
|
||||
from django.db.models import Empty
|
||||
|
||||
from common.utils import encrypt_password, get_logger
|
||||
from assets.models import SystemUser, Asset
|
||||
from orgs.utils import org_aware_func
|
||||
from assets.models import SystemUser, Asset, AuthBook
|
||||
from orgs.utils import org_aware_func, tmp_to_root_org
|
||||
from . import const
|
||||
from .utils import clean_ansible_task_hosts, group_asset_by_platform
|
||||
|
||||
@@ -36,6 +36,7 @@ def get_push_unixlike_system_user_tasks(system_user, username=None):
|
||||
username = system_user.username
|
||||
password = system_user.password
|
||||
public_key = system_user.public_key
|
||||
comment = system_user.name
|
||||
|
||||
groups = _split_by_comma(system_user.system_groups)
|
||||
|
||||
@@ -47,7 +48,8 @@ def get_push_unixlike_system_user_tasks(system_user, username=None):
|
||||
'shell': system_user.shell or Empty,
|
||||
'state': 'present',
|
||||
'home': system_user.home or Empty,
|
||||
'groups': groups or Empty
|
||||
'groups': groups or Empty,
|
||||
'comment': comment
|
||||
}
|
||||
|
||||
tasks = [
|
||||
@@ -64,24 +66,27 @@ def get_push_unixlike_system_user_tasks(system_user, username=None):
|
||||
'module': 'group',
|
||||
'args': 'name={} state=present'.format(username),
|
||||
}
|
||||
},
|
||||
{
|
||||
'name': 'Check home dir exists',
|
||||
'action': {
|
||||
'module': 'stat',
|
||||
'args': 'path=/home/{}'.format(username)
|
||||
},
|
||||
'register': 'home_existed'
|
||||
},
|
||||
{
|
||||
'name': "Set home dir permission",
|
||||
'action': {
|
||||
'module': 'file',
|
||||
'args': "path=/home/{0} owner={0} group={0} mode=700".format(username)
|
||||
},
|
||||
'when': 'home_existed.stat.exists == true'
|
||||
}
|
||||
]
|
||||
if not system_user.home:
|
||||
tasks.extend([
|
||||
{
|
||||
'name': 'Check home dir exists',
|
||||
'action': {
|
||||
'module': 'stat',
|
||||
'args': 'path=/home/{}'.format(username)
|
||||
},
|
||||
'register': 'home_existed'
|
||||
},
|
||||
{
|
||||
'name': "Set home dir permission",
|
||||
'action': {
|
||||
'module': 'file',
|
||||
'args': "path=/home/{0} owner={0} group={0} mode=700".format(username)
|
||||
},
|
||||
'when': 'home_existed.stat.exists == true'
|
||||
}
|
||||
])
|
||||
if password:
|
||||
tasks.append({
|
||||
'name': 'Set {} password'.format(username),
|
||||
@@ -134,6 +139,7 @@ def get_push_windows_system_user_tasks(system_user, username=None):
|
||||
|
||||
tasks = []
|
||||
if not password:
|
||||
logger.error("Error: no password found")
|
||||
return tasks
|
||||
task = {
|
||||
'name': 'Add user {}'.format(username),
|
||||
@@ -184,15 +190,12 @@ def get_push_system_user_tasks(system_user, platform="unixlike", username=None):
|
||||
@org_aware_func("system_user")
|
||||
def push_system_user_util(system_user, assets, task_name, username=None):
|
||||
from ops.utils import update_or_create_ansible_task
|
||||
hosts = clean_ansible_task_hosts(assets, system_user=system_user)
|
||||
if not hosts:
|
||||
assets = clean_ansible_task_hosts(assets, system_user=system_user)
|
||||
if not assets:
|
||||
return {}
|
||||
|
||||
platform_hosts_map = {}
|
||||
hosts_sorted = sorted(hosts, key=group_asset_by_platform)
|
||||
platform_hosts = groupby(hosts_sorted, key=group_asset_by_platform)
|
||||
for i in platform_hosts:
|
||||
platform_hosts_map[i[0]] = list(i[1])
|
||||
assets_sorted = sorted(assets, key=group_asset_by_platform)
|
||||
platform_hosts = groupby(assets_sorted, key=group_asset_by_platform)
|
||||
|
||||
def run_task(_tasks, _hosts):
|
||||
if not _tasks:
|
||||
@@ -203,26 +206,59 @@ def push_system_user_util(system_user, assets, task_name, username=None):
|
||||
)
|
||||
task.run()
|
||||
|
||||
for platform, _hosts in platform_hosts_map.items():
|
||||
if not _hosts:
|
||||
if system_user.username_same_with_user:
|
||||
if username is None:
|
||||
# Dynamic system user, but no username was specified
|
||||
usernames = list(system_user.users.all().values_list('username', flat=True).distinct())
|
||||
else:
|
||||
usernames = [username]
|
||||
else:
|
||||
# Specifying username for a non-dynamic system user is invalid
|
||||
assert username is None, 'Only Dynamic user can assign `username`'
|
||||
usernames = [system_user.username]
|
||||
|
||||
for platform, _assets in platform_hosts:
|
||||
_assets = list(_assets)
|
||||
if not _assets:
|
||||
continue
|
||||
print(_("Start push system user for platform: [{}]").format(platform))
|
||||
print(_("Hosts count: {}").format(len(_hosts)))
|
||||
print(_("Hosts count: {}").format(len(_assets)))
|
||||
|
||||
if not system_user.has_special_auth():
|
||||
logger.debug("System user not has special auth")
|
||||
tasks = get_push_system_user_tasks(system_user, platform, username=username)
|
||||
run_task(tasks, _hosts)
|
||||
continue
|
||||
id_asset_map = {_asset.id: _asset for _asset in _assets}
|
||||
assets_id = id_asset_map.keys()
|
||||
no_special_auth = []
|
||||
special_auth_set = set()
|
||||
|
||||
for _host in _hosts:
|
||||
system_user.load_asset_special_auth(_host)
|
||||
tasks = get_push_system_user_tasks(system_user, platform, username=username)
|
||||
run_task(tasks, [_host])
|
||||
auth_books = AuthBook.objects.filter(username__in=usernames, asset_id__in=assets_id)
|
||||
|
||||
for auth_book in auth_books:
|
||||
special_auth_set.add((auth_book.username, auth_book.asset_id))
|
||||
|
||||
for _username in usernames:
|
||||
no_special_assets = []
|
||||
for asset_id in assets_id:
|
||||
if (_username, asset_id) not in special_auth_set:
|
||||
no_special_assets.append(id_asset_map[asset_id])
|
||||
if no_special_assets:
|
||||
no_special_auth.append((_username, no_special_assets))
|
||||
|
||||
for _username, no_special_assets in no_special_auth:
|
||||
tasks = get_push_system_user_tasks(system_user, platform, username=_username)
|
||||
run_task(tasks, no_special_assets)
|
||||
|
||||
for auth_book in auth_books:
|
||||
system_user._merge_auth(auth_book)
|
||||
tasks = get_push_system_user_tasks(system_user, platform, username=auth_book.username)
|
||||
asset = id_asset_map[auth_book.asset_id]
|
||||
run_task(tasks, [asset])
|
||||
|
||||
|
||||
@shared_task(queue="ansible")
|
||||
@tmp_to_root_org()
|
||||
def push_system_user_to_assets_manual(system_user, username=None):
|
||||
"""
|
||||
Push the system user to all assets associated with it
|
||||
"""
|
||||
system_user = get_object_if_need(SystemUser, system_user)
|
||||
assets = system_user.get_related_assets()
|
||||
task_name = _("Push system users to assets: {}").format(system_user.name)
|
||||
@@ -230,7 +266,11 @@ def push_system_user_to_assets_manual(system_user, username=None):
|
||||
|
||||
|
||||
@shared_task(queue="ansible")
|
||||
@tmp_to_root_org()
|
||||
def push_system_user_a_asset_manual(system_user, asset, username=None):
|
||||
"""
|
||||
Push the system user to a single asset
|
||||
"""
|
||||
if username is None:
|
||||
username = system_user.username
|
||||
task_name = _("Push system users to asset: {}({}) => {}").format(
|
||||
@@ -240,10 +280,15 @@ def push_system_user_a_asset_manual(system_user, asset, username=None):
|
||||
|
||||
|
||||
@shared_task(queue="ansible")
|
||||
def push_system_user_to_assets(system_user, assets, username=None):
|
||||
@tmp_to_root_org()
|
||||
def push_system_user_to_assets(system_user_id, assets_id, username=None):
|
||||
"""
|
||||
Push the system user to the specified assets
|
||||
"""
|
||||
system_user = SystemUser.objects.get(id=system_user_id)
|
||||
assets = get_objects(Asset, assets_id)
|
||||
task_name = _("Push system users to assets: {}").format(system_user.name)
|
||||
system_user = get_object_if_need(SystemUser, system_user)
|
||||
assets = get_objects_if_need(Asset, assets)
|
||||
|
||||
return push_system_user_util(system_user, assets, task_name, username=username)
|
||||
|
||||
# @shared_task
|
||||
|
||||
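The special-auth grouping added above can be sketched with plain sets: collect the (username, asset_id) pairs that have an AuthBook entry, then split each username's assets into those that get the generic push and those that need the per-asset auth (the data below is illustrative):

# Sketch: partition assets per username by whether a special auth entry exists.
usernames = ['deploy', 'web']
asset_ids = [1, 2, 3]
auth_book_entries = [('deploy', 2)]            # (username, asset_id) pairs

special = set(auth_book_entries)
for username in usernames:
    no_special = [a for a in asset_ids if (username, a) not in special]
    with_special = [a for a in asset_ids if (username, a) in special]
    print(username, 'generic push ->', no_special, '| special auth ->', with_special)

# deploy generic push -> [1, 3] | special auth -> [2]
# web generic push -> [1, 2, 3] | special auth -> []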
@@ -45,6 +45,7 @@ urlpatterns = [
|
||||
path('admin-users/<uuid:pk>/assets/', api.AdminUserAssetsListView.as_view(), name='admin-user-assets'),
|
||||
|
||||
path('system-users/<uuid:pk>/auth-info/', api.SystemUserAuthInfoApi.as_view(), name='system-user-auth-info'),
|
||||
path('system-users/<uuid:pk>/assets/', api.SystemUserAssetsListView.as_view(), name='system-user-assets'),
|
||||
path('system-users/<uuid:pk>/assets/<uuid:aid>/auth-info/', api.SystemUserAssetAuthInfoApi.as_view(), name='system-user-asset-auth-info'),
|
||||
path('system-users/<uuid:pk>/tasks/', api.SystemUserTaskApi.as_view(), name='system-user-task-create'),
|
||||
path('system-users/<uuid:pk>/cmd-filter-rules/', api.SystemUserCommandFilterRuleListApi.as_view(), name='system-user-cmd-filter-rule-list'),
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
# ~*~ coding: utf-8 ~*~
|
||||
#
|
||||
import time
|
||||
|
||||
from django.db.models import Q
|
||||
|
||||
from common.utils import get_logger, dict_get_any, is_uuid, get_object_or_none
|
||||
from common.utils.lock import DistributedLock
|
||||
from common.http import is_true
|
||||
from .models import Asset, Node
|
||||
|
||||
@@ -10,17 +13,21 @@ from .models import Asset, Node
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
@DistributedLock(name="assets.node.check_node_assets_amount", blocking=False)
|
||||
def check_node_assets_amount():
|
||||
for node in Node.objects.all():
|
||||
logger.info(f'Check node assets amount: {node}')
|
||||
assets_amount = Asset.objects.filter(
|
||||
Q(nodes__key__istartswith=f'{node.key}:') | Q(nodes=node)
|
||||
).distinct().count()
|
||||
|
||||
if node.assets_amount != assets_amount:
|
||||
print(f'>>> <Node:{node.key}> wrong assets amount '
|
||||
f'{node.assets_amount}, correct is {assets_amount}')
|
||||
logger.warn(f'Node wrong assets amount <Node:{node.key}> '
|
||||
f'{node.assets_amount}, correct is {assets_amount}')
|
||||
node.assets_amount = assets_amount
|
||||
node.save()
|
||||
# Avoid putting too much pressure on the database during the self-check
|
||||
time.sleep(0.1)
|
||||
|
||||
|
||||
def is_asset_exists_in_node(asset_pk, node_key):
|
||||
@@ -33,7 +40,7 @@ def is_asset_exists_in_node(asset_pk, node_key):
|
||||
|
||||
def is_query_node_all_assets(request):
|
||||
request = request
|
||||
query_all_arg = request.query_params.get('all')
|
||||
query_all_arg = request.query_params.get('all', 'true')
|
||||
show_current_asset_arg = request.query_params.get('show_current_asset')
|
||||
if show_current_asset_arg is not None:
|
||||
return not is_true(show_current_asset_arg)
|
||||
|
||||
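The assets-amount check above counts assets attached to a node or to any of its descendants; the key-prefix test it relies on can be shown without the ORM (the sample keys are illustrative):

# Sketch: an asset belongs to node '1:2' if one of its node keys equals
# '1:2' or starts with '1:2:' -- the same condition the Q objects express.
def belongs_to(node_key, asset_node_keys):
    prefix = node_key + ':'
    return any(k == node_key or k.startswith(prefix) for k in asset_node_keys)

assert belongs_to('1:2', ['1:2:5'])      # asset on a descendant node
assert belongs_to('1:2', ['1:2'])        # asset on the node itself
assert not belongs_to('1:2', ['1:20'])   # '1:20' is a sibling, not a child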
18  apps/audits/migrations/0011_userloginlog_backend.py  Normal file
@@ -0,0 +1,18 @@
# Generated by Django 3.1 on 2020-12-09 03:03

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('audits', '0010_auto_20200811_1122'),
    ]

    operations = [
        migrations.AddField(
            model_name='userloginlog',
            name='backend',
            field=models.CharField(default='', max_length=32, verbose_name='Authentication backend'),
        ),
    ]
@@ -105,6 +105,7 @@ class UserLoginLog(models.Model):
|
||||
reason = models.CharField(default='', max_length=128, blank=True, verbose_name=_('Reason'))
|
||||
status = models.BooleanField(max_length=2, default=True, choices=STATUS_CHOICE, verbose_name=_('Status'))
|
||||
datetime = models.DateTimeField(default=timezone.now, verbose_name=_('Date login'))
|
||||
backend = models.CharField(max_length=32, default='', verbose_name=_('Authentication backend'))
|
||||
|
||||
@classmethod
|
||||
def get_login_logs(cls, date_from=None, date_to=None, user=None, keyword=None):
|
||||
|
||||
@@ -12,7 +12,7 @@ from . import models
|
||||
|
||||
|
||||
class FTPLogSerializer(serializers.ModelSerializer):
|
||||
operate_display = serializers.ReadOnlyField(source='get_operate_display')
|
||||
operate_display = serializers.ReadOnlyField(source='get_operate_display', label=_('Operate for display'))
|
||||
|
||||
class Meta:
|
||||
model = models.FTPLog
|
||||
@@ -23,16 +23,20 @@ class FTPLogSerializer(serializers.ModelSerializer):
|
||||
|
||||
|
||||
class UserLoginLogSerializer(serializers.ModelSerializer):
|
||||
type_display = serializers.ReadOnlyField(source='get_type_display')
|
||||
status_display = serializers.ReadOnlyField(source='get_status_display')
|
||||
mfa_display = serializers.ReadOnlyField(source='get_mfa_display')
|
||||
type_display = serializers.ReadOnlyField(source='get_type_display', label=_('Type for display'))
|
||||
status_display = serializers.ReadOnlyField(source='get_status_display', label=_('Status for display'))
|
||||
mfa_display = serializers.ReadOnlyField(source='get_mfa_display', label=_('MFA for display'))
|
||||
|
||||
class Meta:
|
||||
model = models.UserLoginLog
|
||||
fields = (
|
||||
'id', 'username', 'type', 'type_display', 'ip', 'city', 'user_agent',
|
||||
'mfa', 'reason', 'status', 'status_display', 'datetime', 'mfa_display'
|
||||
'mfa', 'reason', 'status', 'status_display', 'datetime', 'mfa_display',
|
||||
'backend'
|
||||
)
|
||||
extra_kwargs = {
|
||||
"user_agent": {'label': _('User agent')}
|
||||
}
|
||||
|
||||
|
||||
class OperateLogSerializer(serializers.ModelSerializer):
|
||||
@@ -75,6 +79,8 @@ class CommandExecutionSerializer(serializers.ModelSerializer):
|
||||
'hosts': {'label': _('Hosts')}, # foreign key, generates SQL; do not change it on the model
|
||||
'run_as': {'label': _('Run as')},
|
||||
'user': {'label': _('User')},
|
||||
'run_as_display': {'label': _('Run as for display')},
|
||||
'user_display': {'label': _('User for display')},
|
||||
}
|
||||
|
||||
@classmethod
|
||||
|
||||
@@ -5,6 +5,8 @@ from django.db.models.signals import post_save, post_delete
|
||||
from django.dispatch import receiver
|
||||
from django.db import transaction
|
||||
from django.utils import timezone
|
||||
from django.contrib.auth import BACKEND_SESSION_KEY
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework.renderers import JSONRenderer
|
||||
from rest_framework.request import Request
|
||||
|
||||
@@ -32,6 +34,19 @@ MODELS_NEED_RECORD = (
|
||||
)
|
||||
|
||||
|
||||
LOGIN_BACKEND = {
|
||||
'PublicKeyAuthBackend': _('SSH Key'),
|
||||
'RadiusBackend': User.Source.radius.label,
|
||||
'RadiusRealmBackend': User.Source.radius.label,
|
||||
'LDAPAuthorizationBackend': User.Source.ldap.label,
|
||||
'ModelBackend': _('Password'),
|
||||
'SSOAuthentication': _('SSO'),
|
||||
'CASBackend': User.Source.cas.label,
|
||||
'OIDCAuthCodeBackend': User.Source.openid.label,
|
||||
'OIDCAuthPasswordBackend': User.Source.openid.label,
|
||||
}
|
||||
|
||||
|
||||
def create_operate_log(action, sender, resource):
|
||||
user = current_request.user if current_request else None
|
||||
if not user or not user.is_authenticated:
|
||||
@@ -109,6 +124,16 @@ def on_audits_log_create(sender, instance=None, **kwargs):
|
||||
sys_logger.info(msg)
|
||||
|
||||
|
||||
def get_login_backend(request):
|
||||
backend = request.session.get(BACKEND_SESSION_KEY, '')
|
||||
backend = backend.rsplit('.', maxsplit=1)[-1]
|
||||
if backend in LOGIN_BACKEND:
|
||||
return LOGIN_BACKEND[backend]
|
||||
else:
|
||||
logger.warn(f'LOGIN_BACKEND_NOT_FOUND: {backend}')
|
||||
return ''
|
||||
|
||||
|
||||
def generate_data(username, request):
|
||||
user_agent = request.META.get('HTTP_USER_AGENT', '')
|
||||
login_ip = get_request_ip(request) or '0.0.0.0'
|
||||
@@ -122,7 +147,8 @@ def generate_data(username, request):
|
||||
'ip': login_ip,
|
||||
'type': login_type,
|
||||
'user_agent': user_agent,
|
||||
'datetime': timezone.now()
|
||||
'datetime': timezone.now(),
|
||||
'backend': get_login_backend(request)
|
||||
}
|
||||
return data
|
||||
|
||||
|
||||
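The backend lookup above reduces the dotted backend path stored in the session to its class name and maps it to a display label; a minimal sketch (the mapping below is a trimmed, illustrative subset of the real one):

# Sketch: derive the display label from the dotted auth backend path.
LOGIN_BACKEND = {
    'ModelBackend': 'Password',
    'PublicKeyAuthBackend': 'SSH Key',
    # ... other backends map to their source labels
}

def login_backend_label(dotted_path):
    name = dotted_path.rsplit('.', maxsplit=1)[-1]
    return LOGIN_BACKEND.get(name, '')

assert login_backend_label('django.contrib.auth.backends.ModelBackend') == 'Password'
assert login_backend_label('unknown.backend.Foo') == ''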
@@ -2,32 +2,44 @@
|
||||
#
|
||||
import datetime
|
||||
from django.utils import timezone
|
||||
from django.conf import settings
|
||||
from celery import shared_task
|
||||
|
||||
from ops.celery.decorator import register_as_period_task
|
||||
from ops.celery.decorator import (
|
||||
register_as_period_task, after_app_shutdown_clean_periodic
|
||||
)
|
||||
from .models import UserLoginLog, OperateLog, FTPLog
|
||||
from common.utils import get_log_keep_day
|
||||
|
||||
|
||||
@register_as_period_task(interval=3600*24)
|
||||
@shared_task
|
||||
@after_app_shutdown_clean_periodic
|
||||
def clean_login_log_period():
|
||||
now = timezone.now()
|
||||
try:
|
||||
days = int(settings.LOGIN_LOG_KEEP_DAYS)
|
||||
except ValueError:
|
||||
days = 9999
|
||||
days = get_log_keep_day('LOGIN_LOG_KEEP_DAYS')
|
||||
expired_day = now - datetime.timedelta(days=days)
|
||||
UserLoginLog.objects.filter(datetime__lt=expired_day).delete()
|
||||
|
||||
|
||||
@register_as_period_task(interval=3600*24)
|
||||
@shared_task
|
||||
@after_app_shutdown_clean_periodic
|
||||
def clean_operation_log_period():
|
||||
now = timezone.now()
|
||||
try:
|
||||
days = int(settings.LOGIN_LOG_KEEP_DAYS)
|
||||
except ValueError:
|
||||
days = 9999
|
||||
days = get_log_keep_day('OPERATE_LOG_KEEP_DAYS')
|
||||
expired_day = now - datetime.timedelta(days=days)
|
||||
OperateLog.objects.filter(datetime__lt=expired_day).delete()
|
||||
|
||||
|
||||
@shared_task
|
||||
def clean_ftp_log_period():
|
||||
now = timezone.now()
|
||||
days = get_log_keep_day('FTP_LOG_KEEP_DAYS')
|
||||
expired_day = now - datetime.timedelta(days=days)
|
||||
FTPLog.objects.filter(datetime__lt=expired_day).delete()
|
||||
|
||||
|
||||
@register_as_period_task(interval=3600*24)
|
||||
@shared_task
|
||||
def clean_audits_log_period():
|
||||
clean_login_log_period()
|
||||
clean_operation_log_period()
|
||||
clean_ftp_log_period()
|
||||
|
||||
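The cleanup tasks above all follow the same cutoff computation; a sketch, assuming get_log_keep_day returns the configured keep-days for the given setting name (the 200-day value is used purely as an example):

# Sketch: compute the expiry cutoff for log cleanup.
import datetime

def expired_before(now, keep_days):
    return now - datetime.timedelta(days=keep_days)

now = datetime.datetime(2020, 12, 9, tzinfo=datetime.timezone.utc)
cutoff = expired_before(now, 200)            # e.g. LOGIN_LOG_KEEP_DAYS = 200
assert cutoff == datetime.datetime(2020, 5, 23, tzinfo=datetime.timezone.utc)
# rows with datetime < cutoff would then be deleted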
@@ -54,12 +54,3 @@ class UserConnectionTokenApi(RootOrgViewMixin, APIView):
|
||||
return Response(value)
|
||||
else:
|
||||
return Response({'user': value['user']})
|
||||
|
||||
def get_permissions(self):
|
||||
if self.request.query_params.get('user-only', None):
|
||||
self.permission_classes = (AllowAny,)
|
||||
return super().get_permissions()
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -60,6 +60,7 @@ class SSOViewSet(AuthMixin, JmsGenericViewSet):
|
||||
This endpoint violates the `Restful` convention
|
||||
`GET` 应该是安全的方法,但此接口是不安全的
|
||||
"""
|
||||
request.META['HTTP_X_JMS_LOGIN_TYPE'] = 'W'
|
||||
authkey = request.query_params.get(AUTH_KEY)
|
||||
next_url = request.query_params.get(NEXT_URL)
|
||||
if not next_url or not next_url.startswith('/'):
|
||||
|
||||
@@ -6,7 +6,7 @@ import time
|
||||
|
||||
from django.core.cache import cache
|
||||
from django.utils.translation import ugettext as _
|
||||
from django.utils.six import text_type
|
||||
from six import text_type
|
||||
from django.contrib.auth import get_user_model
|
||||
from django.contrib.auth.backends import ModelBackend
|
||||
from rest_framework import HTTP_HEADER_ENCODING
|
||||
|
||||
@@ -82,6 +82,12 @@ class LDAPAuthorizationBackend(LDAPBackend):
|
||||
|
||||
class LDAPUser(_LDAPUser):
|
||||
|
||||
def _search_for_user_dn_from_ldap_util(self):
|
||||
from settings.utils import LDAPServerUtil
|
||||
util = LDAPServerUtil()
|
||||
user_dn = util.search_for_user_dn(self._username)
|
||||
return user_dn
|
||||
|
||||
def _search_for_user_dn(self):
|
||||
"""
|
||||
This method was overridden because the AUTH_LDAP_USER_SEARCH
|
||||
@@ -107,7 +113,14 @@ class LDAPUser(_LDAPUser):
|
||||
if results is not None and len(results) == 1:
|
||||
(user_dn, self._user_attrs) = next(iter(results))
|
||||
else:
|
||||
user_dn = None
|
||||
# Fix authentication failures when only the DC domain is configured (the library cannot search the whole tree)
|
||||
user_dn = self._search_for_user_dn_from_ldap_util()
|
||||
if user_dn is None:
|
||||
self._user_dn = None
|
||||
self._user_attrs = None
|
||||
else:
|
||||
self._user_dn = user_dn
|
||||
self._user_attrs = self._load_user_attrs()
|
||||
|
||||
return user_dn
|
||||
|
||||
|
||||
@@ -23,7 +23,7 @@ class CreateUserMixin:
|
||||
email_suffix = settings.EMAIL_SUFFIX
|
||||
email = '{}@{}'.format(username, email_suffix)
|
||||
user = User(username=username, name=username, email=email)
|
||||
user.source = user.SOURCE_RADIUS
|
||||
user.source = user.Source.radius.value
|
||||
user.save()
|
||||
return user
|
||||
|
||||
|
||||
@@ -218,5 +218,14 @@ class PasswdTooSimple(JMSException):
|
||||
default_detail = _('Your password is too simple, please change it for security')
|
||||
|
||||
def __init__(self, url, *args, **kwargs):
|
||||
super(PasswdTooSimple, self).__init__(*args, **kwargs)
|
||||
super().__init__(*args, **kwargs)
|
||||
self.url = url
|
||||
|
||||
|
||||
class PasswordRequireResetError(JMSException):
|
||||
default_code = 'passwd_has_expired'
|
||||
default_detail = _('Your password has expired, please reset before logging in')
|
||||
|
||||
def __init__(self, url, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.url = url
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
from django import forms
|
||||
from django.conf import settings
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from captcha.fields import CaptchaField
|
||||
from captcha.fields import CaptchaField, CaptchaTextInput
|
||||
|
||||
|
||||
class UserLoginForm(forms.Form):
|
||||
@@ -26,8 +26,12 @@ class UserCheckOtpCodeForm(forms.Form):
|
||||
otp_code = forms.CharField(label=_('MFA code'), max_length=6)
|
||||
|
||||
|
||||
class CustomCaptchaTextInput(CaptchaTextInput):
|
||||
template_name = 'authentication/_captcha_field.html'
|
||||
|
||||
|
||||
class CaptchaMixin(forms.Form):
|
||||
captcha = CaptchaField()
|
||||
captcha = CaptchaField(widget=CustomCaptchaTextInput)
|
||||
|
||||
|
||||
class ChallengeMixin(forms.Form):
|
||||
|
||||
@@ -53,7 +53,7 @@ class AuthMixin:
|
||||
ip = ip or get_request_ip(self.request)
|
||||
return ip
|
||||
|
||||
def check_is_block(self):
|
||||
def check_is_block(self, raise_exception=True):
|
||||
if hasattr(self.request, 'data'):
|
||||
username = self.request.data.get("username")
|
||||
else:
|
||||
@@ -61,7 +61,11 @@ class AuthMixin:
|
||||
ip = self.get_request_ip()
|
||||
if is_block_login(username, ip):
|
||||
logger.warn('Ip was blocked' + ': ' + username + ':' + ip)
|
||||
raise errors.BlockLoginError(username=username, ip=ip)
|
||||
exception = errors.BlockLoginError(username=username, ip=ip)
|
||||
if raise_exception:
|
||||
raise errors.BlockLoginError(username=username, ip=ip)
|
||||
else:
|
||||
return exception
|
||||
|
||||
def decrypt_passwd(self, raw_passwd):
|
||||
# Get the decryption key and decrypt the password
|
||||
@@ -106,9 +110,8 @@ class AuthMixin:
|
||||
raise CredentialError(error=errors.reason_user_inactive)
|
||||
elif not user.is_active:
|
||||
raise CredentialError(error=errors.reason_user_inactive)
|
||||
elif user.password_has_expired:
|
||||
raise CredentialError(error=errors.reason_password_expired)
|
||||
|
||||
self._check_password_require_reset_or_not(user)
|
||||
self._check_passwd_is_too_simple(user, password)
|
||||
|
||||
clean_failed_count(username, ip)
|
||||
@@ -119,20 +122,34 @@ class AuthMixin:
|
||||
return user
|
||||
|
||||
@classmethod
|
||||
def _check_passwd_is_too_simple(cls, user, password):
|
||||
def generate_reset_password_url_with_flash_msg(cls, user: User, flash_view_name):
|
||||
reset_passwd_url = reverse('authentication:reset-password')
|
||||
query_str = urlencode({
|
||||
'token': user.generate_reset_token()
|
||||
})
|
||||
reset_passwd_url = f'{reset_passwd_url}?{query_str}'
|
||||
|
||||
flash_page_url = reverse(flash_view_name)
|
||||
query_str = urlencode({
|
||||
'redirect_url': reset_passwd_url
|
||||
})
|
||||
return f'{flash_page_url}?{query_str}'
|
||||
|
||||
@classmethod
|
||||
def _check_passwd_is_too_simple(cls, user: User, password):
|
||||
if user.is_superuser and password == 'admin':
|
||||
reset_passwd_url = reverse('authentication:reset-password')
|
||||
query_str = urlencode({
|
||||
'token': user.generate_reset_token()
|
||||
})
|
||||
reset_passwd_url = f'{reset_passwd_url}?{query_str}'
|
||||
url = cls.generate_reset_password_url_with_flash_msg(
|
||||
user, 'authentication:passwd-too-simple-flash-msg'
|
||||
)
|
||||
raise errors.PasswdTooSimple(url)
|
||||
|
||||
flash_page_url = reverse('authentication:passwd-too-simple-flash-msg')
|
||||
query_str = urlencode({
|
||||
'redirect_url': reset_passwd_url
|
||||
})
|
||||
|
||||
raise errors.PasswdTooSimple(f'{flash_page_url}?{query_str}')
|
||||
@classmethod
|
||||
def _check_password_require_reset_or_not(cls, user: User):
|
||||
if user.password_has_expired:
|
||||
url = cls.generate_reset_password_url_with_flash_msg(
|
||||
user, 'authentication:passwd-has-expired-flash-msg'
|
||||
)
|
||||
raise errors.PasswordRequireResetError(url)
|
||||
|
||||
def check_user_auth_if_need(self, decrypt_passwd=False):
|
||||
request = self.request
|
||||
|
||||
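Illustrative note (not part of the diff): the AuthMixin change above composes a two-step redirect, where the reset-password URL carries a one-time token and is then wrapped as the redirect_url of a flash-message page. Below is a minimal standalone sketch of that composition using only the standard library; the paths and the token value are made up for illustration.

    from urllib.parse import urlencode

    def build_reset_redirect(flash_page_path, reset_password_path, token):
        # Step 1: attach the one-time reset token to the reset-password URL.
        reset_url = '{}?{}'.format(reset_password_path, urlencode({'token': token}))
        # Step 2: wrap that URL as the redirect target of the flash-message page,
        # so the user first sees the message, then is forwarded to the reset form.
        return '{}?{}'.format(flash_page_path, urlencode({'redirect_url': reset_url}))

    print(build_reset_redirect('/auth/password/too-simple-flash-msg/', '/auth/password/reset/', 'abc123'))
    # -> /auth/password/too-simple-flash-msg/?redirect_url=%2Fauth%2Fpassword%2Freset%2F%3Ftoken%3Dabc123
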
@@ -1,11 +1,9 @@
|
||||
import uuid
|
||||
from functools import partial
|
||||
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import ugettext_lazy as _, ugettext as __
|
||||
from rest_framework.authtoken.models import Token
|
||||
from django.conf import settings
|
||||
from django.utils.crypto import get_random_string
|
||||
|
||||
from common.db import models
|
||||
from common.mixins.models import CommonModelMixin
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from importlib import import_module
|
||||
|
||||
from django.contrib.auth import BACKEND_SESSION_KEY
|
||||
from django.conf import settings
|
||||
from django.contrib.auth import user_logged_in
|
||||
from django.core.cache import cache
|
||||
@@ -24,14 +25,17 @@ def on_user_auth_login_success(sender, user, request, **kwargs):
|
||||
|
||||
@receiver(openid_user_login_success)
|
||||
def on_oidc_user_login_success(sender, request, user, **kwargs):
|
||||
request.session[BACKEND_SESSION_KEY] = 'OIDCAuthCodeBackend'
|
||||
post_auth_success.send(sender, user=user, request=request)
|
||||
|
||||
|
||||
@receiver(openid_user_login_failed)
|
||||
def on_oidc_user_login_failed(sender, username, request, reason, **kwargs):
|
||||
request.session[BACKEND_SESSION_KEY] = 'OIDCAuthCodeBackend'
|
||||
post_auth_failed.send(sender, username=username, request=request, reason=reason)
|
||||
|
||||
|
||||
@receiver(cas_user_authenticated)
|
||||
def on_cas_user_login_success(sender, request, user, **kwargs):
|
||||
post_auth_success.send(sender, user=user, request=request)
|
||||
request.session[BACKEND_SESSION_KEY] = 'CASBackend'
|
||||
post_auth_success.send(sender, user=user, request=request)
|
||||
|
||||
@@ -0,0 +1,29 @@
|
||||
{% load i18n %}
|
||||
{% spaceless %}
|
||||
<img src="{{ image }}" alt="captcha" class="captcha" />
|
||||
<div class="row" style="padding-bottom: 10px">
|
||||
<div class="col-sm-6">
|
||||
<div class="input-group-prepend">
|
||||
{% if audio %}
|
||||
<a title="{% trans "Play CAPTCHA as audio file" %}" href="{{ audio }}">
|
||||
{% endif %}
|
||||
</div>
|
||||
{% include "django/forms/widgets/multiwidget.html" %}
|
||||
</div>
|
||||
</div>
|
||||
<script>
|
||||
var placeholder = '{% trans "Captcha" %}'
|
||||
function refresh_captcha() {
|
||||
$.getJSON("{% url "captcha-refresh" %}",
|
||||
function (result) {
|
||||
$('.captcha').attr('src', result['image_url']);
|
||||
$('#id_captcha_0').val(result['key'])
|
||||
})
|
||||
}
|
||||
$(document).ready(function () {
|
||||
$('.captcha').click(refresh_captcha)
|
||||
$('#id_captcha_1').addClass('form-control').attr('placeholder', placeholder)
|
||||
})
|
||||
</script>
|
||||
|
||||
{% endspaceless %}
|
||||
@@ -20,7 +20,7 @@
|
||||
<div class="form-group">
|
||||
<input type="text" class="form-control" name="otp_code" placeholder="" required="" autofocus="autofocus">
|
||||
<span class="help-block">
|
||||
{% trans 'Open Google Authenticator and enter the 6-bit dynamic code' %}
|
||||
{% trans 'Open MFA Authenticator and enter the 6-bit dynamic code' %}
|
||||
</span>
|
||||
</div>
|
||||
<button type="submit" class="btn btn-primary block full-width m-b">{% trans 'Next' %}</button>
|
||||
|
||||
@@ -22,6 +22,7 @@ urlpatterns = [
|
||||
name='forgot-password-sendmail-success'),
|
||||
path('password/reset/', users_view.UserResetPasswordView.as_view(), name='reset-password'),
|
||||
path('password/too-simple-flash-msg/', views.FlashPasswdTooSimpleMsgView.as_view(), name='passwd-too-simple-flash-msg'),
|
||||
path('password/has-expired-msg/', views.FlashPasswdHasExpiredMsgView.as_view(), name='passwd-has-expired-flash-msg'),
|
||||
path('password/reset/success/', users_view.UserResetPasswordSuccessView.as_view(), name='reset-password-success'),
|
||||
path('password/verify/', users_view.UserVerifyPasswordView.as_view(), name='user-verify-password'),
|
||||
|
||||
|
||||
@@ -32,7 +32,7 @@ from ..forms import get_user_login_form_cls
|
||||
__all__ = [
|
||||
'UserLoginView', 'UserLogoutView',
|
||||
'UserLoginGuardView', 'UserLoginWaitConfirmView',
|
||||
'FlashPasswdTooSimpleMsgView',
|
||||
'FlashPasswdTooSimpleMsgView', 'FlashPasswdHasExpiredMsgView'
|
||||
]
|
||||
|
||||
|
||||
@@ -87,6 +87,7 @@ class UserLoginView(mixins.AuthMixin, FormView):
|
||||
try:
|
||||
self.check_user_auth(decrypt_passwd=True)
|
||||
except errors.AuthFailedError as e:
|
||||
e = self.check_is_block(raise_exception=False) or e
|
||||
form.add_error(None, e.msg)
|
||||
ip = self.get_request_ip()
|
||||
cache.set(self.key_prefix_captcha.format(ip), 1, 3600)
|
||||
@@ -95,7 +96,7 @@ class UserLoginView(mixins.AuthMixin, FormView):
|
||||
new_form._errors = form.errors
|
||||
context = self.get_context_data(form=new_form)
|
||||
return self.render_to_response(context)
|
||||
except errors.PasswdTooSimple as e:
|
||||
except (errors.PasswdTooSimple, errors.PasswordRequireResetError) as e:
|
||||
return redirect(e.url)
|
||||
self.clear_rsa_key()
|
||||
return self.redirect_to_guard_view()
|
||||
@@ -249,3 +250,18 @@ class FlashPasswdTooSimpleMsgView(TemplateView):
|
||||
'auto_redirect': True,
|
||||
}
|
||||
return self.render_to_response(context)
|
||||
|
||||
|
||||
@method_decorator(never_cache, name='dispatch')
|
||||
class FlashPasswdHasExpiredMsgView(TemplateView):
|
||||
template_name = 'flash_message_standalone.html'
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
context = {
|
||||
'title': _('Please change your password'),
|
||||
'messages': _('Your password has expired, please reset before logging in'),
|
||||
'interval': 5,
|
||||
'redirect_url': request.GET.get('redirect_url'),
|
||||
'auto_redirect': True,
|
||||
}
|
||||
return self.render_to_response(context)
|
||||
|
||||
@@ -25,3 +25,16 @@ def get_objects_if_need(model, pks):
|
||||
logger.error(f'DoesNotExist: <{model.__name__}: {not_found_pks}>')
|
||||
return objs
|
||||
return pks
|
||||
|
||||
|
||||
def get_objects(model, pks):
|
||||
if not pks:
|
||||
return pks
|
||||
|
||||
objs = list(model.objects.filter(id__in=pks))
|
||||
if len(objs) != len(pks):
|
||||
pks = set(pks)
|
||||
exists_pks = {o.id for o in objs}
|
||||
not_found_pks = ','.join(pks - exists_pks)
|
||||
logger.error(f'DoesNotExist: <{model.__name__}: {not_found_pks}>')
|
||||
return objs
|
||||
|
||||
@@ -1,14 +1,16 @@
|
||||
from django.core.exceptions import PermissionDenied, ObjectDoesNotExist as DJObjectDoesNotExist
|
||||
from django.http import Http404
|
||||
from django.utils.translation import gettext
|
||||
from django.db.models.deletion import ProtectedError
|
||||
from rest_framework import exceptions
|
||||
from rest_framework.views import set_rollback
|
||||
from rest_framework.response import Response
|
||||
|
||||
from common.exceptions import JMSObjectDoesNotExist
|
||||
from common.utils import get_logger
|
||||
from common.exceptions import JMSObjectDoesNotExist, ReferencedByOthers
|
||||
from logging import getLogger
|
||||
|
||||
logger = get_logger(__name__)
|
||||
logger = getLogger('drf_exception')
|
||||
unexpected_exception_logger = getLogger('unexpected_exception')
|
||||
|
||||
|
||||
def extract_object_name(exc, index=0):
|
||||
@@ -30,6 +32,8 @@ def common_exception_handler(exc, context):
|
||||
exc = exceptions.PermissionDenied()
|
||||
elif isinstance(exc, DJObjectDoesNotExist):
|
||||
exc = JMSObjectDoesNotExist(object_name=extract_object_name(exc, 0))
|
||||
elif isinstance(exc, ProtectedError):
|
||||
exc = ReferencedByOthers()
|
||||
|
||||
if isinstance(exc, exceptions.APIException):
|
||||
headers = {}
|
||||
@@ -38,12 +42,14 @@ def common_exception_handler(exc, context):
|
||||
if getattr(exc, 'wait', None):
|
||||
headers['Retry-After'] = '%d' % exc.wait
|
||||
|
||||
if isinstance(exc.detail, (list, dict)):
|
||||
data = exc.detail
|
||||
if isinstance(exc.detail, str) and isinstance(exc.get_codes(), str):
|
||||
data = {'detail': exc.detail, 'code': exc.get_codes()}
|
||||
else:
|
||||
data = {'detail': exc.detail}
|
||||
data = exc.detail
|
||||
|
||||
set_rollback()
|
||||
return Response(data, status=exc.status_code, headers=headers)
|
||||
else:
|
||||
unexpected_exception_logger.exception('')
|
||||
|
||||
return None
|
||||
|
||||
@@ -7,6 +7,7 @@ from collections import OrderedDict
|
||||
from django.core.exceptions import PermissionDenied
|
||||
from django.http import Http404
|
||||
from django.utils.encoding import force_text
|
||||
from rest_framework.fields import empty
|
||||
|
||||
from rest_framework.metadata import SimpleMetadata
|
||||
from rest_framework import exceptions, serializers
|
||||
@@ -58,6 +59,10 @@ class SimpleMetadataWithFilters(SimpleMetadata):
|
||||
field_info['type'] = self.label_lookup[field]
|
||||
field_info['required'] = getattr(field, 'required', False)
|
||||
|
||||
default = getattr(field, 'default', False)
|
||||
if default and isinstance(default, (str, int)):
|
||||
field_info['default'] = default
|
||||
|
||||
for attr in self.attrs:
|
||||
value = getattr(field, attr, None)
|
||||
if value is not None and value != '':
|
||||
|
||||
@@ -1 +1,2 @@
|
||||
from .csv import *
|
||||
from .csv import *
|
||||
from .excel import *
|
||||
apps/common/drf/parsers/base.py (new file, 136 lines)
@@ -0,0 +1,136 @@
|
||||
import abc
|
||||
import json
|
||||
import codecs
|
||||
from rest_framework import serializers
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from rest_framework.parsers import BaseParser
|
||||
from rest_framework import status
|
||||
from rest_framework.exceptions import ParseError, APIException
|
||||
from common.utils import get_logger
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
class FileContentOverflowedError(APIException):
|
||||
status_code = status.HTTP_400_BAD_REQUEST
|
||||
default_code = 'file_content_overflowed'
|
||||
default_detail = _('The file content overflowed (The maximum length `{}` bytes)')
|
||||
|
||||
|
||||
class BaseFileParser(BaseParser):
|
||||
|
||||
FILE_CONTENT_MAX_LENGTH = 1024 * 1024 * 10
|
||||
|
||||
serializer_cls = None
|
||||
|
||||
def check_content_length(self, meta):
|
||||
content_length = int(meta.get('CONTENT_LENGTH', meta.get('HTTP_CONTENT_LENGTH', 0)))
|
||||
if content_length > self.FILE_CONTENT_MAX_LENGTH:
|
||||
msg = FileContentOverflowedError.default_detail.format(self.FILE_CONTENT_MAX_LENGTH)
|
||||
logger.error(msg)
|
||||
raise FileContentOverflowedError(msg)
|
||||
|
||||
@staticmethod
|
||||
def get_stream_data(stream):
|
||||
stream_data = stream.read()
|
||||
stream_data = stream_data.strip(codecs.BOM_UTF8)
|
||||
return stream_data
|
||||
|
||||
@abc.abstractmethod
|
||||
def generate_rows(self, stream_data):
|
||||
raise NotImplementedError
|
||||
|
||||
def get_column_titles(self, rows):
|
||||
return next(rows)
|
||||
|
||||
def convert_to_field_names(self, column_titles):
|
||||
fields_map = {}
|
||||
fields = self.serializer_cls().fields
|
||||
fields_map.update({v.label: k for k, v in fields.items()})
|
||||
fields_map.update({k: k for k, _ in fields.items()})
|
||||
field_names = [
|
||||
fields_map.get(column_title.strip('*'), '')
|
||||
for column_title in column_titles
|
||||
]
|
||||
return field_names
|
||||
|
||||
@staticmethod
|
||||
def _replace_chinese_quote(s):
|
||||
trans_table = str.maketrans({
|
||||
'“': '"',
|
||||
'”': '"',
|
||||
'‘': '"',
|
||||
'’': '"',
|
||||
'\'': '"'
|
||||
})
|
||||
return s.translate(trans_table)
|
||||
|
||||
@classmethod
|
||||
def process_row(cls, row):
|
||||
"""
|
||||
Row pre-processing before building the JSON data
|
||||
"""
|
||||
new_row = []
|
||||
for col in row:
|
||||
# Convert full-width (Chinese) quotation marks
|
||||
col = cls._replace_chinese_quote(col)
|
||||
# Convert list/dict literals
|
||||
if isinstance(col, str) and (
|
||||
(col.startswith('[') and col.endswith(']'))
|
||||
or
|
||||
(col.startswith("{") and col.endswith("}"))
|
||||
):
|
||||
col = json.loads(col)
|
||||
new_row.append(col)
|
||||
return new_row
|
||||
|
||||
def process_row_data(self, row_data):
|
||||
"""
|
||||
Row post-processing after building the JSON data
|
||||
"""
|
||||
new_row_data = {}
|
||||
serializer_fields = self.serializer_cls().fields
|
||||
for k, v in row_data.items():
|
||||
if isinstance(v, list) or isinstance(v, dict) or isinstance(v, str) and k.strip() and v.strip():
|
||||
# Handle cases like disk_info arriving as the string '{}'
|
||||
if not isinstance(v, str) and isinstance(serializer_fields[k], serializers.CharField):
|
||||
v = str(v)
|
||||
new_row_data[k] = v
|
||||
return new_row_data
|
||||
|
||||
def generate_data(self, fields_name, rows):
|
||||
data = []
|
||||
for row in rows:
|
||||
# Skip empty rows
|
||||
if not any(row):
|
||||
continue
|
||||
row = self.process_row(row)
|
||||
row_data = dict(zip(fields_name, row))
|
||||
row_data = self.process_row_data(row_data)
|
||||
data.append(row_data)
|
||||
return data
|
||||
|
||||
def parse(self, stream, media_type=None, parser_context=None):
|
||||
parser_context = parser_context or {}
|
||||
|
||||
try:
|
||||
view = parser_context['view']
|
||||
meta = view.request.META
|
||||
self.serializer_cls = view.get_serializer_class()
|
||||
except Exception as e:
|
||||
logger.debug(e, exc_info=True)
|
||||
raise ParseError('The resource does not support imports!')
|
||||
|
||||
self.check_content_length(meta)
|
||||
|
||||
try:
|
||||
stream_data = self.get_stream_data(stream)
|
||||
rows = self.generate_rows(stream_data)
|
||||
column_titles = self.get_column_titles(rows)
|
||||
field_names = self.convert_to_field_names(column_titles)
|
||||
data = self.generate_data(field_names, rows)
|
||||
return data
|
||||
except Exception as e:
|
||||
logger.error(e, exc_info=True)
|
||||
raise ParseError('Parse error! ({})'.format(self.media_type))
|
||||
|
||||
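Illustrative note (not part of the diff): the new BaseFileParser maps uploaded column titles back to serializer field names (a title may be the field label or the field name, with a leading '*' marking required columns) and zips every non-empty row into a dict. Below is a self-contained sketch of that mapping; the fields_map stands in for serializer_cls().fields and its names are assumptions.

    import json

    # Stand-in for the {label: name} plus {name: name} map built from serializer fields.
    fields_map = {'Hostname': 'hostname', 'hostname': 'hostname', 'IP': 'ip', 'ip': 'ip'}

    def convert_to_field_names(column_titles):
        # '*Hostname' (a required label) and 'ip' (already a field name) both resolve.
        return [fields_map.get(title.strip('*'), '') for title in column_titles]

    def generate_data(field_names, rows):
        data = []
        for row in rows:
            if not any(row):  # skip empty rows, as the parser does
                continue
            data.append(dict(zip(field_names, row)))
        return data

    rows = [['web-1', '10.0.0.1'], ['', ''], ['web-2', '10.0.0.2']]
    field_names = convert_to_field_names(['*Hostname', 'ip'])
    print(json.dumps(generate_data(field_names, rows)))
    # -> [{"hostname": "web-1", "ip": "10.0.0.1"}, {"hostname": "web-2", "ip": "10.0.0.2"}]
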
@@ -1,32 +1,13 @@
|
||||
# ~*~ coding: utf-8 ~*~
|
||||
#
|
||||
|
||||
import json
|
||||
import chardet
|
||||
import codecs
|
||||
import unicodecsv
|
||||
|
||||
from django.utils.translation import ugettext as _
|
||||
from rest_framework.parsers import BaseParser
|
||||
from rest_framework.exceptions import ParseError, APIException
|
||||
from rest_framework import status
|
||||
|
||||
from common.utils import get_logger
|
||||
|
||||
logger = get_logger(__file__)
|
||||
from .base import BaseFileParser
|
||||
|
||||
|
||||
class CsvDataTooBig(APIException):
|
||||
status_code = status.HTTP_400_BAD_REQUEST
|
||||
default_code = 'csv_data_too_big'
|
||||
default_detail = _('The max size of CSV is %d bytes')
|
||||
|
||||
|
||||
class JMSCSVParser(BaseParser):
|
||||
"""
|
||||
Parses CSV file to serializer data
|
||||
"""
|
||||
CSV_UPLOAD_MAX_SIZE = 1024 * 1024 * 10
|
||||
class CSVFileParser(BaseFileParser):
|
||||
|
||||
media_type = 'text/csv'
|
||||
|
||||
@@ -38,99 +19,10 @@ class JMSCSVParser(BaseParser):
|
||||
for line in stream.splitlines():
|
||||
yield line
|
||||
|
||||
@staticmethod
|
||||
def _gen_rows(csv_data, charset='utf-8', **kwargs):
|
||||
csv_reader = unicodecsv.reader(csv_data, encoding=charset, **kwargs)
|
||||
def generate_rows(self, stream_data):
|
||||
detect_result = chardet.detect(stream_data)
|
||||
encoding = detect_result.get("encoding", "utf-8")
|
||||
lines = self._universal_newlines(stream_data)
|
||||
csv_reader = unicodecsv.reader(lines, encoding=encoding)
|
||||
for row in csv_reader:
|
||||
if not any(row):  # skip empty rows
|
||||
continue
|
||||
yield row
|
||||
|
||||
@staticmethod
|
||||
def _get_fields_map(serializer_cls):
|
||||
fields_map = {}
|
||||
fields = serializer_cls().fields
|
||||
fields_map.update({v.label: k for k, v in fields.items()})
|
||||
fields_map.update({k: k for k, _ in fields.items()})
|
||||
return fields_map
|
||||
|
||||
@staticmethod
|
||||
def _replace_chinese_quot(str_):
|
||||
trans_table = str.maketrans({
|
||||
'“': '"',
|
||||
'”': '"',
|
||||
'‘': '"',
|
||||
'’': '"',
|
||||
'\'': '"'
|
||||
})
|
||||
return str_.translate(trans_table)
|
||||
|
||||
@classmethod
|
||||
def _process_row(cls, row):
|
||||
"""
|
||||
Row pre-processing before building the JSON data
|
||||
"""
|
||||
_row = []
|
||||
|
||||
for col in row:
|
||||
# Convert list literals
|
||||
if isinstance(col, str) and col.startswith('[') and col.endswith(']'):
|
||||
col = cls._replace_chinese_quot(col)
|
||||
col = json.loads(col)
|
||||
# Convert dict literals
|
||||
if isinstance(col, str) and col.startswith("{") and col.endswith("}"):
|
||||
col = cls._replace_chinese_quot(col)
|
||||
col = json.loads(col)
|
||||
_row.append(col)
|
||||
return _row
|
||||
|
||||
@staticmethod
|
||||
def _process_row_data(row_data):
|
||||
"""
|
||||
Row post-processing after building the JSON data
|
||||
"""
|
||||
_row_data = {}
|
||||
for k, v in row_data.items():
|
||||
if isinstance(v, list) or isinstance(v, dict)\
|
||||
or isinstance(v, str) and k.strip() and v.strip():
|
||||
_row_data[k] = v
|
||||
return _row_data
|
||||
|
||||
def parse(self, stream, media_type=None, parser_context=None):
|
||||
parser_context = parser_context or {}
|
||||
try:
|
||||
view = parser_context['view']
|
||||
meta = view.request.META
|
||||
serializer_cls = view.get_serializer_class()
|
||||
except Exception as e:
|
||||
logger.debug(e, exc_info=True)
|
||||
raise ParseError('The resource does not support imports!')
|
||||
|
||||
content_length = int(meta.get('CONTENT_LENGTH', meta.get('HTTP_CONTENT_LENGTH', 0)))
|
||||
if content_length > self.CSV_UPLOAD_MAX_SIZE:
|
||||
msg = CsvDataTooBig.default_detail % self.CSV_UPLOAD_MAX_SIZE
|
||||
logger.error(msg)
|
||||
raise CsvDataTooBig(msg)
|
||||
|
||||
try:
|
||||
stream_data = stream.read()
|
||||
stream_data = stream_data.strip(codecs.BOM_UTF8)
|
||||
detect_result = chardet.detect(stream_data)
|
||||
encoding = detect_result.get("encoding", "utf-8")
|
||||
binary = self._universal_newlines(stream_data)
|
||||
rows = self._gen_rows(binary, charset=encoding)
|
||||
|
||||
header = next(rows)
|
||||
fields_map = self._get_fields_map(serializer_cls)
|
||||
header = [fields_map.get(name.strip('*'), '') for name in header]
|
||||
|
||||
data = []
|
||||
for row in rows:
|
||||
row = self._process_row(row)
|
||||
row_data = dict(zip(header, row))
|
||||
row_data = self._process_row_data(row_data)
|
||||
data.append(row_data)
|
||||
return data
|
||||
except Exception as e:
|
||||
logger.error(e, exc_info=True)
|
||||
raise ParseError('CSV parse error!')
|
||||
|
||||
apps/common/drf/parsers/excel.py (new file, 14 lines)
@@ -0,0 +1,14 @@
|
||||
import pyexcel
|
||||
from .base import BaseFileParser
|
||||
|
||||
|
||||
class ExcelFileParser(BaseFileParser):
|
||||
|
||||
media_type = 'text/xlsx'
|
||||
|
||||
def generate_rows(self, stream_data):
|
||||
workbook = pyexcel.get_book(file_type='xlsx', file_content=stream_data)
|
||||
# Use the first worksheet by default
|
||||
sheet = workbook.sheet_by_index(0)
|
||||
rows = sheet.rows()
|
||||
return rows
|
||||
@@ -1,6 +1,7 @@
|
||||
from rest_framework import renderers
|
||||
|
||||
from .csv import *
|
||||
from .excel import *
|
||||
|
||||
|
||||
class PassthroughRenderer(renderers.BaseRenderer):
|
||||
|
||||
apps/common/drf/renders/base.py (new file, 134 lines)
@@ -0,0 +1,134 @@
|
||||
import abc
|
||||
from datetime import datetime
|
||||
from rest_framework.renderers import BaseRenderer
|
||||
from rest_framework.utils import encoders, json
|
||||
|
||||
from common.utils import get_logger
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
class BaseFileRenderer(BaseRenderer):
|
||||
# Render template type; one of: ['import', 'update', 'export']
|
||||
template = 'export'
|
||||
serializer = None
|
||||
|
||||
@staticmethod
|
||||
def _check_validation_data(data):
|
||||
detail_key = "detail"
|
||||
if detail_key in data:
|
||||
return False
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def _json_format_response(response_data):
|
||||
return json.dumps(response_data)
|
||||
|
||||
def set_response_disposition(self, response):
|
||||
serializer = self.serializer
|
||||
if response and hasattr(serializer, 'Meta') and hasattr(serializer.Meta, "model"):
|
||||
filename_prefix = serializer.Meta.model.__name__.lower()
|
||||
else:
|
||||
filename_prefix = 'download'
|
||||
now = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
|
||||
filename = "{}_{}.{}".format(filename_prefix, now, self.format)
|
||||
disposition = 'attachment; filename="{}"'.format(filename)
|
||||
response['Content-Disposition'] = disposition
|
||||
|
||||
def get_rendered_fields(self):
|
||||
fields = self.serializer.fields
|
||||
if self.template == 'import':
|
||||
return [v for k, v in fields.items() if not v.read_only and k != "org_id" and k != 'id']
|
||||
elif self.template == 'update':
|
||||
return [v for k, v in fields.items() if not v.read_only and k != "org_id"]
|
||||
else:
|
||||
return [v for k, v in fields.items() if not v.write_only and k != "org_id"]
|
||||
|
||||
@staticmethod
|
||||
def get_column_titles(render_fields):
|
||||
return [
|
||||
'*{}'.format(field.label) if field.required else str(field.label)
|
||||
for field in render_fields
|
||||
]
|
||||
|
||||
def process_data(self, data):
|
||||
results = data['results'] if 'results' in data else data
|
||||
|
||||
if isinstance(results, dict):
|
||||
results = [results]
|
||||
|
||||
if self.template == 'import':
|
||||
results = [results[0]] if results else results
|
||||
|
||||
else:
|
||||
# Limit the number of rows
|
||||
results = results[:10000]
|
||||
# Converts UUID (and similar) fields to strings
|
||||
results = json.loads(json.dumps(results, cls=encoders.JSONEncoder))
|
||||
return results
|
||||
|
||||
@staticmethod
|
||||
def generate_rows(data, render_fields):
|
||||
for item in data:
|
||||
row = []
|
||||
for field in render_fields:
|
||||
value = item.get(field.field_name)
|
||||
value = str(value) if value else ''
|
||||
row.append(value)
|
||||
yield row
|
||||
|
||||
@abc.abstractmethod
|
||||
def initial_writer(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def write_column_titles(self, column_titles):
|
||||
self.write_row(column_titles)
|
||||
|
||||
def write_rows(self, rows):
|
||||
for row in rows:
|
||||
self.write_row(row)
|
||||
|
||||
@abc.abstractmethod
|
||||
def write_row(self, row):
|
||||
raise NotImplementedError
|
||||
|
||||
@abc.abstractmethod
|
||||
def get_rendered_value(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def render(self, data, accepted_media_type=None, renderer_context=None):
|
||||
if data is None:
|
||||
return bytes()
|
||||
|
||||
if not self._check_validation_data(data):
|
||||
return self._json_format_response(data)
|
||||
|
||||
try:
|
||||
renderer_context = renderer_context or {}
|
||||
request = renderer_context['request']
|
||||
response = renderer_context['response']
|
||||
view = renderer_context['view']
|
||||
self.template = request.query_params.get('template', 'export')
|
||||
self.serializer = view.get_serializer()
|
||||
self.set_response_disposition(response)
|
||||
except Exception as e:
|
||||
logger.debug(e, exc_info=True)
|
||||
value = 'The resource does not support export!'.encode('utf-8')
|
||||
return value
|
||||
|
||||
try:
|
||||
rendered_fields = self.get_rendered_fields()
|
||||
column_titles = self.get_column_titles(rendered_fields)
|
||||
data = self.process_data(data)
|
||||
rows = self.generate_rows(data, rendered_fields)
|
||||
self.initial_writer()
|
||||
self.write_column_titles(column_titles)
|
||||
self.write_rows(rows)
|
||||
value = self.get_rendered_value()
|
||||
except Exception as e:
|
||||
logger.debug(e, exc_info=True)
|
||||
value = 'Render error! ({})'.format(self.media_type).encode('utf-8')
|
||||
return value
|
||||
|
||||
return value
|
||||
|
||||
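Illustrative note (not part of the diff): BaseFileRenderer pushes every export format through the same steps, selecting the renderable fields for the requested template, writing a title row ('*' prefixes required fields), then writing one row per object. Below is a standalone sketch of that flow using the csv module; the field definitions and data are invented for the example.

    import csv
    import io
    from collections import namedtuple

    Field = namedtuple('Field', 'field_name label required')
    fields = [Field('hostname', 'Hostname', True), Field('ip', 'IP', False)]
    data = [{'hostname': 'web-1', 'ip': '10.0.0.1'}, {'hostname': 'db-1', 'ip': None}]

    # Column titles: required fields get a leading '*', mirroring get_column_titles().
    titles = ['*{}'.format(f.label) if f.required else str(f.label) for f in fields]

    buffer = io.StringIO()
    writer = csv.writer(buffer)
    writer.writerow(titles)
    for item in data:
        # None / missing values become empty cells, mirroring generate_rows().
        writer.writerow([str(item.get(f.field_name)) if item.get(f.field_name) else '' for f in fields])

    print(buffer.getvalue())
    # *Hostname,IP / web-1,10.0.0.1 / db-1,<empty>
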
@@ -1,83 +1,30 @@
|
||||
# ~*~ coding: utf-8 ~*~
|
||||
#
|
||||
|
||||
import unicodecsv
|
||||
import codecs
|
||||
from datetime import datetime
|
||||
|
||||
import unicodecsv
|
||||
from six import BytesIO
|
||||
from rest_framework.renderers import BaseRenderer
|
||||
from rest_framework.utils import encoders, json
|
||||
|
||||
from common.utils import get_logger
|
||||
|
||||
logger = get_logger(__file__)
|
||||
from .base import BaseFileRenderer
|
||||
|
||||
|
||||
class JMSCSVRender(BaseRenderer):
|
||||
|
||||
class CSVFileRenderer(BaseFileRenderer):
|
||||
media_type = 'text/csv'
|
||||
format = 'csv'
|
||||
|
||||
@staticmethod
|
||||
def _get_show_fields(fields, template):
|
||||
if template == 'import':
|
||||
return [v for k, v in fields.items() if not v.read_only and k != "org_id" and k != 'id']
|
||||
elif template == 'update':
|
||||
return [v for k, v in fields.items() if not v.read_only and k != "org_id"]
|
||||
else:
|
||||
return [v for k, v in fields.items() if not v.write_only and k != "org_id"]
|
||||
writer = None
|
||||
buffer = None
|
||||
|
||||
@staticmethod
|
||||
def _gen_table(data, fields):
|
||||
data = data[:10000]
|
||||
yield ['*{}'.format(f.label) if f.required else f.label for f in fields]
|
||||
def initial_writer(self):
|
||||
csv_buffer = BytesIO()
|
||||
csv_buffer.write(codecs.BOM_UTF8)
|
||||
csv_writer = unicodecsv.writer(csv_buffer, encoding='utf-8')
|
||||
self.buffer = csv_buffer
|
||||
self.writer = csv_writer
|
||||
|
||||
for item in data:
|
||||
row = [item.get(f.field_name) for f in fields]
|
||||
yield row
|
||||
|
||||
def set_response_disposition(self, serializer, context):
|
||||
response = context.get('response')
|
||||
if response and hasattr(serializer, 'Meta') and \
|
||||
hasattr(serializer.Meta, "model"):
|
||||
model_name = serializer.Meta.model.__name__.lower()
|
||||
now = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
|
||||
filename = "{}_{}.csv".format(model_name, now)
|
||||
disposition = 'attachment; filename="{}"'.format(filename)
|
||||
response['Content-Disposition'] = disposition
|
||||
|
||||
def render(self, data, media_type=None, renderer_context=None):
|
||||
renderer_context = renderer_context or {}
|
||||
request = renderer_context['request']
|
||||
template = request.query_params.get('template', 'export')
|
||||
view = renderer_context['view']
|
||||
|
||||
if isinstance(data, dict):
|
||||
data = data.get("results", [])
|
||||
|
||||
if template == 'import':
|
||||
data = [data[0]] if data else data
|
||||
|
||||
data = json.loads(json.dumps(data, cls=encoders.JSONEncoder))
|
||||
|
||||
try:
|
||||
serializer = view.get_serializer()
|
||||
self.set_response_disposition(serializer, renderer_context)
|
||||
except Exception as e:
|
||||
logger.debug(e, exc_info=True)
|
||||
value = 'The resource not support export!'.encode('utf-8')
|
||||
else:
|
||||
fields = serializer.fields
|
||||
show_fields = self._get_show_fields(fields, template)
|
||||
table = self._gen_table(data, show_fields)
|
||||
|
||||
csv_buffer = BytesIO()
|
||||
csv_buffer.write(codecs.BOM_UTF8)
|
||||
csv_writer = unicodecsv.writer(csv_buffer, encoding='utf-8')
|
||||
for row in table:
|
||||
csv_writer.writerow(row)
|
||||
|
||||
value = csv_buffer.getvalue()
|
||||
def write_row(self, row):
|
||||
self.writer.writerow(row)
|
||||
|
||||
def get_rendered_value(self):
|
||||
value = self.buffer.getvalue()
|
||||
return value
|
||||
|
||||
apps/common/drf/renders/excel.py (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
from openpyxl import Workbook
|
||||
from openpyxl.writer.excel import save_virtual_workbook
|
||||
from openpyxl.cell.cell import ILLEGAL_CHARACTERS_RE
|
||||
|
||||
from .base import BaseFileRenderer
|
||||
|
||||
|
||||
class ExcelFileRenderer(BaseFileRenderer):
|
||||
media_type = "application/xlsx"
|
||||
format = "xlsx"
|
||||
|
||||
wb = None
|
||||
ws = None
|
||||
row_count = 0
|
||||
|
||||
def initial_writer(self):
|
||||
self.wb = Workbook()
|
||||
self.ws = self.wb.active
|
||||
|
||||
def write_row(self, row):
|
||||
self.row_count += 1
|
||||
column_count = 0
|
||||
for cell_value in row:
|
||||
# Strip characters that are illegal in Excel cells
|
||||
column_count += 1
|
||||
cell_value = ILLEGAL_CHARACTERS_RE.sub(r'', cell_value)
|
||||
self.ws.cell(row=self.row_count, column=column_count, value=cell_value)
|
||||
|
||||
def get_rendered_value(self):
|
||||
value = save_virtual_workbook(self.wb)
|
||||
return value
|
||||
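Illustrative note (not part of the diff): the ExcelFileRenderer writes each row into an openpyxl Workbook and scrubs characters that xlsx cells cannot store. Below is a minimal sketch of the same idea; it saves to an in-memory buffer with Workbook.save() simply to keep the example short, and the row data is made up.

    from io import BytesIO
    from openpyxl import Workbook
    from openpyxl.cell.cell import ILLEGAL_CHARACTERS_RE

    rows = [['*Hostname', 'Comment'], ['web-1', 'ok\x00ay']]  # '\x00' is illegal in xlsx cells

    wb = Workbook()
    ws = wb.active
    for row_index, row in enumerate(rows, start=1):
        for column_index, value in enumerate(row, start=1):
            # Drop characters openpyxl would reject before writing the cell.
            ws.cell(row=row_index, column=column_index, value=ILLEGAL_CHARACTERS_RE.sub('', value))

    buffer = BytesIO()
    wb.save(buffer)  # the same bytes a download response would stream back
    print(len(buffer.getvalue()), 'bytes of xlsx')
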
@@ -33,3 +33,9 @@ class Timeout(JMSException):
|
||||
class M2MReverseNotAllowed(JMSException):
|
||||
status_code = status.HTTP_400_BAD_REQUEST
|
||||
default_detail = _('M2M reverse not allowed')
|
||||
|
||||
|
||||
class ReferencedByOthers(JMSException):
|
||||
status_code = status.HTTP_400_BAD_REQUEST
|
||||
default_code = 'referenced_by_others'
|
||||
default_detail = _('Is referenced by other objects and cannot be deleted')
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import json
|
||||
|
||||
from django import forms
|
||||
from django.utils import six
|
||||
import six
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.utils.translation import ugettext as _
|
||||
from ..utils import signer
|
||||
|
||||
@@ -5,7 +5,7 @@ from django.db import models
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.utils.encoding import force_text
|
||||
|
||||
from ..utils import signer, aes_crypto, aes_ecb_crypto
|
||||
from ..utils import signer, crypto
|
||||
|
||||
|
||||
__all__ = [
|
||||
@@ -31,7 +31,7 @@ class JsonMixin:
|
||||
def json_encode(data):
|
||||
return json.dumps(data)
|
||||
|
||||
def from_db_value(self, value, expression, connection, context):
|
||||
def from_db_value(self, value, expression, connection, context=None):
|
||||
if value is None:
|
||||
return value
|
||||
return self.json_decode(value)
|
||||
@@ -54,7 +54,7 @@ class JsonMixin:
|
||||
class JsonTypeMixin(JsonMixin):
|
||||
tp = dict
|
||||
|
||||
def from_db_value(self, value, expression, connection, context):
|
||||
def from_db_value(self, value, expression, connection, context=None):
|
||||
value = super().from_db_value(value, expression, connection, context)
|
||||
if not isinstance(value, self.tp):
|
||||
value = self.tp()
|
||||
@@ -116,27 +116,12 @@ class EncryptMixin:
|
||||
def decrypt_from_signer(self, value):
|
||||
return signer.unsign(value) or ''
|
||||
|
||||
def decrypt_from_aes(self, value):
|
||||
"""
|
||||
Try GCM-mode decryption first; if that fails, fall back to the legacy ECB mode
|
||||
"""
|
||||
try:
|
||||
return aes_crypto.decrypt(value)
|
||||
except ValueError:
|
||||
pass
|
||||
|
||||
try:
|
||||
return aes_ecb_crypto.decrypt(value)
|
||||
except (TypeError, ValueError, UnicodeDecodeError):
|
||||
pass
|
||||
|
||||
def from_db_value(self, value, expression, connection, context):
|
||||
def from_db_value(self, value, expression, connection, context=None):
|
||||
if value is None:
|
||||
return value
|
||||
value = force_text(value)
|
||||
|
||||
# Prefer AES decryption
|
||||
plain_value = self.decrypt_from_aes(value)
|
||||
plain_value = crypto.decrypt(value)
|
||||
|
||||
# If that failed, fall back to the legacy signer
|
||||
if not plain_value:
|
||||
@@ -158,7 +143,7 @@ class EncryptMixin:
|
||||
value = sp.get_prep_value(value)
|
||||
value = force_text(value)
|
||||
# Use the new encryption scheme
|
||||
return aes_crypto.encrypt(value)
|
||||
return crypto.encrypt(value)
|
||||
|
||||
|
||||
class EncryptTextField(EncryptMixin, models.TextField):
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
#
|
||||
|
||||
from rest_framework import serializers
|
||||
from django.utils import six
|
||||
import six
|
||||
|
||||
|
||||
__all__ = [
|
||||
|
||||
@@ -11,8 +11,6 @@ import time
|
||||
import ipaddress
|
||||
import psutil
|
||||
|
||||
from .timezone import dt_formater
|
||||
|
||||
|
||||
UUID_PATTERN = re.compile(r'\w{8}(-\w{4}){3}-\w{12}')
|
||||
ipip_db = None
|
||||
@@ -43,7 +41,7 @@ def timesince(dt, since='', default="just now"):
|
||||
3 days, 5 hours.
|
||||
"""
|
||||
|
||||
if since is '':
|
||||
if not since:
|
||||
since = datetime.datetime.utcnow()
|
||||
|
||||
if since is None:
|
||||
|
||||
@@ -2,8 +2,58 @@ import base64
|
||||
from Crypto.Cipher import AES
|
||||
from Crypto.Util.Padding import pad
|
||||
from Crypto.Random import get_random_bytes
|
||||
from gmssl.sm4 import CryptSM4, SM4_ENCRYPT, SM4_DECRYPT
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
|
||||
|
||||
def process_key(key):
|
||||
"""
|
||||
Return a 32-byte key
|
||||
"""
|
||||
if not isinstance(key, bytes):
|
||||
key = bytes(key, encoding='utf-8')
|
||||
|
||||
if len(key) >= 32:
|
||||
return key[:32]
|
||||
|
||||
return pad(key, 32)
|
||||
|
||||
|
||||
class BaseCrypto:
|
||||
|
||||
def encrypt(self, text):
|
||||
return base64.urlsafe_b64encode(
|
||||
self._encrypt(bytes(text, encoding='utf8'))
|
||||
).decode('utf8')
|
||||
|
||||
def _encrypt(self, data: bytes) -> bytes:
|
||||
raise NotImplementedError
|
||||
|
||||
def decrypt(self, text):
|
||||
return self._decrypt(
|
||||
base64.urlsafe_b64decode(bytes(text, encoding='utf8'))
|
||||
).decode('utf8')
|
||||
|
||||
def _decrypt(self, data: bytes) -> bytes:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class GMSM4EcbCrypto(BaseCrypto):
|
||||
def __init__(self, key):
|
||||
self.key = process_key(key)
|
||||
self.sm4_encryptor = CryptSM4()
|
||||
self.sm4_encryptor.set_key(self.key, SM4_ENCRYPT)
|
||||
|
||||
self.sm4_decryptor = CryptSM4()
|
||||
self.sm4_decryptor.set_key(self.key, SM4_DECRYPT)
|
||||
|
||||
def _encrypt(self, data: bytes) -> bytes:
|
||||
return self.sm4_encryptor.crypt_ecb(data)
|
||||
|
||||
def _decrypt(self, data: bytes) -> bytes:
|
||||
return self.sm4_decryptor.crypt_ecb(data)
|
||||
|
||||
|
||||
class AESCrypto:
|
||||
@@ -52,20 +102,7 @@ class AESCryptoGCM:
|
||||
"""
|
||||
|
||||
def __init__(self, key):
|
||||
self.key = self.process_key(key)
|
||||
|
||||
@staticmethod
|
||||
def process_key(key):
|
||||
"""
|
||||
Return a 32-byte key
|
||||
"""
|
||||
if not isinstance(key, bytes):
|
||||
key = bytes(key, encoding='utf-8')
|
||||
|
||||
if len(key) >= 32:
|
||||
return key[:32]
|
||||
|
||||
return pad(key, 32)
|
||||
self.key = process_key(key)
|
||||
|
||||
def encrypt(self, text):
|
||||
"""
|
||||
@@ -110,5 +147,50 @@ def get_aes_crypto(key=None, mode='GCM'):
|
||||
return a
|
||||
|
||||
|
||||
def get_gm_sm4_ecb_crypto(key=None):
|
||||
key = key or settings.SECRET_KEY
|
||||
return GMSM4EcbCrypto(key)
|
||||
|
||||
|
||||
aes_ecb_crypto = get_aes_crypto(mode='ECB')
|
||||
aes_crypto = get_aes_crypto(mode='GCM')
|
||||
gm_sm4_ecb_crypto = get_gm_sm4_ecb_crypto()
|
||||
|
||||
|
||||
class Crypto:
|
||||
cryptoes = {
|
||||
'aes_ecb': aes_ecb_crypto,
|
||||
'aes_gcm': aes_crypto,
|
||||
'aes': aes_crypto,
|
||||
'gm_sm4_ecb': gm_sm4_ecb_crypto,
|
||||
'gm': gm_sm4_ecb_crypto,
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
cryptoes = self.__class__.cryptoes.copy()
|
||||
crypto = cryptoes.pop(settings.SECURITY_DATA_CRYPTO_ALGO, None)
|
||||
if crypto is None:
|
||||
raise ImproperlyConfigured(
|
||||
f'Crypto method not supported {settings.SECURITY_DATA_CRYPTO_ALGO}'
|
||||
)
|
||||
self.cryptoes = [crypto, *cryptoes.values()]
|
||||
|
||||
@property
|
||||
def encryptor(self):
|
||||
return self.cryptoes[0]
|
||||
|
||||
def encrypt(self, text):
|
||||
return self.encryptor.encrypt(text)
|
||||
|
||||
def decrypt(self, text):
|
||||
for decryptor in self.cryptoes:
|
||||
try:
|
||||
origin_text = decryptor.decrypt(text)
|
||||
if origin_text:
|
||||
# Some algorithms fail silently and return an empty string instead of raising
|
||||
return origin_text
|
||||
except (TypeError, ValueError, UnicodeDecodeError, IndexError):
|
||||
continue
|
||||
|
||||
|
||||
crypto = Crypto()
|
||||
|
||||
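Illustrative note (not part of the diff): the Crypto wrapper above always encrypts with the algorithm selected by SECURITY_DATA_CRYPTO_ALGO, but on decryption walks through every configured algorithm until one returns a non-empty result, so values written under an older scheme stay readable. Below is a dependency-free sketch of that select-then-fall-back pattern; the two toy codecs stand in for the real AES/SM4 classes and are not part of the project.

    import base64

    class Base64Codec:
        def encrypt(self, text):
            return base64.urlsafe_b64encode(text.encode()).decode()
        def decrypt(self, text):
            return base64.urlsafe_b64decode(text.encode()).decode()

    class HexCodec:
        def encrypt(self, text):
            return text.encode().hex()
        def decrypt(self, text):
            return bytes.fromhex(text).decode()

    class Crypto:
        codecs = {'b64': Base64Codec(), 'hex': HexCodec()}

        def __init__(self, preferred):
            others = dict(self.codecs)
            first = others.pop(preferred)          # the configured algorithm goes first
            self.chain = [first, *others.values()]

        def encrypt(self, text):
            return self.chain[0].encrypt(text)     # always encrypt with the preferred one

        def decrypt(self, text):
            for codec in self.chain:               # try each codec until one succeeds
                try:
                    plain = codec.decrypt(text)
                    if plain:                      # some codecs fail silently with ''
                        return plain
                except (ValueError, UnicodeDecodeError):
                    continue

    old = Crypto('hex').encrypt('secret')          # value written under the old scheme
    print(Crypto('b64').decrypt(old))              # still readable after switching -> 'secret'
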
@@ -10,13 +10,15 @@ UUID_PATTERN = re.compile(r'[0-9a-zA-Z\-]{36}')
|
||||
|
||||
|
||||
def reverse(view_name, urlconf=None, args=None, kwargs=None,
|
||||
current_app=None, external=False):
|
||||
current_app=None, external=False, api_to_ui=False):
|
||||
url = dj_reverse(view_name, urlconf=urlconf, args=args,
|
||||
kwargs=kwargs, current_app=current_app)
|
||||
|
||||
if external:
|
||||
site_url = settings.SITE_URL
|
||||
url = site_url.strip('/') + url
|
||||
if api_to_ui:
|
||||
url = url.replace('api/v1', 'ui/#').rstrip('/')
|
||||
return url
|
||||
|
||||
|
||||
@@ -48,3 +50,11 @@ def union_queryset(*args, base_queryset=None):
|
||||
base_queryset = args[0].model.objects
|
||||
queryset = base_queryset.filter(id__in=queryset_id)
|
||||
return queryset
|
||||
|
||||
|
||||
def get_log_keep_day(s, defaults=200):
|
||||
try:
|
||||
days = int(getattr(settings, s))
|
||||
except ValueError:
|
||||
days = defaults
|
||||
return days
|
||||
|
||||
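Illustrative note (not part of the diff): with api_to_ui=True the extended reverse() rewrites an API path into its UI-router counterpart; the transformation itself is plain string surgery. A tiny sketch on a hard-coded URL follows; the host and route are invented.

    def api_to_ui_url(url):
        # '/api/v1/...' becomes '/ui/#/...', and the trailing slash is dropped,
        # mirroring reverse(..., api_to_ui=True) in the hunk above.
        return url.replace('api/v1', 'ui/#').rstrip('/')

    print(api_to_ui_url('https://jms.example.com/api/v1/tickets/tickets/42/'))
    # -> https://jms.example.com/ui/#/tickets/tickets/42
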
apps/common/utils/inspect.py (new file, 10 lines)
@@ -0,0 +1,10 @@
|
||||
import inspect
|
||||
|
||||
|
||||
def copy_function_args(func, locals_dict: dict):
|
||||
signature = inspect.signature(func)
|
||||
keys = signature.parameters.keys()
|
||||
kwargs = {}
|
||||
for k in keys:
|
||||
kwargs[k] = locals_dict.get(k)
|
||||
return kwargs
|
||||
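Illustrative note (not part of the diff): copy_function_args reads a callable's signature and picks the matching values out of a locals() snapshot; DistributedLock uses it to rebuild an identical lock for every decorated call. Below is a small self-contained demo; the helper is repeated inline so the snippet runs on its own, and the Greeter class is made up.

    import inspect

    def copy_function_args(func, locals_dict: dict):
        # Collect {parameter name: value} for every parameter of func's signature.
        kwargs = {}
        for name in inspect.signature(func).parameters.keys():
            kwargs[name] = locals_dict.get(name)
        return kwargs

    class Greeter:
        def __init__(self, name, punctuation='!'):
            # Snapshot the constructor arguments so an identical object can be rebuilt later.
            self.kwargs_copy = copy_function_args(self.__init__, locals())
            self.name, self.punctuation = name, punctuation

    g = Greeter('world')
    print(g.kwargs_copy)                  # {'name': 'world', 'punctuation': '!'}
    clone = Greeter(**g.kwargs_copy)      # fresh instance with the same arguments
    print(clone.name, clone.punctuation)  # world !
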
apps/common/utils/lock.py (new file, 55 lines)
@@ -0,0 +1,55 @@
|
||||
from functools import wraps
|
||||
|
||||
from redis_lock import Lock as RedisLock
|
||||
from redis import Redis
|
||||
|
||||
from common.utils import get_logger
|
||||
from common.utils.inspect import copy_function_args
|
||||
from apps.jumpserver.const import CONFIG
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
|
||||
class AcquireFailed(RuntimeError):
|
||||
pass
|
||||
|
||||
|
||||
class DistributedLock(RedisLock):
|
||||
def __init__(self, name, blocking=True, expire=60*2, auto_renewal=True):
|
||||
"""
|
||||
A distributed lock built on Redis.

:param name:
Name of the lock; it must be globally unique.
:param blocking:
Only takes effect when the lock is used as a decorator or in a `with` block.
:param expire:
Expiry time of the lock. Note that the lock is not necessarily released at that moment; there are two cases:
when `auto_renewal=False`, the lock is released when it expires;
when `auto_renewal=True`, if the program has not released the lock before it expires, its lifetime is extended. This guards against deadlocks caused by a process terminating unexpectedly without releasing the lock.
"""
|
||||
self.kwargs_copy = copy_function_args(self.__init__, locals())
|
||||
redis = Redis(host=CONFIG.REDIS_HOST, port=CONFIG.REDIS_PORT, password=CONFIG.REDIS_PASSWORD)
|
||||
super().__init__(redis_client=redis, name=name, expire=expire, auto_renewal=auto_renewal)
|
||||
self._blocking = blocking
|
||||
|
||||
def __enter__(self):
|
||||
acquired = self.acquire(blocking=self._blocking)
|
||||
if self._blocking and not acquired:
|
||||
raise EnvironmentError("Lock wasn't acquired, but blocking=True")
|
||||
if not acquired:
|
||||
raise AcquireFailed
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type=None, exc_value=None, traceback=None):
|
||||
self.release()
|
||||
|
||||
def __call__(self, func):
|
||||
@wraps(func)
|
||||
def inner(*args, **kwds):
|
||||
# Create a fresh lock object for each call
|
||||
with self.__class__(**self.kwargs_copy):
|
||||
return func(*args, **kwds)
|
||||
|
||||
return inner
|
||||
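Illustrative note (not part of the diff): a usage sketch based only on the DistributedLock API shown above. It assumes the JumpServer settings and a reachable Redis, and the lock names are invented, so treat it as a reading aid rather than standalone runnable code.

    from common.utils.lock import DistributedLock

    # As a context manager: block until the lock is acquired, auto-renewing while held.
    with DistributedLock('celery:clean-expired-sessions', expire=120, auto_renewal=True):
        pass  # critical section goes here

    # As a decorator: every call rebuilds an equivalent lock from kwargs_copy; with
    # blocking=False a second concurrent caller gets AcquireFailed immediately.
    @DistributedLock('report:rebuild', blocking=False)
    def rebuild_report():
        ...
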
apps/common/utils/strings.py (new file, 5 lines)
@@ -0,0 +1,5 @@
|
||||
import re
|
||||
|
||||
|
||||
def no_special_chars(s):
|
||||
return bool(re.match(r'\w+$', s))
|
||||
@@ -2,19 +2,21 @@
|
||||
#
|
||||
from django.core.validators import RegexValidator
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
from rest_framework.validators import (
|
||||
UniqueTogetherValidator, ValidationError
|
||||
)
|
||||
from rest_framework import serializers
|
||||
|
||||
from common.utils.strings import no_special_chars
|
||||
|
||||
|
||||
alphanumeric = RegexValidator(r'^[0-9a-zA-Z_@\-\.]*$', _('Special char not allowed'))
|
||||
|
||||
|
||||
class ProjectUniqueValidator(UniqueTogetherValidator):
|
||||
def __call__(self, attrs):
|
||||
def __call__(self, attrs, serializer):
|
||||
try:
|
||||
super().__call__(attrs)
|
||||
super().__call__(attrs, serializer)
|
||||
except ValidationError as e:
|
||||
errors = {}
|
||||
for field in self.fields:
|
||||
@@ -22,3 +24,11 @@ class ProjectUniqueValidator(UniqueTogetherValidator):
|
||||
continue
|
||||
errors[field] = _('This field must be unique.')
|
||||
raise ValidationError(errors)
|
||||
|
||||
|
||||
class NoSpecialChars:
|
||||
def __call__(self, value):
|
||||
if not no_special_chars(value):
|
||||
raise serializers.ValidationError(
|
||||
_("Should not contains special characters")
|
||||
)
|
||||
|
||||
@@ -2,13 +2,14 @@ from django.core.cache import cache
|
||||
from django.utils import timezone
|
||||
from django.utils.timesince import timesince
|
||||
from django.db.models import Count, Max
|
||||
from django.http.response import JsonResponse
|
||||
from django.http.response import JsonResponse, HttpResponse
|
||||
from rest_framework.views import APIView
|
||||
from collections import Counter
|
||||
|
||||
from users.models import User
|
||||
from assets.models import Asset
|
||||
from terminal.models import Session
|
||||
from terminal.utils import ComponentsPrometheusMetricsUtil
|
||||
from orgs.utils import current_org
|
||||
from common.permissions import IsOrgAdmin, IsOrgAuditor
|
||||
from common.utils import lazyproperty
|
||||
@@ -305,3 +306,11 @@ class IndexApi(TotalCountMixin, DatesLoginMetricMixin, APIView):
|
||||
return JsonResponse(data, status=200)
|
||||
|
||||
|
||||
class PrometheusMetricsApi(APIView):
|
||||
permission_classes = ()
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
util = ComponentsPrometheusMetricsUtil()
|
||||
metrics_text = util.get_prometheus_metrics_text()
|
||||
return HttpResponse(metrics_text, content_type='text/plain; version=0.0.4; charset=utf-8')
|
||||
|
||||
|
||||
@@ -16,7 +16,7 @@ import json
|
||||
import yaml
|
||||
from importlib import import_module
|
||||
from django.urls import reverse_lazy
|
||||
from django.contrib.staticfiles.templatetags.staticfiles import static
|
||||
from django.templatetags.static import static
|
||||
from urllib.parse import urljoin, urlparse
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
@@ -224,7 +224,7 @@ class Config(dict):
|
||||
'TERMINAL_HEARTBEAT_INTERVAL': 20,
|
||||
'TERMINAL_ASSET_LIST_SORT_BY': 'hostname',
|
||||
'TERMINAL_ASSET_LIST_PAGE_SIZE': 'auto',
|
||||
'TERMINAL_SESSION_KEEP_DURATION': 9999,
|
||||
'TERMINAL_SESSION_KEEP_DURATION': 200,
|
||||
'TERMINAL_HOST_KEY': '',
|
||||
'TERMINAL_TELNET_REGEX': '',
|
||||
'TERMINAL_COMMAND_STORAGE': {},
|
||||
@@ -244,12 +244,18 @@ class Config(dict):
|
||||
'SECURITY_PASSWORD_SPECIAL_CHAR': False,
|
||||
'SECURITY_LOGIN_CHALLENGE_ENABLED': False,
|
||||
'SECURITY_LOGIN_CAPTCHA_ENABLED': True,
|
||||
'SECURITY_DATA_CRYPTO_ALGO': 'aes',
|
||||
'SECURITY_INSECURE_COMMAND': False,
|
||||
'SECURITY_INSECURE_COMMAND_LEVEL': 5,
|
||||
'SECURITY_INSECURE_COMMAND_EMAIL_RECEIVER': '',
|
||||
|
||||
'HTTP_BIND_HOST': '0.0.0.0',
|
||||
'HTTP_LISTEN_PORT': 8080,
|
||||
'WS_LISTEN_PORT': 8070,
|
||||
'LOGIN_LOG_KEEP_DAYS': 9999,
|
||||
'TASK_LOG_KEEP_DAYS': 10,
|
||||
'LOGIN_LOG_KEEP_DAYS': 200,
|
||||
'TASK_LOG_KEEP_DAYS': 90,
|
||||
'OPERATE_LOG_KEEP_DAYS': 200,
|
||||
'FTP_LOG_KEEP_DAYS': 200,
|
||||
'ASSETS_PERM_CACHE_TIME': 3600 * 24,
|
||||
'SECURITY_MFA_VERIFY_TTL': 3600,
|
||||
'ASSETS_PERM_CACHE_ENABLE': HAS_XPACK,
|
||||
@@ -366,7 +372,7 @@ class Config(dict):
|
||||
tp = type(default_value)
|
||||
# Special handling for bool values
|
||||
if tp is bool and isinstance(v, str):
|
||||
if v in ("true", "True", "1"):
|
||||
if v.lower() in ("true", "1"):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
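Illustrative note (not part of the diff): the config loader now treats string booleans case-insensitively, so 'true', 'True', 'TRUE' and '1' all parse as True. A standalone version of that coercion for values coming from environment variables or config files:

    def coerce_bool(value, default=False):
        # Strings from env vars / config files are compared case-insensitively,
        # matching the `v.lower() in ("true", "1")` check in the hunk above.
        if isinstance(value, bool):
            return value
        if isinstance(value, str):
            return value.lower() in ('true', '1')
        return default

    print(coerce_bool('True'), coerce_bool('TRUE'), coerce_bool('0'), coerce_bool(None))
    # -> True True False False
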
@@ -1,6 +1,6 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from django.contrib.staticfiles.templatetags.staticfiles import static
|
||||
from django.templatetags.static import static
|
||||
from django.conf import settings
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
|
||||
@@ -64,6 +64,7 @@ INSTALLED_APPS = [
|
||||
'django.contrib.sessions',
|
||||
'django.contrib.messages',
|
||||
'django.contrib.staticfiles',
|
||||
'django.forms',
|
||||
]
|
||||
|
||||
|
||||
@@ -242,6 +243,9 @@ CACHES = {
|
||||
'host': CONFIG.REDIS_HOST,
|
||||
'port': CONFIG.REDIS_PORT,
|
||||
'db': CONFIG.REDIS_DB_CACHE,
|
||||
},
|
||||
'OPTIONS': {
|
||||
"REDIS_CLIENT_KWARGS": {"health_check_interval": 30}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -54,6 +54,10 @@ SECURITY_VIEW_AUTH_NEED_MFA = CONFIG.SECURITY_VIEW_AUTH_NEED_MFA
|
||||
SECURITY_SERVICE_ACCOUNT_REGISTRATION = DYNAMIC.SECURITY_SERVICE_ACCOUNT_REGISTRATION
|
||||
SECURITY_LOGIN_CAPTCHA_ENABLED = CONFIG.SECURITY_LOGIN_CAPTCHA_ENABLED
|
||||
SECURITY_LOGIN_CHALLENGE_ENABLED = CONFIG.SECURITY_LOGIN_CHALLENGE_ENABLED
|
||||
SECURITY_DATA_CRYPTO_ALGO = CONFIG.SECURITY_DATA_CRYPTO_ALGO
|
||||
SECURITY_INSECURE_COMMAND = DYNAMIC.SECURITY_INSECURE_COMMAND
|
||||
SECURITY_INSECURE_COMMAND_LEVEL = CONFIG.SECURITY_INSECURE_COMMAND_LEVEL
|
||||
SECURITY_INSECURE_COMMAND_EMAIL_RECEIVER = DYNAMIC.SECURITY_INSECURE_COMMAND_EMAIL_RECEIVER
|
||||
|
||||
# Terminal other setting
|
||||
TERMINAL_PASSWORD_AUTH = DYNAMIC.TERMINAL_PASSWORD_AUTH
|
||||
|
||||
@@ -11,14 +11,17 @@ REST_FRAMEWORK = {
|
||||
),
|
||||
'DEFAULT_RENDERER_CLASSES': (
|
||||
'rest_framework.renderers.JSONRenderer',
|
||||
'rest_framework.renderers.BrowsableAPIRenderer',
|
||||
'common.drf.renders.JMSCSVRender',
|
||||
# 'rest_framework.renderers.BrowsableAPIRenderer',
|
||||
'common.drf.renders.CSVFileRenderer',
|
||||
'common.drf.renders.ExcelFileRenderer',
|
||||
|
||||
),
|
||||
'DEFAULT_PARSER_CLASSES': (
|
||||
'rest_framework.parsers.JSONParser',
|
||||
'rest_framework.parsers.FormParser',
|
||||
'rest_framework.parsers.MultiPartParser',
|
||||
'common.drf.parsers.JMSCSVParser',
|
||||
'common.drf.parsers.CSVFileParser',
|
||||
'common.drf.parsers.ExcelFileParser',
|
||||
'rest_framework.parsers.FileUploadParser',
|
||||
),
|
||||
'DEFAULT_AUTHENTICATION_CLASSES': (
|
||||
@@ -61,10 +64,10 @@ SWAGGER_SETTINGS = {
|
||||
|
||||
|
||||
# Captcha settings, more see https://django-simple-captcha.readthedocs.io/en/latest/advanced.html
|
||||
CAPTCHA_IMAGE_SIZE = (80, 33)
|
||||
CAPTCHA_IMAGE_SIZE = (140, 34)
|
||||
CAPTCHA_FOREGROUND_COLOR = '#001100'
|
||||
CAPTCHA_NOISE_FUNCTIONS = ('captcha.helpers.noise_dots',)
|
||||
CAPTCHA_TEST_MODE = CONFIG.CAPTCHA_TEST_MODE
|
||||
CAPTCHA_CHALLENGE_FUNCT = 'captcha.helpers.math_challenge'
|
||||
|
||||
# Django bootstrap3 setting, more see http://django-bootstrap3.readthedocs.io/en/latest/settings.html
|
||||
BOOTSTRAP3 = {
|
||||
|
||||
@@ -5,6 +5,8 @@ from ..const import PROJECT_DIR, CONFIG
|
||||
|
||||
LOG_DIR = os.path.join(PROJECT_DIR, 'logs')
|
||||
JUMPSERVER_LOG_FILE = os.path.join(LOG_DIR, 'jumpserver.log')
|
||||
DRF_EXCEPTION_LOG_FILE = os.path.join(LOG_DIR, 'drf_exception.log')
|
||||
UNEXPECTED_EXCEPTION_LOG_FILE = os.path.join(LOG_DIR, 'unexpected_exception.log')
|
||||
ANSIBLE_LOG_FILE = os.path.join(LOG_DIR, 'ansible.log')
|
||||
GUNICORN_LOG_FILE = os.path.join(LOG_DIR, 'gunicorn.log')
|
||||
LOG_LEVEL = CONFIG.LOG_LEVEL
|
||||
@@ -20,6 +22,10 @@ LOGGING = {
|
||||
'datefmt': '%Y-%m-%d %H:%M:%S',
|
||||
'format': '%(asctime)s [%(module)s %(levelname)s] %(message)s',
|
||||
},
|
||||
'exception': {
|
||||
'datefmt': '%Y-%m-%d %H:%M:%S',
|
||||
'format': '\n%(asctime)s [%(levelname)s] %(message)s',
|
||||
},
|
||||
'simple': {
|
||||
'format': '%(levelname)s %(message)s'
|
||||
},
|
||||
@@ -58,6 +64,24 @@ LOGGING = {
|
||||
'backupCount': 7,
|
||||
'filename': ANSIBLE_LOG_FILE,
|
||||
},
|
||||
'drf_exception': {
|
||||
'encoding': 'utf8',
|
||||
'level': 'DEBUG',
|
||||
'class': 'logging.handlers.RotatingFileHandler',
|
||||
'formatter': 'exception',
|
||||
'maxBytes': 1024 * 1024 * 100,
|
||||
'backupCount': 7,
|
||||
'filename': DRF_EXCEPTION_LOG_FILE,
|
||||
},
|
||||
'unexpected_exception': {
|
||||
'encoding': 'utf8',
|
||||
'level': 'DEBUG',
|
||||
'class': 'logging.handlers.RotatingFileHandler',
|
||||
'formatter': 'exception',
|
||||
'maxBytes': 1024 * 1024 * 100,
|
||||
'backupCount': 7,
|
||||
'filename': UNEXPECTED_EXCEPTION_LOG_FILE,
|
||||
},
|
||||
'syslog': {
|
||||
'level': 'INFO',
|
||||
'class': 'logging.NullHandler',
|
||||
@@ -84,6 +108,14 @@ LOGGING = {
|
||||
'handlers': ['console', 'file'],
|
||||
'level': LOG_LEVEL,
|
||||
},
|
||||
'drf_exception': {
|
||||
'handlers': ['console', 'drf_exception'],
|
||||
'level': LOG_LEVEL,
|
||||
},
|
||||
'unexpected_exception': {
|
||||
'handlers': ['unexpected_exception'],
|
||||
'level': LOG_LEVEL,
|
||||
},
|
||||
'ops.ansible_api': {
|
||||
'handlers': ['console', 'ansible_logs'],
|
||||
'level': LOG_LEVEL,
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.