Mirror of https://github.com/jumpserver/jumpserver.git, synced 2025-12-16 00:52:41 +00:00
Compare commits
203 Commits
@@ -44,8 +44,8 @@ ARG TOOLS=" \
|
||||
wget"
|
||||
|
||||
ARG APT_MIRROR=http://mirrors.ustc.edu.cn
|
||||
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked,id=core \
|
||||
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core-apt \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked,id=core-apt \
|
||||
sed -i "s@http://.*.debian.org@${APT_MIRROR}@g" /etc/apt/sources.list \
|
||||
&& rm -f /etc/apt/apt.conf.d/docker-clean \
|
||||
&& ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
|
||||
@@ -63,9 +63,9 @@ RUN --mount=type=cache,target=/root/.cache \
|
||||
--mount=type=bind,source=pyproject.toml,target=/opt/jumpserver/pyproject.toml \
|
||||
set -ex \
|
||||
&& python3 -m venv /opt/py3 \
|
||||
&& . /opt/py3/bin/activate \
|
||||
&& pip install poetry -i ${PIP_MIRROR} \
|
||||
&& poetry config virtualenvs.create false \
|
||||
&& . /opt/py3/bin/activate \
|
||||
&& poetry install
|
||||
|
||||
FROM python:3.11-slim-bullseye
|
||||
@@ -75,8 +75,9 @@ ENV LANG=zh_CN.UTF-8 \
|
||||
|
||||
ARG DEPENDENCIES=" \
|
||||
libjpeg-dev \
|
||||
libxmlsec1-openssl \
|
||||
libx11-dev"
|
||||
libx11-dev \
|
||||
freerdp2-dev \
|
||||
libxmlsec1-openssl"
|
||||
|
||||
ARG TOOLS=" \
|
||||
ca-certificates \
|
||||
@@ -94,8 +95,8 @@ ARG TOOLS=" \
|
||||
wget"
|
||||
|
||||
ARG APT_MIRROR=http://mirrors.ustc.edu.cn
|
||||
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked,id=core \
|
||||
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked,id=core-apt \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked,id=core-apt \
|
||||
sed -i "s@http://.*.debian.org@${APT_MIRROR}@g" /etc/apt/sources.list \
|
||||
&& rm -f /etc/apt/apt.conf.d/docker-clean \
|
||||
&& ln -sf /usr/share/zoneinfo/Asia/Shanghai /etc/localtime \
|
||||
@@ -118,7 +119,6 @@ ARG VERSION
|
||||
ENV VERSION=$VERSION
|
||||
|
||||
VOLUME /opt/jumpserver/data
|
||||
VOLUME /opt/jumpserver/logs
|
||||
|
||||
EXPOSE 8080
|
||||
|
||||
|
||||
@@ -94,7 +94,8 @@ JumpServer 堡垒机帮助企业以更安全的方式管控和登录各种类型
|
||||
| [KoKo](https://github.com/jumpserver/koko) | <a href="https://github.com/jumpserver/koko/releases"><img alt="Koko release" src="https://img.shields.io/github/release/jumpserver/koko.svg" /></a> | JumpServer 字符协议 Connector 项目 |
|
||||
| [Lion](https://github.com/jumpserver/lion-release) | <a href="https://github.com/jumpserver/lion-release/releases"><img alt="Lion release" src="https://img.shields.io/github/release/jumpserver/lion-release.svg" /></a> | JumpServer 图形协议 Connector 项目,依赖 [Apache Guacamole](https://guacamole.apache.org/) |
|
||||
| [Razor](https://github.com/jumpserver/razor) | <img alt="Chen" src="https://img.shields.io/badge/release-私有发布-red" /> | JumpServer RDP 代理 Connector 项目 |
|
||||
| [Tinker](https://github.com/jumpserver/tinker) | <img alt="Tinker" src="https://img.shields.io/badge/release-私有发布-red" /> | JumpServer 远程应用 Connector 项目 |
|
||||
| [Tinker](https://github.com/jumpserver/tinker) | <img alt="Tinker" src="https://img.shields.io/badge/release-私有发布-red" /> | JumpServer 远程应用 Connector 项目 (Windows) |
|
||||
| [Panda](https://github.com/jumpserver/Panda) | <img alt="Panda" src="https://img.shields.io/badge/release-私有发布-red" /> | JumpServer 远程应用 Connector 项目 (Linux) |
|
||||
| [Magnus](https://github.com/jumpserver/magnus-release) | <a href="https://github.com/jumpserver/magnus-release/releases"><img alt="Magnus release" src="https://img.shields.io/github/release/jumpserver/magnus-release.svg" /> | JumpServer 数据库代理 Connector 项目 |
|
||||
| [Chen](https://github.com/jumpserver/chen-release) | <a href="https://github.com/jumpserver/chen-release/releases"><img alt="Chen release" src="https://img.shields.io/github/release/jumpserver/chen-release.svg" /> | JumpServer Web DB 项目,替代原来的 OmniDB |
|
||||
| [Kael](https://github.com/jumpserver/kael) | <a href="https://github.com/jumpserver/kael/releases"><img alt="Kael release" src="https://img.shields.io/github/release/jumpserver/kael.svg" /> | JumpServer 连接 GPT 资产的组件项目 |
|
||||
@@ -112,7 +113,7 @@ JumpServer是一款安全产品,请参考 [基本安全建议](https://docs.ju
|
||||
|
||||
## License & Copyright
|
||||
|
||||
Copyright (c) 2014-2023 飞致云 FIT2CLOUD, All rights reserved.
|
||||
Copyright (c) 2014-2024 飞致云 FIT2CLOUD, All rights reserved.
|
||||
|
||||
Licensed under The GNU General Public License version 3 (GPLv3) (the "License"); you may not use this file except in
|
||||
compliance with the License. You may obtain a copy of the License at
|
||||
|
||||
@@ -1,9 +1,12 @@
from rest_framework.generics import CreateAPIView
from rest_framework.response import Response

from accounts import serializers
from accounts.tasks import verify_accounts_connectivity_task, push_accounts_to_assets_task
from accounts.permissions import AccountTaskActionPermission
from accounts.tasks import (
    remove_accounts_task, verify_accounts_connectivity_task, push_accounts_to_assets_task
)
from assets.exceptions import NotSupportedTemporarilyError
from authentication.permissions import UserConfirmation, ConfirmType

__all__ = [
    'AccountsTaskCreateAPI',
@@ -12,16 +15,16 @@ __all__ = [

class AccountsTaskCreateAPI(CreateAPIView):
    serializer_class = serializers.AccountTaskSerializer
    permission_classes = (AccountTaskActionPermission,)

    def check_permissions(self, request):
        act = request.data.get('action')
        if act == 'push':
            code = 'accounts.push_account'
        else:
            code = 'accounts.verify_account'
        has = request.user.has_perm(code)
        if not has:
            self.permission_denied(request)
    def get_permissions(self):
        act = self.request.data.get('action')
        if act == 'remove':
            self.permission_classes = [
                AccountTaskActionPermission,
                UserConfirmation.require(ConfirmType.PASSWORD)
            ]
        return super().get_permissions()

    def perform_create(self, serializer):
        data = serializer.validated_data
@@ -31,6 +34,10 @@ class AccountsTaskCreateAPI(CreateAPIView):

        if data['action'] == 'push':
            task = push_accounts_to_assets_task.delay(account_ids, params)
        elif data['action'] == 'remove':
            gather_accounts = data.get('gather_accounts', [])
            gather_account_ids = [str(a.id) for a in gather_accounts]
            task = remove_accounts_task.delay(gather_account_ids)
        else:
            account = accounts[0]
            asset = account.asset
@@ -43,9 +50,3 @@ class AccountsTaskCreateAPI(CreateAPIView):
        data["task"] = task.id
        setattr(serializer, '_data', data)
        return task

    def get_exception_handler(self):
        def handler(e, context):
            return Response({"error": str(e)}, status=401)

        return handler
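In this hunk the view-level check_permissions override gives way to the new AccountTaskActionPermission class (added later in apps/accounts/permissions.py) plus a get_permissions hook, which lets the remove action additionally demand a fresh password confirmation. For reference, a minimal sketch of that DRF pattern with made-up names (CanRemoveThing, ThingTaskCreateAPI are illustrative, not JumpServer classes); it is meant to sit inside an existing Django/DRF project rather than run standalone:

```python
# Minimal sketch of the action-dependent permission pattern used above.
# CanRemoveThing / ThingTaskCreateAPI are illustrative names, not JumpServer's.
from rest_framework import permissions
from rest_framework.generics import CreateAPIView


class CanRemoveThing(permissions.BasePermission):
    """Stand-in for a model permission check such as 'accounts.remove_account'."""

    def has_permission(self, request, view):
        return request.user.has_perm('app.remove_thing')


class ThingTaskCreateAPI(CreateAPIView):
    # Default permissions applied to every action.
    permission_classes = [permissions.IsAuthenticated]

    def get_permissions(self):
        # get_permissions() runs per request, so the class list can be adjusted
        # from the submitted payload before DRF instantiates the checks.
        if self.request.data.get('action') == 'remove':
            self.permission_classes = [permissions.IsAuthenticated, CanRemoveThing]
        return super().get_permissions()
```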
@@ -3,13 +3,13 @@ import time
from collections import defaultdict, OrderedDict

from django.conf import settings
from openpyxl import Workbook
from rest_framework import serializers
from xlsxwriter import Workbook

from accounts.const.automation import AccountBackupType
from accounts.models.automations.backup_account import AccountBackupAutomation
from accounts.notifications import AccountBackupExecutionTaskMsg, AccountBackupByObjStorageExecutionTaskMsg
from accounts.serializers import AccountSecretSerializer
from accounts.models.automations.backup_account import AccountBackupAutomation
from assets.const import AllTypes
from common.utils.file import encrypt_and_compress_zip_file, zip_files
from common.utils.timezone import local_now_filename, local_now_display
@@ -144,10 +144,11 @@ class AccountBackupHandler:

        wb = Workbook(filename)
        for sheet, data in data_map.items():
            ws = wb.create_sheet(str(sheet))
            for row in data:
                ws.append(row)
        wb.save(filename)
            ws = wb.add_worksheet(str(sheet))
            for row_index, row_data in enumerate(data):
                for col_index, col_data in enumerate(row_data):
                    ws.write_string(row_index, col_index, col_data)
        wb.close()
        files.append(filename)
        timedelta = round((time.time() - time_start), 2)
        print('创建备份文件完成: 用时 {}s'.format(timedelta))
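The backup export switches from openpyxl to xlsxwriter here. xlsxwriter worksheets have no append(), so rows are written cell by cell, and nothing reaches disk until the workbook is closed; write_string also keeps every value as text, which is likely why it is used for account secrets. A self-contained sketch of that write pattern (the sample data and filename are made up):

```python
# Minimal sketch of the xlsxwriter pattern the backup handler switches to.
from xlsxwriter import Workbook

data_map = {
    'linux': [['asset', 'username', 'secret'], ['web-1', 'root', 's3cret']],
}

wb = Workbook('accounts-backup.xlsx')
for sheet, rows in data_map.items():
    ws = wb.add_worksheet(str(sheet))          # no create_sheet()/append() in xlsxwriter
    for row_index, row_data in enumerate(rows):
        for col_index, col_data in enumerate(row_data):
            # write_string() forces text cells, so values like "0123" keep leading zeros
            ws.write_string(row_index, col_index, col_data)
wb.close()                                     # nothing is flushed to disk until close()
```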
@@ -1,7 +1,6 @@
|
||||
- hosts: custom
|
||||
gather_facts: no
|
||||
vars:
|
||||
asset_port: "{{ jms_asset.protocols | selectattr('name', 'equalto', 'ssh') | map(attribute='port') | first }}"
|
||||
ansible_connection: local
|
||||
ansible_become: false
|
||||
|
||||
@@ -9,7 +8,7 @@
|
||||
- name: Test privileged account (paramiko)
|
||||
ssh_ping:
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ asset_port }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_secret_type: "{{ jms_account.secret_type }}"
|
||||
@@ -27,7 +26,7 @@
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ asset_port }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_secret_type: "{{ jms_account.secret_type }}"
|
||||
login_private_key_path: "{{ jms_account.private_key_path }}"
|
||||
become: "{{ custom_become | default(False) }}"
|
||||
@@ -49,7 +48,7 @@
|
||||
login_user: "{{ account.username }}"
|
||||
login_password: "{{ account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ asset_port }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
become: "{{ account.become.ansible_become | default(False) }}"
|
||||
become_method: su
|
||||
become_user: "{{ account.become.ansible_user | default('') }}"
|
||||
|
||||
@@ -6,15 +6,26 @@ category:
|
||||
type:
|
||||
- all
|
||||
method: change_secret
|
||||
protocol: ssh
|
||||
params:
|
||||
- name: commands
|
||||
type: list
|
||||
label: '自定义命令'
|
||||
label: "{{ 'Params commands label' | trans }}"
|
||||
default: [ '' ]
|
||||
help_text: '自定义命令中如需包含账号的 账号、密码、SSH 连接的用户密码 字段,<br />请使用 {username}、{password}、{login_password}格式,执行任务时会进行替换 。<br />比如针对 Cisco 主机进行改密,一般需要配置五条命令:<br />1. enable<br />2. {login_password}<br />3. configure terminal<br />4. username {username} privilege 0 password {password} <br />5. end'
|
||||
help_text: "{{ 'Params commands help text' | trans }}"
|
||||
|
||||
i18n:
|
||||
SSH account change secret:
|
||||
zh: 使用 SSH 命令行自定义改密
|
||||
ja: SSH コマンドライン方式でカスタムパスワード変更
|
||||
en: Custom password change by SSH command line
|
||||
zh: '使用 SSH 命令行自定义改密'
|
||||
ja: 'SSH コマンドライン方式でカスタムパスワード変更'
|
||||
en: 'Custom password change by SSH command line'
|
||||
|
||||
Params commands help text:
|
||||
zh: '自定义命令中如需包含账号的 账号、密码、SSH 连接的用户密码 字段,<br />请使用 {username}、{password}、{login_password}格式,执行任务时会进行替换 。<br />比如针对 Cisco 主机进行改密,一般需要配置五条命令:<br />1. enable<br />2. {login_password}<br />3. configure terminal<br />4. username {username} privilege 0 password {password} <br />5. end'
|
||||
ja: 'カスタム コマンドに SSH 接続用のアカウント番号、パスワード、ユーザー パスワード フィールドを含める必要がある場合は、<br />{ユーザー名}、{パスワード}、{login_password& を使用してください。 # 125; 形式。タスクの実行時に置き換えられます。 <br />たとえば、Cisco ホストのパスワードを変更するには、通常、次の 5 つのコマンドを設定する必要があります:<br />1.enable<br />2.{login_password}<br />3 .ターミナルの設定<br / >4. ユーザー名 {ユーザー名} 権限 0 パスワード {パスワード} <br />5. 終了'
|
||||
en: 'If the custom command needs to include the account number, password, and user password field for SSH connection,<br />Please use {username}, {password}, {login_password&# 125; format, which will be replaced when executing the task. <br />For example, to change the password of a Cisco host, you generally need to configure five commands:<br />1. enable<br />2. {login_password}<br />3. configure terminal<br / >4. username {username} privilege 0 password {password} <br />5. end'
|
||||
|
||||
Params commands label:
|
||||
zh: '自定义命令'
|
||||
ja: 'カスタムコマンド'
|
||||
en: 'Custom command'
|
||||
|
||||
@@ -3,6 +3,7 @@
vars:
ansible_python_interpreter: /opt/py3/bin/python
db_name: "{{ jms_asset.spec_info.db_name }}"
check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"

tasks:
- name: Test MySQL connection
@@ -11,10 +12,10 @@
login_password: "{{ jms_account.secret }}"
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
check_hostname: "{{ omit if not jms_asset.spec_info.use_ssl else jms_asset.spec_info.allow_invalid_cert }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) }}"
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) }}"
client_key: "{{ jms_asset.secret_info.client_key | default(omit) }}"
check_hostname: "{{ check_ssl if check_ssl else omit }}"
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
filter: version
register: db_info
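The new check_ssl fact centralizes the decision that used to be inlined per option: TLS options are only passed to the MySQL modules when the asset uses SSL and does not allow invalid certificates, otherwise they are omitted entirely. The same substitution is repeated in the remaining hunks of this playbook and in the gather/push/verify MySQL playbooks further down. A rough Python rendering of that gating logic, with an illustrative dict layout (not the real jms_asset schema):

```python
# Sketch of the TLS-option gating the playbook's check_ssl fact encodes.
def mysql_tls_kwargs(spec_info: dict, secret_info: dict) -> dict:
    check_ssl = spec_info.get('use_ssl') and not spec_info.get('allow_invalid_cert')
    if not check_ssl:
        # Equivalent to passing `omit` in Ansible: the module never sees the options.
        return {}
    kwargs = {'check_hostname': True}
    for key in ('ca_cert', 'client_cert', 'client_key'):
        if secret_info.get(key):
            kwargs[key] = secret_info[key]
    return kwargs


print(mysql_tls_kwargs({'use_ssl': True, 'allow_invalid_cert': False},
                       {'ca_cert': '/tmp/ca.pem'}))
# {'check_hostname': True, 'ca_cert': '/tmp/ca.pem'}
```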
@@ -28,10 +29,10 @@
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
check_hostname: "{{ omit if not jms_asset.spec_info.use_ssl else jms_asset.spec_info.allow_invalid_cert }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) }}"
|
||||
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) }}"
|
||||
client_key: "{{ jms_asset.secret_info.client_key | default(omit) }}"
|
||||
check_hostname: "{{ check_ssl if check_ssl else omit }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
|
||||
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
|
||||
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
|
||||
name: "{{ account.username }}"
|
||||
password: "{{ account.secret }}"
|
||||
host: "%"
|
||||
@@ -45,8 +46,8 @@
|
||||
login_password: "{{ account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
check_hostname: "{{ omit if not jms_asset.spec_info.use_ssl else jms_asset.spec_info.allow_invalid_cert }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) }}"
|
||||
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) }}"
|
||||
client_key: "{{ jms_asset.secret_info.client_key | default(omit) }}"
|
||||
check_hostname: "{{ check_ssl if check_ssl else omit }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
|
||||
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
|
||||
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
|
||||
filter: version
|
||||
|
||||
@@ -4,7 +4,7 @@ from copy import deepcopy
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils import timezone
|
||||
from openpyxl import Workbook
|
||||
from xlsxwriter import Workbook
|
||||
|
||||
from accounts.const import AutomationTypes, SecretType, SSHKeyStrategy, SecretStrategy
|
||||
from accounts.models import ChangeSecretRecord
|
||||
@@ -139,7 +139,7 @@ class ChangeSecretManager(AccountBasePlaybookManager):
|
||||
'name': account.name,
|
||||
'username': account.username,
|
||||
'secret_type': secret_type,
|
||||
'secret': new_secret,
|
||||
'secret': account.escape_jinja2_syntax(new_secret),
|
||||
'private_key_path': private_key_path,
|
||||
'become': account.get_ansible_become_auth(),
|
||||
}
|
||||
@@ -161,7 +161,8 @@ class ChangeSecretManager(AccountBasePlaybookManager):
|
||||
print("Account not found, deleted ?")
|
||||
return
|
||||
account.secret = recorder.new_secret
|
||||
account.save(update_fields=['secret'])
|
||||
account.date_updated = timezone.now()
|
||||
account.save(update_fields=['secret', 'date_updated'])
|
||||
|
||||
def on_host_error(self, host, error, result):
|
||||
recorder = self.name_recorder_mapper.get(host)
|
||||
@@ -227,8 +228,9 @@ class ChangeSecretManager(AccountBasePlaybookManager):
|
||||
|
||||
rows.insert(0, header)
|
||||
wb = Workbook(filename)
|
||||
ws = wb.create_sheet('Sheet1')
|
||||
for row in rows:
|
||||
ws.append(row)
|
||||
wb.save(filename)
|
||||
ws = wb.add_worksheet('Sheet1')
|
||||
for row_index, row_data in enumerate(rows):
|
||||
for col_index, col_data in enumerate(row_data):
|
||||
ws.write_string(row_index, col_index, col_data)
|
||||
wb.close()
|
||||
return True
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
from .push_account.manager import PushAccountManager
|
||||
from .change_secret.manager import ChangeSecretManager
|
||||
from .verify_account.manager import VerifyAccountManager
|
||||
from .backup_account.manager import AccountBackupManager
|
||||
from .change_secret.manager import ChangeSecretManager
|
||||
from .gather_accounts.manager import GatherAccountsManager
|
||||
from .push_account.manager import PushAccountManager
|
||||
from .remove_account.manager import RemoveAccountManager
|
||||
from .verify_account.manager import VerifyAccountManager
|
||||
from .verify_gateway_account.manager import VerifyGatewayAccountManager
|
||||
from ..const import AutomationTypes
|
||||
|
||||
@@ -12,6 +13,7 @@ class ExecutionManager:
|
||||
AutomationTypes.push_account: PushAccountManager,
|
||||
AutomationTypes.change_secret: ChangeSecretManager,
|
||||
AutomationTypes.verify_account: VerifyAccountManager,
|
||||
AutomationTypes.remove_account: RemoveAccountManager,
|
||||
AutomationTypes.gather_accounts: GatherAccountsManager,
|
||||
AutomationTypes.verify_gateway_account: VerifyGatewayAccountManager,
|
||||
# TODO 后期迁移到自动化策略中
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_python_interpreter: /opt/py3/bin/python
|
||||
check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
|
||||
|
||||
tasks:
|
||||
- name: Get info
|
||||
@@ -10,10 +11,10 @@
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
check_hostname: "{{ omit if not jms_asset.spec_info.use_ssl else jms_asset.spec_info.allow_invalid_cert }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) }}"
|
||||
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) }}"
|
||||
client_key: "{{ jms_asset.secret_info.client_key | default(omit) }}"
|
||||
check_hostname: "{{ check_ssl if check_ssl else omit }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
|
||||
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
|
||||
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
|
||||
filter: users
|
||||
register: db_info
|
||||
|
||||
|
||||
@@ -72,7 +72,7 @@ class GatherAccountsManager(AccountBasePlaybookManager):
|
||||
)
|
||||
gathered_accounts.append(gathered_account)
|
||||
if not self.is_sync_account:
|
||||
return
|
||||
continue
|
||||
GatheredAccount.sync_accounts(gathered_accounts)
|
||||
|
||||
def run(self, *args, **kwargs):
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
vars:
|
||||
ansible_python_interpreter: /opt/py3/bin/python
|
||||
db_name: "{{ jms_asset.spec_info.db_name }}"
|
||||
check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
|
||||
|
||||
tasks:
|
||||
- name: Test MySQL connection
|
||||
@@ -11,10 +12,10 @@
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
check_hostname: "{{ omit if not jms_asset.spec_info.use_ssl else jms_asset.spec_info.allow_invalid_cert }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) }}"
|
||||
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) }}"
|
||||
client_key: "{{ jms_asset.secret_info.client_key | default(omit) }}"
|
||||
check_hostname: "{{ check_ssl if check_ssl else omit }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
|
||||
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
|
||||
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
|
||||
filter: version
|
||||
register: db_info
|
||||
|
||||
@@ -28,10 +29,10 @@
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
check_hostname: "{{ omit if not jms_asset.spec_info.use_ssl else jms_asset.spec_info.allow_invalid_cert }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) }}"
|
||||
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) }}"
|
||||
client_key: "{{ jms_asset.secret_info.client_key | default(omit) }}"
|
||||
check_hostname: "{{ check_ssl if check_ssl else omit }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
|
||||
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
|
||||
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
|
||||
name: "{{ account.username }}"
|
||||
password: "{{ account.secret }}"
|
||||
host: "%"
|
||||
@@ -45,8 +46,8 @@
|
||||
login_password: "{{ account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
check_hostname: "{{ omit if not jms_asset.spec_info.use_ssl else jms_asset.spec_info.allow_invalid_cert }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) }}"
|
||||
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) }}"
|
||||
client_key: "{{ jms_asset.secret_info.client_key | default(omit) }}"
|
||||
check_hostname: "{{ check_ssl if check_ssl else omit }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
|
||||
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
|
||||
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
|
||||
filter: version
|
||||
|
||||
@@ -9,7 +9,7 @@ params:
|
||||
type: str
|
||||
label: 'Sudo'
|
||||
default: '/bin/whoami'
|
||||
help_text: '使用逗号分隔多个命令,如: /bin/whoami,/sbin/ifconfig'
|
||||
help_text: "{{ 'Params sudo help text' | trans }}"
|
||||
|
||||
- name: shell
|
||||
type: str
|
||||
@@ -18,19 +18,44 @@ params:
|
||||
|
||||
- name: home
|
||||
type: str
|
||||
label: '家目录'
|
||||
label: "{{ 'Params home label' | trans }}"
|
||||
default: ''
|
||||
help_text: '默认家目录 /home/系统用户名: /home/username'
|
||||
help_text: "{{ 'Params home help text' | trans }}"
|
||||
|
||||
- name: groups
|
||||
type: str
|
||||
label: '用户组'
|
||||
label: "{{ 'Params groups label' | trans }}"
|
||||
default: ''
|
||||
help_text: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||
help_text: "{{ 'Params groups help text' | trans }}"
|
||||
|
||||
i18n:
|
||||
Aix account push:
|
||||
zh: 使用 Ansible 模块 user 执行 Aix 账号推送 (DES)
|
||||
ja: Ansible user モジュールを使用して Aix アカウントをプッシュする (DES)
|
||||
en: Using Ansible module user to push account (DES)
|
||||
zh: '使用 Ansible 模块 user 执行 Aix 账号推送 (DES)'
|
||||
ja: 'Ansible user モジュールを使用して Aix アカウントをプッシュする (DES)'
|
||||
en: 'Using Ansible module user to push account (DES)'
|
||||
|
||||
Params sudo help text:
|
||||
zh: '使用逗号分隔多个命令,如: /bin/whoami,/sbin/ifconfig'
|
||||
ja: 'コンマで区切って複数のコマンドを入力してください。例: /bin/whoami,/sbin/ifconfig'
|
||||
en: 'Use commas to separate multiple commands, such as: /bin/whoami,/sbin/ifconfig'
|
||||
|
||||
Params home help text:
|
||||
zh: '默认家目录 /home/{账号用户名}'
|
||||
ja: 'デフォルトのホームディレクトリ /home/{アカウントユーザ名}'
|
||||
en: 'Default home directory /home/{account username}'
|
||||
|
||||
Params groups help text:
|
||||
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
||||
|
||||
Params home label:
|
||||
zh: '家目录'
|
||||
ja: 'ホームディレクトリ'
|
||||
en: 'Home'
|
||||
|
||||
Params groups label:
|
||||
zh: '用户组'
|
||||
ja: 'グループ'
|
||||
en: 'Groups'
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ params:
|
||||
type: str
|
||||
label: 'Sudo'
|
||||
default: '/bin/whoami'
|
||||
help_text: '使用逗号分隔多个命令,如: /bin/whoami,/sbin/ifconfig'
|
||||
help_text: "{{ 'Params sudo help text' | trans }}"
|
||||
|
||||
- name: shell
|
||||
type: str
|
||||
@@ -20,18 +20,43 @@ params:
|
||||
|
||||
- name: home
|
||||
type: str
|
||||
label: '家目录'
|
||||
label: "{{ 'Params home label' | trans }}"
|
||||
default: ''
|
||||
help_text: '默认家目录 /home/系统用户名: /home/username'
|
||||
help_text: "{{ 'Params home help text' | trans }}"
|
||||
|
||||
- name: groups
|
||||
type: str
|
||||
label: '用户组'
|
||||
label: "{{ 'Params groups label' | trans }}"
|
||||
default: ''
|
||||
help_text: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||
help_text: "{{ 'Params groups help text' | trans }}"
|
||||
|
||||
i18n:
|
||||
Posix account push:
|
||||
zh: 使用 Ansible 模块 user 执行账号推送 (sha512)
|
||||
ja: Ansible user モジュールを使用してアカウントをプッシュする (sha512)
|
||||
en: Using Ansible module user to push account (sha512)
|
||||
zh: '使用 Ansible 模块 user 执行账号推送 (sha512)'
|
||||
ja: 'Ansible user モジュールを使用してアカウントをプッシュする (sha512)'
|
||||
en: 'Using Ansible module user to push account (sha512)'
|
||||
|
||||
Params sudo help text:
|
||||
zh: '使用逗号分隔多个命令,如: /bin/whoami,/sbin/ifconfig'
|
||||
ja: 'コンマで区切って複数のコマンドを入力してください。例: /bin/whoami,/sbin/ifconfig'
|
||||
en: 'Use commas to separate multiple commands, such as: /bin/whoami,/sbin/ifconfig'
|
||||
|
||||
Params home help text:
|
||||
zh: '默认家目录 /home/{账号用户名}'
|
||||
ja: 'デフォルトのホームディレクトリ /home/{アカウントユーザ名}'
|
||||
en: 'Default home directory /home/{account username}'
|
||||
|
||||
Params groups help text:
|
||||
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
||||
|
||||
Params home label:
|
||||
zh: '家目录'
|
||||
ja: 'ホームディレクトリ'
|
||||
en: 'Home'
|
||||
|
||||
Params groups label:
|
||||
zh: '用户组'
|
||||
ja: 'グループ'
|
||||
en: 'Groups'
|
||||
@@ -10,10 +10,15 @@ params:
|
||||
type: str
|
||||
label: '用户组'
|
||||
default: 'Users,Remote Desktop Users'
|
||||
help_text: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||
help_text: "{{ 'Params groups help text' | trans }}"
|
||||
|
||||
i18n:
|
||||
Windows account push:
|
||||
zh: 使用 Ansible 模块 win_user 执行 Windows 账号推送
|
||||
ja: Ansible win_user モジュールを使用して Windows アカウントをプッシュする
|
||||
en: Using Ansible module win_user to push account
|
||||
zh: '使用 Ansible 模块 win_user 执行 Windows 账号推送'
|
||||
ja: 'Ansible win_user モジュールを使用して Windows アカウントをプッシュする'
|
||||
en: 'Using Ansible module win_user to push account'
|
||||
|
||||
Params groups help text:
|
||||
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
||||
|
||||
@@ -10,10 +10,15 @@ params:
|
||||
type: str
|
||||
label: '用户组'
|
||||
default: 'Users,Remote Desktop Users'
|
||||
help_text: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||
help_text: "{{ 'Params groups help text' | trans }}"
|
||||
|
||||
i18n:
|
||||
Windows account push rdp verify:
|
||||
zh: 使用 Ansible 模块 win_user 执行 Windows 账号推送 RDP 协议测试最后的可连接性
|
||||
ja: Ansibleモジュールwin_userがWindowsアカウントプッシュRDPプロトコルテストを実行する最後の接続性
|
||||
en: Using the Ansible module win_user performs Windows account push RDP protocol testing for final connectivity
|
||||
zh: '使用 Ansible 模块 win_user 执行 Windows 账号推送(最后使用 Python 模块 pyfreerdp 验证账号的可连接性)'
|
||||
ja: 'Ansible モジュール win_user を使用して Windows アカウントのプッシュを実行します (最後に Python モジュール pyfreerdp を使用してアカウントの接続性を確認します)'
|
||||
en: 'Use the Ansible module win_user to perform Windows account push (finally use the Python module pyfreerdp to verify the connectability of the account)'
|
||||
|
||||
Params groups help text:
|
||||
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
||||
|
||||
@@ -0,0 +1,21 @@
|
||||
- hosts: mongodb
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_python_interpreter: /opt/py3/bin/python
|
||||
|
||||
tasks:
|
||||
- name: "Remove account"
|
||||
mongodb_user:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_database: "{{ jms_asset.spec_info.db_name }}"
|
||||
ssl: "{{ jms_asset.spec_info.use_ssl }}"
|
||||
ssl_ca_certs: "{{ jms_asset.secret_info.ca_cert | default('') }}"
|
||||
ssl_certfile: "{{ jms_asset.secret_info.client_key | default('') }}"
|
||||
connection_options:
|
||||
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
|
||||
db: "{{ jms_asset.spec_info.db_name }}"
|
||||
name: "{{ account.username }}"
|
||||
state: absent
|
||||
@@ -0,0 +1,12 @@
|
||||
id: remove_account_mongodb
|
||||
name: "{{ 'MongoDB account remove' | trans }}"
|
||||
category: database
|
||||
type:
|
||||
- mongodb
|
||||
method: remove_account
|
||||
|
||||
i18n:
|
||||
MongoDB account remove:
|
||||
zh: 使用 Ansible 模块 mongodb 删除账号
|
||||
ja: Ansible モジュール mongodb を使用してアカウントを削除する
|
||||
en: Delete account using Ansible module mongodb
|
||||
@@ -0,0 +1,18 @@
|
||||
- hosts: mysql
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_python_interpreter: /opt/py3/bin/python
|
||||
|
||||
tasks:
|
||||
- name: "Remove account"
|
||||
community.mysql.mysql_user:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
check_hostname: "{{ check_ssl if check_ssl else omit }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
|
||||
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
|
||||
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
|
||||
name: "{{ account.username }}"
|
||||
state: absent
|
||||
@@ -0,0 +1,14 @@
|
||||
id: remove_account_mysql
|
||||
name: "{{ 'MySQL account remove' | trans }}"
|
||||
category: database
|
||||
type:
|
||||
- mysql
|
||||
- mariadb
|
||||
method: remove_account
|
||||
|
||||
i18n:
|
||||
MySQL account remove:
|
||||
zh: 使用 Ansible 模块 mysql_user 删除账号
|
||||
ja: Ansible モジュール mysql_user を使用してアカウントを削除します
|
||||
en: Use the Ansible module mysql_user to delete the account
|
||||
|
||||
@@ -0,0 +1,16 @@
|
||||
- hosts: oracle
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_python_interpreter: /opt/py3/bin/python
|
||||
|
||||
tasks:
|
||||
- name: "Remove account"
|
||||
oracle_user:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_database: "{{ jms_asset.spec_info.db_name }}"
|
||||
mode: "{{ jms_account.mode }}"
|
||||
name: "{{ account.username }}"
|
||||
state: absent
|
||||
@@ -0,0 +1,12 @@
|
||||
id: remove_account_oracle
|
||||
name: "{{ 'Oracle account remove' | trans }}"
|
||||
category: database
|
||||
type:
|
||||
- oracle
|
||||
method: remove_account
|
||||
|
||||
i18n:
|
||||
Oracle account remove:
|
||||
zh: 使用 Python 模块 oracledb 删除账号
|
||||
ja: Python モジュール oracledb を使用してアカウントを検証する
|
||||
en: Using Python module oracledb to verify account
|
||||
@@ -0,0 +1,15 @@
|
||||
- hosts: postgresql
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_python_interpreter: /opt/py3/bin/python
|
||||
|
||||
tasks:
|
||||
- name: "Remove account"
|
||||
community.postgresql.postgresql_user:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
db: "{{ jms_asset.spec_info.db_name }}"
|
||||
name: "{{ account.username }}"
|
||||
state: absent
|
||||
@@ -0,0 +1,12 @@
|
||||
id: remove_account_postgresql
|
||||
name: "{{ 'PostgreSQL account remove' | trans }}"
|
||||
category: database
|
||||
type:
|
||||
- postgresql
|
||||
method: remove_account
|
||||
|
||||
i18n:
|
||||
PostgreSQL account remove:
|
||||
zh: 使用 Ansible 模块 postgresql_user 删除账号
|
||||
ja: Ansible モジュール postgresql_user を使用してアカウントを削除します
|
||||
en: Use the Ansible module postgresql_user to delete the account
|
||||
@@ -0,0 +1,14 @@
|
||||
- hosts: sqlserver
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_python_interpreter: /opt/py3/bin/python
|
||||
|
||||
tasks:
|
||||
- name: "Remove account"
|
||||
community.general.mssql_script:
|
||||
login_user: "{{ jms_account.username }}"
|
||||
login_password: "{{ jms_account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
name: "{{ jms_asset.spec_info.db_name }}"
|
||||
script: "DROP USER {{ account.username }}"
|
||||
@@ -0,0 +1,12 @@
|
||||
id: remove_account_sqlserver
|
||||
name: "{{ 'SQLServer account remove' | trans }}"
|
||||
category: database
|
||||
type:
|
||||
- sqlserver
|
||||
method: remove_account
|
||||
|
||||
i18n:
|
||||
SQLServer account remove:
|
||||
zh: 使用 Ansible 模块 mssql 删除账号
|
||||
ja: Ansible モジュール mssql を使用してアカウントを削除する
|
||||
en: Use Ansible module mssql to delete account
|
||||
apps/accounts/automations/remove_account/host/posix/main.yml (new file, 26 lines)
@@ -0,0 +1,26 @@
|
||||
- hosts: demo
|
||||
gather_facts: no
|
||||
tasks:
|
||||
- name: "Get user home directory path"
|
||||
ansible.builtin.shell:
|
||||
cmd: "getent passwd {{ account.username }} | cut -d: -f6"
|
||||
register: user_home_dir
|
||||
ignore_errors: yes
|
||||
|
||||
- name: "Check if user home directory exists"
|
||||
ansible.builtin.stat:
|
||||
path: "{{ user_home_dir.stdout }}"
|
||||
register: home_dir
|
||||
when: user_home_dir.stdout != ""
|
||||
|
||||
- name: "Rename user home directory if it exists"
|
||||
ansible.builtin.command:
|
||||
cmd: "mv {{ user_home_dir.stdout }} {{ user_home_dir.stdout }}.bak"
|
||||
when: home_dir.stat | default(false) and user_home_dir.stdout != ""
|
||||
|
||||
- name: "Remove account"
|
||||
ansible.builtin.user:
|
||||
name: "{{ account.username }}"
|
||||
state: absent
|
||||
remove: "{{ home_dir.stat.exists }}"
|
||||
when: home_dir.stat | default(false)
|
||||
@@ -0,0 +1,13 @@
|
||||
id: remove_account_posix
|
||||
name: "{{ 'Posix account remove' | trans }}"
|
||||
category: host
|
||||
type:
|
||||
- linux
|
||||
- unix
|
||||
method: remove_account
|
||||
|
||||
i18n:
|
||||
Posix account remove:
|
||||
zh: 使用 Ansible 模块 user 删除账号
|
||||
ja: Ansible モジュール ユーザーを使用してアカウントを削除します
|
||||
en: Use the Ansible module user to delete the account
|
||||
@@ -0,0 +1,9 @@
|
||||
- hosts: windows
|
||||
gather_facts: no
|
||||
tasks:
|
||||
- name: "Remove account"
|
||||
ansible.windows.win_user:
|
||||
name: "{{ account.username }}"
|
||||
state: absent
|
||||
purge: yes
|
||||
force: yes
|
||||
@@ -0,0 +1,13 @@
|
||||
id: remove_account_windows
|
||||
name: "{{ 'Windows account remove' | trans }}"
|
||||
version: 1
|
||||
method: remove_account
|
||||
category: host
|
||||
type:
|
||||
- windows
|
||||
|
||||
i18n:
|
||||
Windows account remove:
|
||||
zh: 使用 Ansible 模块 win_user 删除账号
|
||||
ja: Ansible モジュール win_user を使用してアカウントを削除する
|
||||
en: Use the Ansible module win_user to delete an account
|
||||
apps/accounts/automations/remove_account/manager.py (new file, 67 lines)
@@ -0,0 +1,67 @@
import os
from copy import deepcopy

from django.db.models import QuerySet

from accounts.const import AutomationTypes
from accounts.models import Account
from common.utils import get_logger
from ..base.manager import AccountBasePlaybookManager

logger = get_logger(__name__)


class RemoveAccountManager(AccountBasePlaybookManager):

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.host_account_mapper = {}

    def prepare_runtime_dir(self):
        path = super().prepare_runtime_dir()
        ansible_config_path = os.path.join(path, 'ansible.cfg')

        with open(ansible_config_path, 'w') as f:
            f.write('[ssh_connection]\n')
            f.write('ssh_args = -o ControlMaster=no -o ControlPersist=no\n')
        return path

    @classmethod
    def method_type(cls):
        return AutomationTypes.remove_account

    def get_gather_accounts(self, privilege_account, gather_accounts: QuerySet):
        gather_account_ids = self.execution.snapshot['gather_accounts']
        gather_accounts = gather_accounts.filter(id__in=gather_account_ids)
        gather_accounts = gather_accounts.exclude(
            username__in=[privilege_account.username, 'root', 'Administrator']
        )
        return gather_accounts

    def host_callback(self, host, asset=None, account=None, automation=None, path_dir=None, **kwargs):
        if host.get('error'):
            return host

        gather_accounts = asset.gatheredaccount_set.all()
        gather_accounts = self.get_gather_accounts(account, gather_accounts)

        inventory_hosts = []

        for gather_account in gather_accounts:
            h = deepcopy(host)
            h['name'] += '(' + gather_account.username + ')'
            self.host_account_mapper[h['name']] = (asset, gather_account)
            h['account'] = {'username': gather_account.username}
            inventory_hosts.append(h)
        return inventory_hosts

    def on_host_success(self, host, result):
        tuple_asset_gather_account = self.host_account_mapper.get(host)
        if not tuple_asset_gather_account:
            return
        asset, gather_account = tuple_asset_gather_account
        Account.objects.filter(
            asset_id=asset.id,
            username=gather_account.username
        ).delete()
        gather_account.delete()
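host_callback fans one inventory entry per asset out into one entry per gathered account, so the posix/windows/database remove playbooks run once per account, and host_account_mapper keys each entry by the suffixed inventory name so on_host_success can find which (asset, gathered account) pair to delete. A toy sketch of that fan-out (the host dict shape is simplified, not the real inventory schema):

```python
# Toy illustration of the per-account inventory fan-out used by host_callback.
from copy import deepcopy


def fan_out(host: dict, usernames: list[str]):
    mapper, inventory = {}, []
    for username in usernames:
        h = deepcopy(host)
        h['name'] += '(' + username + ')'     # unique inventory name per account
        h['account'] = {'username': username}
        mapper[h['name']] = username          # looked up by host name on success
        inventory.append(h)
    return mapper, inventory


mapper, inventory = fan_out({'name': 'web-1', 'ansible_host': '10.0.0.5'},
                            ['alice', 'deploy'])
print([h['name'] for h in inventory])   # ['web-1(alice)', 'web-1(deploy)']
```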
@@ -8,7 +8,7 @@
|
||||
- name: Verify account (pyfreerdp)
|
||||
rdp_ping:
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.protocols | selectattr('name', 'equalto', 'rdp') | map(attribute='port') | first }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_user: "{{ account.username }}"
|
||||
login_password: "{{ account.secret }}"
|
||||
login_secret_type: "{{ account.secret_type }}"
|
||||
|
||||
@@ -5,9 +5,10 @@ category:
|
||||
type:
|
||||
- windows
|
||||
method: verify_account
|
||||
protocol: rdp
|
||||
|
||||
i18n:
|
||||
Windows rdp account verify:
|
||||
zh: 使用 Python 模块 pyfreerdp 验证账号
|
||||
ja: Python モジュール pyfreerdp を使用してアカウントを検証する
|
||||
en: Using Python module pyfreerdp to verify account
|
||||
zh: '使用 Python 模块 pyfreerdp 验证账号'
|
||||
ja: 'Python モジュール pyfreerdp を使用してアカウントを検証する'
|
||||
en: 'Using Python module pyfreerdp to verify account'
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
- name: Verify account (paramiko)
|
||||
ssh_ping:
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.protocols | selectattr('name', 'equalto', 'ssh') | map(attribute='port') | first }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
login_user: "{{ account.username }}"
|
||||
login_password: "{{ account.secret }}"
|
||||
login_secret_type: "{{ account.secret_type }}"
|
||||
|
||||
@@ -6,9 +6,10 @@ category:
|
||||
type:
|
||||
- all
|
||||
method: verify_account
|
||||
protocol: ssh
|
||||
|
||||
i18n:
|
||||
SSH account verify:
|
||||
zh: 使用 Python 模块 paramiko 验证账号
|
||||
ja: Python モジュール paramiko を使用してアカウントを検証する
|
||||
en: Using Python module paramiko to verify account
|
||||
zh: '使用 Python 模块 paramiko 验证账号'
|
||||
ja: 'Python モジュール paramiko を使用してアカウントを検証する'
|
||||
en: 'Using Python module paramiko to verify account'
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
- hosts: mongdb
|
||||
- hosts: mongodb
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_python_interpreter: /opt/py3/bin/python
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_python_interpreter: /opt/py3/bin/python
|
||||
check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"
|
||||
|
||||
tasks:
|
||||
- name: Verify account
|
||||
@@ -10,8 +11,8 @@
|
||||
login_password: "{{ account.secret }}"
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_port: "{{ jms_asset.port }}"
|
||||
check_hostname: "{{ omit if not jms_asset.spec_info.use_ssl else jms_asset.spec_info.allow_invalid_cert }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) }}"
|
||||
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) }}"
|
||||
client_key: "{{ jms_asset.secret_info.client_key | default(omit) }}"
|
||||
check_hostname: "{{ check_ssl if check_ssl else omit }}"
|
||||
ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
|
||||
client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
|
||||
client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
|
||||
filter: version
|
||||
|
||||
@@ -62,7 +62,7 @@ class VerifyAccountManager(AccountBasePlaybookManager):
|
||||
'name': account.name,
|
||||
'username': account.username,
|
||||
'secret_type': account.secret_type,
|
||||
'secret': secret,
|
||||
'secret': account.escape_jinja2_syntax(secret),
|
||||
'private_key_path': private_key_path,
|
||||
'become': account.get_ansible_become_auth(),
|
||||
}
|
||||
|
||||
@@ -24,6 +24,7 @@ class AutomationTypes(models.TextChoices):
    push_account = 'push_account', _('Push account')
    change_secret = 'change_secret', _('Change secret')
    verify_account = 'verify_account', _('Verify account')
    remove_account = 'remove_account', _('Remove account')
    gather_accounts = 'gather_accounts', _('Gather accounts')
    verify_gateway_account = 'verify_gateway_account', _('Verify gateway account')

@@ -51,6 +51,7 @@ class AccountFilterSet(BaseFilterSet):
|
||||
|
||||
class GatheredAccountFilterSet(BaseFilterSet):
|
||||
node_id = drf_filters.CharFilter(method='filter_nodes')
|
||||
asset_id = drf_filters.CharFilter(field_name='asset_id', lookup_expr='exact')
|
||||
|
||||
@staticmethod
|
||||
def filter_nodes(queryset, name, value):
|
||||
@@ -58,4 +59,4 @@ class GatheredAccountFilterSet(BaseFilterSet):
|
||||
|
||||
class Meta:
|
||||
model = GatheredAccount
|
||||
fields = ['id', 'asset_id', 'username']
|
||||
fields = ['id', 'username']
|
||||
|
||||
@@ -4,7 +4,6 @@ from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('accounts', '0006_gatheredaccount'),
|
||||
]
|
||||
@@ -12,6 +11,13 @@ class Migration(migrations.Migration):
|
||||
operations = [
|
||||
migrations.AlterModelOptions(
|
||||
name='account',
|
||||
options={'permissions': [('view_accountsecret', 'Can view asset account secret'), ('view_historyaccount', 'Can view asset history account'), ('view_historyaccountsecret', 'Can view asset history account secret'), ('verify_account', 'Can verify account'), ('push_account', 'Can push account')], 'verbose_name': 'Account'},
|
||||
options={'permissions': [
|
||||
('view_accountsecret', 'Can view asset account secret'),
|
||||
('view_historyaccount', 'Can view asset history account'),
|
||||
('view_historyaccountsecret', 'Can view asset history account secret'),
|
||||
('verify_account', 'Can verify account'),
|
||||
('push_account', 'Can push account'),
|
||||
('remove_account', 'Can remove account'),
|
||||
], 'verbose_name': 'Account'},
|
||||
),
|
||||
]
|
||||
|
||||
@@ -4,6 +4,7 @@ from simple_history.models import HistoricalRecords
|
||||
|
||||
from assets.models.base import AbsConnectivity
|
||||
from common.utils import lazyproperty
|
||||
from labels.mixins import LabeledMixin
|
||||
from .base import BaseAccount
|
||||
from .mixins import VaultModelMixin
|
||||
from ..const import Source
|
||||
@@ -42,7 +43,7 @@ class AccountHistoricalRecords(HistoricalRecords):
|
||||
return super().create_history_model(model, inherited)
|
||||
|
||||
|
||||
class Account(AbsConnectivity, BaseAccount):
|
||||
class Account(AbsConnectivity, LabeledMixin, BaseAccount):
|
||||
asset = models.ForeignKey(
|
||||
'assets.Asset', related_name='accounts',
|
||||
on_delete=models.CASCADE, verbose_name=_('Asset')
|
||||
@@ -68,10 +69,15 @@ class Account(AbsConnectivity, BaseAccount):
|
||||
('view_historyaccountsecret', _('Can view asset history account secret')),
|
||||
('verify_account', _('Can verify account')),
|
||||
('push_account', _('Can push account')),
|
||||
('remove_account', _('Can remove account')),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
return '{}'.format(self.username)
|
||||
if self.asset_id:
|
||||
host = self.asset.name
|
||||
else:
|
||||
host = 'Dynamic'
|
||||
return '{}({})'.format(self.name, host)
|
||||
|
||||
@lazyproperty
|
||||
def platform(self):
|
||||
@@ -95,14 +101,13 @@ class Account(AbsConnectivity, BaseAccount):
|
||||
""" 排除自己和以自己为 su-from 的账号 """
|
||||
return self.asset.accounts.exclude(id=self.id).exclude(su_from=self)
|
||||
|
||||
@staticmethod
|
||||
def make_account_ansible_vars(su_from):
|
||||
def make_account_ansible_vars(self, su_from):
|
||||
var = {
|
||||
'ansible_user': su_from.username,
|
||||
}
|
||||
if not su_from.secret:
|
||||
return var
|
||||
var['ansible_password'] = su_from.secret
|
||||
var['ansible_password'] = self.escape_jinja2_syntax(su_from.secret)
|
||||
var['ansible_ssh_private_key_file'] = su_from.private_key_path
|
||||
return var
|
||||
|
||||
@@ -119,9 +124,25 @@ class Account(AbsConnectivity, BaseAccount):
        auth['ansible_become'] = True
        auth['ansible_become_method'] = become_method
        auth['ansible_become_user'] = self.username
        auth['ansible_become_password'] = password
        auth['ansible_become_password'] = self.escape_jinja2_syntax(password)
        return auth

    @staticmethod
    def escape_jinja2_syntax(value):
        if not isinstance(value, str):
            return value

        def escape(v):
            v = v.replace('{{', '__TEMP_OPEN_BRACES__') \
                .replace('}}', '__TEMP_CLOSE_BRACES__')

            v = v.replace('__TEMP_OPEN_BRACES__', '{{ "{{" }}') \
                .replace('__TEMP_CLOSE_BRACES__', '{{ "}}" }}')

            return v.replace('{%', '{{ "{%" }}').replace('%}', '{{ "%}" }}')

        return escape(value)


def replace_history_model_with_mixin():
    """
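The escape_jinja2_syntax helper added above rewrites Jinja2 delimiters found in a secret ({{, }}, {% and %}) into literal string expressions, so a password containing template syntax is passed through Ansible's rendering as plain text instead of being expanded or breaking the template. A standalone copy to make the transformation visible on a sample value:

```python
# Standalone copy of the escaping shown above, to demonstrate its output.
def escape_jinja2_syntax(value):
    if not isinstance(value, str):
        return value
    value = value.replace('{{', '__TEMP_OPEN_BRACES__').replace('}}', '__TEMP_CLOSE_BRACES__')
    value = value.replace('__TEMP_OPEN_BRACES__', '{{ "{{" }}').replace('__TEMP_CLOSE_BRACES__', '{{ "}}" }}')
    return value.replace('{%', '{{ "{%" }}').replace('%}', '{{ "%}" }}')


print(escape_jinja2_syntax('p@ss{{word}}'))
# p@ss{{ "{{" }}word{{ "}}" }}  -> Jinja2 renders this back to the literal p@ss{{word}}
```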
@@ -3,13 +3,14 @@ from django.db.models import Count, Q
|
||||
from django.utils import timezone
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from labels.mixins import LabeledMixin
|
||||
from .account import Account
|
||||
from .base import BaseAccount, SecretWithRandomMixin
|
||||
|
||||
__all__ = ['AccountTemplate', ]
|
||||
|
||||
|
||||
class AccountTemplate(BaseAccount, SecretWithRandomMixin):
|
||||
class AccountTemplate(LabeledMixin, BaseAccount, SecretWithRandomMixin):
|
||||
su_from = models.ForeignKey(
|
||||
'self', related_name='su_to', null=True,
|
||||
on_delete=models.SET_NULL, verbose_name=_("Su from")
|
||||
|
||||
@@ -3,8 +3,8 @@ from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from common.tasks import send_mail_attachment_async, upload_backup_to_obj_storage
|
||||
from notifications.notifications import UserMessage
|
||||
from users.models import User
|
||||
from terminal.models.component.storage import ReplayStorage
|
||||
from users.models import User
|
||||
|
||||
|
||||
class AccountBackupExecutionTaskMsg(object):
|
||||
@@ -23,8 +23,8 @@ class AccountBackupExecutionTaskMsg(object):
|
||||
else:
|
||||
return _("{} - The account backup passage task has been completed: "
|
||||
"the encryption password has not been set - "
|
||||
"please go to personal information -> file encryption password "
|
||||
"to set the encryption password").format(name)
|
||||
"please go to personal information -> Basic file encryption password for preference settings"
|
||||
).format(name)
|
||||
|
||||
def publish(self, attachment_list=None):
|
||||
send_mail_attachment_async(
|
||||
|
||||
apps/accounts/permissions.py (new file, 19 lines)
@@ -0,0 +1,19 @@
from rest_framework import permissions


def check_permissions(request):
    act = request.data.get('action')
    if act == 'push':
        code = 'accounts.push_account'
    elif act == 'remove':
        code = 'accounts.remove_account'
    else:
        code = 'accounts.verify_account'
    return request.user.has_perm(code)


class AccountTaskActionPermission(permissions.IsAuthenticated):

    def has_permission(self, request, view):
        return super().has_permission(request, view) \
            and check_permissions(request)
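This class backs the permission_classes assignment on AccountsTaskCreateAPI shown earlier: it maps the submitted action to a Django model permission code and is evaluated by DRF before perform_create runs. A quick sanity check of the action-to-code mapping, using simple stubs in place of DRF's Request and Django's User (illustrative only):

```python
# Exercise the mapping from check_permissions() with stub objects.
from types import SimpleNamespace


def check_permissions(request):  # same logic as the module above
    act = request.data.get('action')
    if act == 'push':
        code = 'accounts.push_account'
    elif act == 'remove':
        code = 'accounts.remove_account'
    else:
        code = 'accounts.verify_account'
    return request.user.has_perm(code)


class StubUser:
    def __init__(self, perms):
        self.perms = set(perms)

    def has_perm(self, code):
        return code in self.perms


request = SimpleNamespace(data={'action': 'remove'},
                          user=StubUser({'accounts.remove_account'}))
assert check_permissions(request)                 # remove maps to accounts.remove_account
request.user = StubUser({'accounts.verify_account'})
assert not check_permissions(request)             # wrong permission for the remove action
```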
@@ -10,7 +10,7 @@ from rest_framework.generics import get_object_or_404
|
||||
from rest_framework.validators import UniqueTogetherValidator
|
||||
|
||||
from accounts.const import SecretType, Source, AccountInvalidPolicy
|
||||
from accounts.models import Account, AccountTemplate
|
||||
from accounts.models import Account, AccountTemplate, GatheredAccount
|
||||
from accounts.tasks import push_accounts_to_assets_task
|
||||
from assets.const import Category, AllTypes
|
||||
from assets.models import Asset
|
||||
@@ -66,6 +66,9 @@ class AccountCreateUpdateSerializerMixin(serializers.Serializer):
|
||||
name = initial_data.get('name')
|
||||
if name is not None:
|
||||
return
|
||||
request = self.context.get('request')
|
||||
if request and request.method == 'PATCH':
|
||||
return
|
||||
if not name:
|
||||
name = initial_data.get('username')
|
||||
if self.instance and self.instance.name == name:
|
||||
@@ -238,7 +241,7 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
|
||||
queryset = queryset.prefetch_related(
|
||||
'asset', 'asset__platform',
|
||||
'asset__platform__automation'
|
||||
)
|
||||
).prefetch_related('labels', 'labels__label')
|
||||
return queryset
|
||||
|
||||
|
||||
@@ -455,11 +458,15 @@ class AccountTaskSerializer(serializers.Serializer):
|
||||
('test', 'test'),
|
||||
('verify', 'verify'),
|
||||
('push', 'push'),
|
||||
('remove', 'remove'),
|
||||
)
|
||||
action = serializers.ChoiceField(choices=ACTION_CHOICES, write_only=True)
|
||||
accounts = serializers.PrimaryKeyRelatedField(
|
||||
queryset=Account.objects, required=False, allow_empty=True, many=True
|
||||
)
|
||||
gather_accounts = serializers.PrimaryKeyRelatedField(
|
||||
queryset=GatheredAccount.objects, required=False, allow_empty=True, many=True
|
||||
)
|
||||
task = serializers.CharField(read_only=True)
|
||||
params = serializers.JSONField(
|
||||
decoder=None, encoder=None, required=False,
|
||||
|
||||
@@ -5,6 +5,7 @@ from rest_framework import serializers
from accounts.const import SecretType
from accounts.models import BaseAccount
from accounts.utils import validate_password_for_ansible, validate_ssh_key
from common.serializers import ResourceLabelsMixin
from common.serializers.fields import EncryptedField, LabeledChoiceField
from orgs.mixins.serializers import BulkOrgResourceModelSerializer

@@ -60,8 +61,7 @@ class AuthValidateMixin(serializers.Serializer):
        return super().update(instance, validated_data)


class BaseAccountSerializer(AuthValidateMixin, BulkOrgResourceModelSerializer):
class BaseAccountSerializer(AuthValidateMixin, ResourceLabelsMixin, BulkOrgResourceModelSerializer):
    class Meta:
        model = BaseAccount
        fields_mini = ['id', 'name', 'username']

@@ -70,7 +70,7 @@ class BaseAccountSerializer(AuthValidateMixin, BulkOrgResourceModelSerializer):
            'privileged', 'is_active', 'spec_info',
        ]
        fields_other = ['created_by', 'date_created', 'date_updated', 'comment']
        fields = fields_small + fields_other
        fields = fields_small + fields_other + ['labels']
        read_only_fields = [
            'spec_info', 'date_verified', 'created_by', 'date_created',
        ]

@@ -15,6 +15,9 @@ class PasswordRulesSerializer(serializers.Serializer):
    uppercase = serializers.BooleanField(default=True, label=_('Uppercase'))
    digit = serializers.BooleanField(default=True, label=_('Digit'))
    symbol = serializers.BooleanField(default=True, label=_('Special symbol'))
    exclude_symbols = serializers.CharField(
        default='', allow_blank=True, max_length=16, label=_('Exclude symbol')
    )


class AccountTemplateSerializer(BaseAccountSerializer):

@@ -21,7 +21,8 @@ def on_account_pre_save(sender, instance, **kwargs):
    if instance.version == 0:
        instance.version = 1
    else:
        instance.version = instance.history.count()
        history_account = instance.history.first()
        instance.version = history_account.version + 1 if history_account else 0


@merge_delay_run(ttl=5)

@@ -2,5 +2,6 @@ from .automation import *
from .backup_account import *
from .gather_accounts import *
from .push_account import *
from .remove_account import *
from .template import *
from .verify_account import *
77  apps/accounts/tasks/remove_account.py  Normal file
@@ -0,0 +1,77 @@
import uuid
from collections import defaultdict

from celery import shared_task, current_task
from django.conf import settings
from django.db.models import Count
from django.utils.translation import gettext_noop, gettext_lazy as _

from accounts.const import AutomationTypes
from accounts.models import Account
from accounts.tasks.common import quickstart_automation_by_snapshot
from audits.const import ActivityChoices
from common.const.crontab import CRONTAB_AT_AM_TWO
from common.utils import get_logger
from ops.celery.decorator import register_as_period_task
from orgs.utils import tmp_to_root_org

logger = get_logger(__file__)

__all__ = ['remove_accounts_task']


@shared_task(
    queue="ansible", verbose_name=_('Remove account'),
    activity_callback=lambda self, gather_account_ids, *args, **kwargs: (gather_account_ids, None)
)
def remove_accounts_task(gather_account_ids):
    from accounts.models import GatheredAccount

    gather_accounts = GatheredAccount.objects.filter(
        id__in=gather_account_ids
    )
    task_name = gettext_noop("Remove account")

    task_snapshot = {
        'assets': [str(i.asset_id) for i in gather_accounts],
        'gather_accounts': [str(i.id) for i in gather_accounts],
    }

    tp = AutomationTypes.remove_account
    quickstart_automation_by_snapshot(task_name, tp, task_snapshot)


@shared_task(verbose_name=_('Clean historical accounts'))
@register_as_period_task(crontab=CRONTAB_AT_AM_TWO)
@tmp_to_root_org()
def clean_historical_accounts():
    from audits.signal_handlers import create_activities
    print("Clean historical accounts start.")
    if settings.HISTORY_ACCOUNT_CLEAN_LIMIT >= 999:
        return
    limit = settings.HISTORY_ACCOUNT_CLEAN_LIMIT

    history_ids_to_be_deleted = []
    history_model = Account.history.model
    history_id_mapper = defaultdict(list)

    ids = history_model.objects.values('id').annotate(count=Count('id')) \
        .filter(count__gte=limit).values_list('id', flat=True)

    if not ids:
        return

    for i in history_model.objects.filter(id__in=ids):
        _id = str(i.id)
        history_id_mapper[_id].append(i.history_id)

    for history_ids in history_id_mapper.values():
        history_ids_to_be_deleted.extend(history_ids[limit:])
    history_qs = history_model.objects.filter(history_id__in=history_ids_to_be_deleted)

    resource_ids = list(history_qs.values_list('history_id', flat=True))
    history_qs.delete()

    task_id = current_task.request.id if current_task else str(uuid.uuid4())
    detail = gettext_noop('Remove historical accounts that are out of range.')
    create_activities(resource_ids, detail, task_id, action=ActivityChoices.task, org_id='')
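As a rough usage sketch (not part of the diff), the task is dispatched with a list of GatheredAccount IDs, for example from the account task API; the queryset filter below is an assumption for illustration:

    # Hypothetical call site: queue removal for a set of gathered accounts.
    ids = [str(ga.id) for ga in GatheredAccount.objects.filter(asset=some_asset)]  # 'some_asset' is assumed
    remove_accounts_task.delay(ids)  # runs on the "ansible" queue declared above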
@@ -30,7 +30,8 @@ class SecretGenerator:
            'lower': rules['lowercase'],
            'upper': rules['uppercase'],
            'digit': rules['digit'],
            'special_char': rules['symbol']
            'special_char': rules['symbol'],
            'exclude_chars': rules.get('exclude_symbols', ''),
        }
        return random_string(**rules)
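The new exclude_chars rule simply removes characters from the candidate pool before sampling. A self-contained sketch of the idea follows; this is not jumpserver's random_string implementation, just an illustration of the pattern:

    import secrets
    import string

    def sample_password(length=16, exclude_chars='{}%'):
        # Drop excluded symbols from the pool, then sample with a CSPRNG.
        excluded = set(exclude_chars)
        pool = ''.join(c for c in string.ascii_letters + string.digits + string.punctuation
                       if c not in excluded)
        return ''.join(secrets.choice(pool) for _ in range(length))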
@@ -46,18 +47,10 @@ class SecretGenerator:

def validate_password_for_ansible(password):
    """ 校验 Ansible 不支持的特殊字符 """
    # validate password contains left double curly bracket
    # check password not contains `{{`
    # Ansible 推送的时候不支持
    if '{{' in password or '}}' in password:
        raise serializers.ValidationError(_('Password can not contains `{{` or `}}`'))
    if '{%' in password or '%}' in password:
        raise serializers.ValidationError(_('Password can not contains `{%` or `%}`'))
    # Ansible Windows 推送的时候不支持
    # if "'" in password:
    #     raise serializers.ValidationError(_("Password can not contains `'` "))
    # if '"' in password:
    #     raise serializers.ValidationError(_('Password can not contains `"` '))
    if password.startswith('{{') and password.endswith('}}'):
        raise serializers.ValidationError(
            _('If the password starts with {{` and ends with }} `, then the password is not allowed.')
        )


def validate_ssh_key(ssh_key, passphrase=None):
@@ -11,7 +11,7 @@ __all__ = ['CommandFilterACLViewSet', 'CommandGroupViewSet']
class CommandGroupViewSet(OrgBulkModelViewSet):
    model = models.CommandGroup
    filterset_fields = ('name', 'command_filters')
    search_fields = filterset_fields
    search_fields = ('name',)
    serializer_class = serializers.CommandGroupSerializer

@@ -1,6 +1,7 @@
from django.template.loader import render_to_string
from django.utils.translation import gettext_lazy as _

from accounts.models import Account
from assets.models import Asset
from audits.models import UserLoginLog
from notifications.notifications import UserMessage

@@ -16,12 +17,11 @@ class UserLoginReminderMsg(UserMessage):

    def get_html_msg(self) -> dict:
        user_log = self.user_log

        context = {
            'ip': user_log.ip,
            'city': user_log.city,
            'username': user_log.username,
            'recipient': self.user.username,
            'recipient': self.user,
            'user_agent': user_log.user_agent,
        }
        message = render_to_string('acls/user_login_reminder.html', context)

@@ -48,11 +48,14 @@ class AssetLoginReminderMsg(UserMessage):
        super().__init__(user)

    def get_html_msg(self) -> dict:
        account = Account.objects.get(asset=self.asset, username=self.account_username)
        context = {
            'recipient': self.user.username,
            'recipient': self.user,
            'username': self.login_user.username,
            'name': self.login_user.name,
            'asset': str(self.asset),
            'account': self.account_username,
            'account_name': account.name,
        }
        message = render_to_string('acls/asset_login_reminder.html', context)
@@ -1,10 +1,10 @@
{% load i18n %}

<h3>{% trans 'Respectful' %}{{ recipient }},</h3>
<h3>{% trans 'Respectful' %}: {{ recipient.name }}[{{ recipient.username }}]</h3>
<hr>
<p><strong>{% trans 'Username' %}:</strong> [{{ username }}]</p>
<p><strong>{% trans 'User' %}:</strong> [{{ name }}({{ username }})]</p>
<p><strong>{% trans 'Assets' %}:</strong> [{{ asset }}]</p>
<p><strong>{% trans 'Account' %}:</strong> [{{ account }}]</p>
<p><strong>{% trans 'Account' %}:</strong> [{{ account_name }}({{ account }})]</p>
<hr>

<p>{% trans 'The user has just logged in to the asset. Please ensure that this is an authorized operation. If you suspect that this is an unauthorized access, please take appropriate measures immediately.' %}</p>

@@ -1,8 +1,8 @@
{% load i18n %}

<h3>{% trans 'Respectful' %}{{ recipient }},</h3>
<h3>{% trans 'Respectful' %}: {{ recipient.name }}[{{ recipient.username }}]</h3>
<hr>
<p><strong>{% trans 'Username' %}:</strong> [{{ username }}]</p>
<p><strong>{% trans 'User' %}:</strong> [{{ username }}]</p>
<p><strong>IP:</strong> [{{ ip }}]</p>
<p><strong>{% trans 'Login city' %}:</strong> [{{ city }}]</p>
<p><strong>{% trans 'User agent' %}:</strong> [{{ user_agent }}]</p>
@@ -2,7 +2,6 @@ from .asset import *
from .category import *
from .domain import *
from .favorite_asset import *
from .label import *
from .mixin import *
from .node import *
from .platform import *
@@ -3,7 +3,6 @@
from collections import defaultdict

import django_filters
from django.db.models import Q
from django.shortcuts import get_object_or_404
from django.utils.translation import gettext as _
from rest_framework import status

@@ -14,7 +13,7 @@ from rest_framework.status import HTTP_200_OK
from accounts.tasks import push_accounts_to_assets_task, verify_accounts_connectivity_task
from assets import serializers
from assets.exceptions import NotSupportedTemporarilyError
from assets.filters import IpInFilterBackend, LabelFilterBackend, NodeFilterBackend
from assets.filters import IpInFilterBackend, NodeFilterBackend
from assets.models import Asset, Gateway, Platform, Protocol
from assets.tasks import test_assets_connectivity_manual, update_assets_hardware_info_manual
from common.api import SuggestionMixin

@@ -22,7 +21,6 @@ from common.drf.filters import BaseFilterSet, AttrRulesFilterBackend
from common.utils import get_logger, is_uuid
from orgs.mixins import generics
from orgs.mixins.api import OrgBulkModelViewSet
from ..mixin import NodeFilterMixin
from ...notifications import BulkUpdatePlatformSkipAssetUserMsg

logger = get_logger(__file__)

@@ -33,7 +31,6 @@ __all__ = [

class AssetFilterSet(BaseFilterSet):
    labels = django_filters.CharFilter(method='filter_labels')
    platform = django_filters.CharFilter(method='filter_platform')
    domain = django_filters.CharFilter(method='filter_domain')
    type = django_filters.CharFilter(field_name="platform__type", lookup_expr="exact")

@@ -64,7 +61,7 @@ class AssetFilterSet(BaseFilterSet):
    class Meta:
        model = Asset
        fields = [
            "id", "name", "address", "is_active", "labels",
            "id", "name", "address", "is_active",
            "type", "category", "platform",
        ]

@@ -87,25 +84,15 @@ class AssetFilterSet(BaseFilterSet):
        value = value.split(',')
        return queryset.filter(protocols__name__in=value).distinct()

    @staticmethod
    def filter_labels(queryset, name, value):
        if ':' in value:
            n, v = value.split(':', 1)
            queryset = queryset.filter(labels__name=n, labels__value=v)
        else:
            q = Q(labels__name__contains=value) | Q(labels__value__contains=value)
            queryset = queryset.filter(q).distinct()
        return queryset


class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):
class AssetViewSet(SuggestionMixin, OrgBulkModelViewSet):
    """
    API endpoint that allows Asset to be viewed or edited.
    """
    model = Asset
    filterset_class = AssetFilterSet
    search_fields = ("name", "address", "comment")
    ordering_fields = ('name', 'connectivity', 'platform', 'date_updated')
    ordering_fields = ('name', 'address', 'connectivity', 'platform', 'date_updated', 'date_created')
    serializer_classes = (
        ("default", serializers.AssetSerializer),
        ("platform", serializers.PlatformSerializer),

@@ -121,14 +108,12 @@ class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):
        ("sync_platform_protocols", "assets.change_asset"),
    )
    extra_filter_backends = [
        LabelFilterBackend, IpInFilterBackend,
        IpInFilterBackend,
        NodeFilterBackend, AttrRulesFilterBackend
    ]

    def get_queryset(self):
        queryset = super().get_queryset() \
            .prefetch_related('nodes', 'protocols') \
            .select_related('platform', 'domain')
        queryset = super().get_queryset()
        if queryset.model is not Asset:
            queryset = queryset.select_related('asset_ptr')
        return queryset
@@ -20,14 +20,15 @@ class DomainViewSet(OrgBulkModelViewSet):
    filterset_fields = ("name",)
    search_fields = filterset_fields
    ordering = ('name',)
    serializer_classes = {
        'default': serializers.DomainSerializer,
        'list': serializers.DomainListSerializer,
    }

    def get_serializer_class(self):
        if self.request.query_params.get('gateway'):
            return serializers.DomainWithGatewaySerializer
        return serializers.DomainSerializer

    def get_queryset(self):
        return super().get_queryset().prefetch_related('assets')
        return super().get_serializer_class()


class GatewayViewSet(HostViewSet):
@@ -1,43 +0,0 @@
# ~*~ coding: utf-8 ~*~
# Copyright (C) 2014-2018 Beijing DuiZhan Technology Co.,Ltd. All Rights Reserved.
#
# Licensed under the GNU General Public License v2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.gnu.org/licenses/gpl-2.0.html
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from django.db.models import Count

from common.utils import get_logger
from orgs.mixins.api import OrgBulkModelViewSet
from ..models import Label
from .. import serializers


logger = get_logger(__file__)
__all__ = ['LabelViewSet']


class LabelViewSet(OrgBulkModelViewSet):
    model = Label
    filterset_fields = ("name", "value")
    search_fields = filterset_fields
    serializer_class = serializers.LabelSerializer

    def list(self, request, *args, **kwargs):
        if request.query_params.get("distinct"):
            self.serializer_class = serializers.LabelDistinctSerializer
            self.queryset = self.queryset.values("name").distinct()
        return super().list(request, *args, **kwargs)

    def get_queryset(self):
        self.queryset = Label.objects.prefetch_related(
            'assets').annotate(asset_count=Count("assets"))
        return self.queryset
@@ -2,7 +2,7 @@ from typing import List

from rest_framework.request import Request

from assets.models import Node, Protocol
from assets.models import Node, Platform, Protocol
from assets.utils import get_node_from_request, is_query_node_all_assets
from common.utils import lazyproperty, timeit

@@ -71,37 +71,49 @@ class SerializeToTreeNodeMixin:
        return 'file'

    @timeit
    def serialize_assets(self, assets, node_key=None, pid=None):
        if node_key is None:
            get_pid = lambda asset: getattr(asset, 'parent_key', '')
        else:
            get_pid = lambda asset: node_key
    def serialize_assets(self, assets, node_key=None, get_pid=None):
        if not get_pid and not node_key:
            get_pid = lambda asset, platform: getattr(asset, 'parent_key', '')

        sftp_asset_ids = Protocol.objects.filter(name='sftp') \
            .values_list('asset_id', flat=True)
        sftp_asset_ids = list(sftp_asset_ids)
        data = [
            {
        sftp_asset_ids = set(sftp_asset_ids)
        platform_map = {p.id: p for p in Platform.objects.all()}

        data = []
        root_assets_count = 0
        for asset in assets:
            platform = platform_map.get(asset.platform_id)
            if not platform:
                continue
            pid = node_key or get_pid(asset, platform)
            if not pid:
                continue
            # 根节点最多显示 1000 个资产
            if pid.isdigit():
                if root_assets_count > 1000:
                    continue
                root_assets_count += 1
            data.append({
                'id': str(asset.id),
                'name': asset.name,
                'title': f'{asset.address}\n{asset.comment}',
                'pId': pid or get_pid(asset),
                'title': f'{asset.address}\n{asset.comment}'.strip(),
                'pId': pid,
                'isParent': False,
                'open': False,
                'iconSkin': self.get_icon(asset),
                'iconSkin': self.get_icon(platform),
                'chkDisabled': not asset.is_active,
                'meta': {
                    'type': 'asset',
                    'data': {
                        'platform_type': asset.platform.type,
                        'platform_type': platform.type,
                        'org_name': asset.org_name,
                        'sftp': asset.id in sftp_asset_ids,
                        'name': asset.name,
                        'address': asset.address
                    },
                }
            }
            for asset in assets
        ]
            })
        return data
@@ -29,7 +29,10 @@ class AssetPlatformViewSet(JMSModelViewSet):
    }

    def get_queryset(self):
        queryset = super().get_queryset()
        # 因为没有走分页逻辑,所以需要这里 prefetch
        queryset = super().get_queryset().prefetch_related(
            'protocols', 'automation', 'labels', 'labels__label',
        )
        queryset = queryset.filter(type__in=AllTypes.get_types_values())
        return queryset

@@ -126,6 +126,8 @@ class NodeChildrenAsTreeApi(SerializeToTreeNodeMixin, NodeChildrenApi):
        include_assets = self.request.query_params.get('assets', '0') == '1'
        if not self.instance or not include_assets:
            return Asset.objects.none()
        if self.instance.is_org_root():
            return Asset.objects.none()
        if query_all:
            assets = self.instance.get_all_assets()
        else:
@@ -17,6 +17,62 @@ from ops.ansible import JMSInventory, PlaybookRunner, DefaultCallback
logger = get_logger(__name__)


class SSHTunnelManager:
    def __init__(self, *args, **kwargs):
        self.gateway_servers = dict()

    @staticmethod
    def file_to_json(path):
        with open(path, 'r') as f:
            d = json.load(f)
        return d

    @staticmethod
    def json_to_file(path, data):
        with open(path, 'w') as f:
            json.dump(data, f, indent=4, sort_keys=True)

    def local_gateway_prepare(self, runner):
        info = self.file_to_json(runner.inventory)
        servers, not_valid = [], []
        for k, host in info['all']['hosts'].items():
            jms_asset, jms_gateway = host.get('jms_asset'), host.get('gateway')
            if not jms_gateway:
                continue
            try:
                server = SSHTunnelForwarder(
                    (jms_gateway['address'], jms_gateway['port']),
                    ssh_username=jms_gateway['username'],
                    ssh_password=jms_gateway['secret'],
                    ssh_pkey=jms_gateway['private_key_path'],
                    remote_bind_address=(jms_asset['address'], jms_asset['port'])
                )
                server.start()
            except Exception as e:
                err_msg = 'Gateway is not active: %s' % jms_asset.get('name', '')
                print(f'\033[31m {err_msg} 原因: {e} \033[0m\n')
                not_valid.append(k)
            else:
                local_bind_port = server.local_bind_port
                host['ansible_host'] = jms_asset['address'] = host['login_host'] = '127.0.0.1'
                host['ansible_port'] = jms_asset['port'] = host['login_port'] = local_bind_port
                servers.append(server)

        # 网域不可连接的,就不继续执行此资源的后续任务了
        for a in set(not_valid):
            info['all']['hosts'].pop(a)
        self.json_to_file(runner.inventory, info)
        self.gateway_servers[runner.id] = servers

    def local_gateway_clean(self, runner):
        servers = self.gateway_servers.get(runner.id, [])
        for s in servers:
            try:
                s.stop()
            except Exception:
                pass


class PlaybookCallback(DefaultCallback):
    def playbook_on_stats(self, event_data, **kwargs):
        super().playbook_on_stats(event_data, **kwargs)
@@ -37,7 +93,6 @@ class BasePlaybookManager:
        # 根据执行方式就行分组, 不同资产的改密、推送等操作可能会使用不同的执行方式
        # 然后根据执行方式分组, 再根据 bulk_size 分组, 生成不同的 playbook
        self.playbooks = []
        self.gateway_servers = dict()
        params = self.execution.snapshot.get('params')
        self.params = params or {}

@@ -157,22 +212,19 @@ class BasePlaybookManager:
        os.chmod(key_path, 0o400)
        return key_path

    def generate_inventory(self, platformed_assets, inventory_path):
    def generate_inventory(self, platformed_assets, inventory_path, protocol):
        inventory = JMSInventory(
            assets=platformed_assets,
            account_prefer=self.ansible_account_prefer,
            account_policy=self.ansible_account_policy,
            host_callback=self.host_callback,
            task_type=self.__class__.method_type(),
            protocol=protocol,
        )
        inventory.write_to_file(inventory_path)

    def generate_playbook(self, platformed_assets, platform, sub_playbook_dir):
        method_id = getattr(platform.automation, '{}_method'.format(self.__class__.method_type()))
        method = self.method_id_meta_mapper.get(method_id)
        if not method:
            logger.error("Method not found: {}".format(method_id))
            return
    @staticmethod
    def generate_playbook(method, sub_playbook_dir):
        method_playbook_dir_path = method['dir']
        sub_playbook_path = os.path.join(sub_playbook_dir, 'project', 'main.yml')
        shutil.copytree(method_playbook_dir_path, os.path.dirname(sub_playbook_path))

@@ -204,8 +256,16 @@ class BasePlaybookManager:
            sub_dir = '{}_{}'.format(platform.name, i)
            playbook_dir = os.path.join(self.runtime_dir, sub_dir)
            inventory_path = os.path.join(self.runtime_dir, sub_dir, 'hosts.json')
            self.generate_inventory(_assets, inventory_path)
            playbook_path = self.generate_playbook(_assets, platform, playbook_dir)

            method_id = getattr(platform.automation, '{}_method'.format(self.__class__.method_type()))
            method = self.method_id_meta_mapper.get(method_id)

            if not method:
                logger.error("Method not found: {}".format(method_id))
                continue
            protocol = method.get('protocol')
            self.generate_inventory(_assets, inventory_path, protocol)
            playbook_path = self.generate_playbook(method, playbook_dir)
            if not playbook_path:
                continue

@@ -247,66 +307,10 @@ class BasePlaybookManager:
    def on_runner_failed(self, runner, e):
        print("Runner failed: {} {}".format(e, self))

    @staticmethod
    def file_to_json(path):
        with open(path, 'r') as f:
            d = json.load(f)
        return d

    @staticmethod
    def json_dumps(data):
        return json.dumps(data, indent=4, sort_keys=True)

    @staticmethod
    def json_to_file(path, data):
        with open(path, 'w') as f:
            json.dump(data, f, indent=4, sort_keys=True)

    def local_gateway_prepare(self, runner):
        info = self.file_to_json(runner.inventory)
        servers, not_valid = [], []
        for k, host in info['all']['hosts'].items():
            jms_asset, jms_gateway = host.get('jms_asset'), host.get('gateway')
            if not jms_gateway:
                continue
            try:
                server = SSHTunnelForwarder(
                    (jms_gateway['address'], jms_gateway['port']),
                    ssh_username=jms_gateway['username'],
                    ssh_password=jms_gateway['secret'],
                    ssh_pkey=jms_gateway['private_key_path'],
                    remote_bind_address=(jms_asset['address'], jms_asset['port'])
                )
                server.start()
            except Exception as e:
                err_msg = 'Gateway is not active: %s' % jms_asset.get('name', '')
                print(f'\033[31m {err_msg} 原因: {e} \033[0m\n')
                not_valid.append(k)
            else:
                host['ansible_host'] = jms_asset['address'] = '127.0.0.1'
                host['ansible_port'] = jms_asset['port'] = server.local_bind_port
                servers.append(server)

        # 网域不可连接的,就不继续执行此资源的后续任务了
        for a in set(not_valid):
            info['all']['hosts'].pop(a)
        self.json_to_file(runner.inventory, info)
        self.gateway_servers[runner.id] = servers

    def local_gateway_clean(self, runner):
        servers = self.gateway_servers.get(runner.id, [])
        for s in servers:
            try:
                s.stop()
            except Exception:
                pass

    def before_runner_start(self, runner):
        self.local_gateway_prepare(runner)

    def after_runner_end(self, runner):
        self.local_gateway_clean(runner)

    def delete_runtime_dir(self):
        if settings.DEBUG_DEV:
            return

@@ -326,14 +330,15 @@ class BasePlaybookManager:
        for i, runner in enumerate(runners, start=1):
            if len(runners) > 1:
                print(">>> 开始执行第 {} 批任务".format(i))
            self.before_runner_start(runner)
            ssh_tunnel = SSHTunnelManager()
            ssh_tunnel.local_gateway_prepare(runner)
            try:
                cb = runner.run(**kwargs)
                self.on_runner_success(runner, cb)
            except Exception as e:
                self.on_runner_failed(runner, e)
            finally:
                self.after_runner_end(runner)
                ssh_tunnel.local_gateway_clean(runner)
                print('\n')
        self.execution.status = 'success'
        self.execution.date_finished = timezone.now()
@@ -2,6 +2,7 @@
  gather_facts: no
  vars:
    ansible_python_interpreter: /opt/py3/bin/python
    check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"

  tasks:
    - name: Get info

@@ -10,10 +11,10 @@
        login_password: "{{ jms_account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        check_hostname: "{{ omit if not jms_asset.spec_info.use_ssl else jms_asset.spec_info.allow_invalid_cert }}"
        ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) }}"
        client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) }}"
        client_key: "{{ jms_asset.secret_info.client_key | default(omit) }}"
        check_hostname: "{{ check_ssl if check_ssl else omit }}"
        ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
        client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
        client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
        filter: version
      register: db_info

@@ -7,6 +7,6 @@ type:
method: gather_facts
i18n:
  Gather posix facts:
    zh: 使用 Ansible 指令 gather_facts 从主机获取设备信息
    en: Gather facts from asset using gather_facts
    ja: gather_factsを使用してPosixから情報を収集する
    zh: '使用 Ansible 指令 gather_facts 从主机获取设备信息'
    en: 'Gather facts from asset using gather_facts'
    ja: 'gather_factsを使用してPosixから情報を収集する'

@@ -7,6 +7,6 @@ type:
  - windows
i18n:
  Gather facts windows:
    zh: 使用 Ansible 指令 gather_facts 从 Windows 获取设备信息
    en: Gather facts from Windows using gather_facts
    ja: gather_factsを使用してWindowsから情報を収集する
    zh: '使用 Ansible 指令 gather_facts 从 Windows 获取设备信息'
    en: 'Gather facts from Windows using gather_facts'
    ja: 'gather_factsを使用してWindowsから情報を収集する'
@@ -31,7 +31,7 @@ def generate_serializer(data):
    return create_serializer_class(serializer_name, params)


def get_platform_automation_methods(path):
def get_platform_automation_methods(path, lang=None):
    methods = []
    for root, dirs, files in os.walk(path, topdown=False):
        for name in files:

@@ -40,7 +40,7 @@ def get_platform_automation_methods(path):
                continue

            with open(path, 'r', encoding='utf8') as f:
                manifest = yaml_load_with_i18n(f)
                manifest = yaml_load_with_i18n(f, lang=lang)
                check_platform_method(manifest, path)
                manifest['dir'] = os.path.dirname(path)
                manifest['params_serializer'] = generate_serializer(manifest)
@@ -10,6 +10,6 @@
        login_user: "{{ jms_account.username }}"
        login_password: "{{ jms_account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.protocols | selectattr('name', 'equalto', 'rdp') | map(attribute='port') | first }}"
        login_port: "{{ jms_asset.port }}"
        login_secret_type: "{{ jms_account.secret_type }}"
        login_private_key_path: "{{ jms_account.private_key_path }}"

@@ -6,8 +6,10 @@ category:
type:
  - windows
method: ping
protocol: rdp

i18n:
  Ping by pyfreerdp:
    zh: 使用 Python 模块 pyfreerdp 测试主机可连接性
    en: Ping by pyfreerdp module
    ja: Pyfreerdpモジュールを使用してホストにPingする
    zh: '使用 Python 模块 pyfreerdp 测试主机可连接性'
    en: 'Ping by pyfreerdp module'
    ja: 'Pyfreerdpモジュールを使用してホストにPingする'

@@ -11,7 +11,7 @@
        login_user: "{{ jms_account.username }}"
        login_password: "{{ jms_account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.protocols | selectattr('name', 'equalto', 'ssh') | map(attribute='port') | first }}"
        login_port: "{{ jms_asset.port }}"
        login_secret_type: "{{ jms_account.secret_type }}"
        login_private_key_path: "{{ jms_account.private_key_path }}"
        become: "{{ custom_become | default(False) }}"

@@ -6,8 +6,10 @@ category:
type:
  - all
method: ping
protocol: ssh

i18n:
  Ping by paramiko:
    zh: 使用 Python 模块 paramiko 测试主机可连接性
    en: Ping by paramiko module
    ja: Paramikoモジュールを使用してホストにPingする
    zh: '使用 Python 模块 paramiko 测试主机可连接性'
    en: 'Ping by paramiko module'
    ja: 'Paramikoモジュールを使用してホストにPingする'
@@ -2,6 +2,7 @@
  gather_facts: no
  vars:
    ansible_python_interpreter: /opt/py3/bin/python
    check_ssl: "{{ jms_asset.spec_info.use_ssl and not jms_asset.spec_info.allow_invalid_cert }}"

  tasks:
    - name: Test MySQL connection

@@ -10,8 +11,8 @@
        login_password: "{{ jms_account.secret }}"
        login_host: "{{ jms_asset.address }}"
        login_port: "{{ jms_asset.port }}"
        check_hostname: "{{ omit if not jms_asset.spec_info.use_ssl else jms_asset.spec_info.allow_invalid_cert }}"
        ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) }}"
        client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) }}"
        client_key: "{{ jms_asset.secret_info.client_key | default(omit) }}"
        check_hostname: "{{ check_ssl if check_ssl else omit }}"
        ca_cert: "{{ jms_asset.secret_info.ca_cert | default(omit) if check_ssl else omit }}"
        client_cert: "{{ jms_asset.secret_info.client_cert | default(omit) if check_ssl else omit }}"
        client_key: "{{ jms_asset.secret_info.client_key | default(omit) if check_ssl else omit }}"
        filter: version
@@ -2,9 +2,11 @@ import json
from collections import defaultdict
from copy import deepcopy

from django.conf import settings
from django.utils.translation import gettext as _

from common.db.models import ChoicesMixin
from jumpserver.utils import get_current_request
from .category import Category
from .cloud import CloudTypes
from .custom import CustomTypes

@@ -22,6 +24,8 @@ class AllTypes(ChoicesMixin):
        CloudTypes, WebTypes, CustomTypes, GPTTypes
    ]
    _category_constrains = {}
    _automation_methods = None
    _current_language = settings.LANGUAGE_CODE

    @classmethod
    def choices(cls):

@@ -61,9 +65,28 @@ class AllTypes(ChoicesMixin):

    @classmethod
    def get_automation_methods(cls):
        from assets.automations import platform_automation_methods as asset_methods
        from accounts.automations import platform_automation_methods as account_methods
        return asset_methods + account_methods
        from assets.automations import methods as asset
        from accounts.automations import methods as account

        automation_methods = \
            asset.platform_automation_methods + \
            account.platform_automation_methods

        request = get_current_request()
        if request is None:
            return automation_methods

        language = request.LANGUAGE_CODE
        if cls._automation_methods is not None and language == cls._current_language:
            automation_methods = cls._automation_methods
        else:
            automation_methods = \
                asset.get_platform_automation_methods(asset.BASE_DIR, language) + \
                account.get_platform_automation_methods(account.BASE_DIR, language)

        cls._current_language = language
        cls._automation_methods = automation_methods
        return cls._automation_methods
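The effect of the change above is a per-language memoization: the YAML manifests are only re-parsed when the current request language differs from the one that produced the cached list. A stripped-down sketch of that pattern, with illustrative names that are not part of the diff:

    _cache = {'lang': None, 'methods': None}

    def load_methods_for(lang, loader):
        # 'loader' stands in for re-reading the manifests for a given language.
        if _cache['methods'] is not None and _cache['lang'] == lang:
            return _cache['methods']
        _cache['lang'], _cache['methods'] = lang, loader(lang)
        return _cache['methods']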
    @classmethod
    def set_automation_methods(cls, category, tp_name, constraints):

@@ -245,7 +268,7 @@ class AllTypes(ChoicesMixin):
            meta = {'type': 'category', 'category': category.value, '_type': category.value}
            category_node = cls.choice_to_node(category, 'ROOT', meta=meta)
            category_count = category_type_mapper.get(category, 0)
            category_node['name'] += f'({category_count})'
            category_node['name'] += f' ({category_count})'
            nodes.append(category_node)

            # Type 格式化

@@ -254,7 +277,7 @@ class AllTypes(ChoicesMixin):
                meta = {'type': 'type', 'category': category.value, '_type': tp.value}
                tp_node = cls.choice_to_node(tp, category_node['id'], opened=False, meta=meta)
                tp_count = category_type_mapper.get(category + '_' + tp, 0)
                tp_node['name'] += f'({tp_count})'
                tp_node['name'] += f' ({tp_count})'
                platforms = tp_platforms.get(category + '_' + tp, [])
                if not platforms:
                    tp_node['isParent'] = False

@@ -263,7 +286,7 @@ class AllTypes(ChoicesMixin):
                # Platform 格式化
                for p in platforms:
                    platform_node = cls.platform_to_node(p, tp_node['id'], include_asset)
                    platform_node['name'] += f'({platform_count.get(p.id, 0)})'
                    platform_node['name'] += f' ({platform_count.get(p.id, 0)})'
                    nodes.append(platform_node)
        return nodes
@@ -5,7 +5,6 @@ from rest_framework import filters
from rest_framework.compat import coreapi, coreschema

from assets.utils import get_node_from_request, is_query_node_all_assets
from .models import Label


class AssetByNodeFilterBackend(filters.BaseFilterBackend):

@@ -64,65 +63,13 @@ class NodeFilterBackend(filters.BaseFilterBackend):
        query_all = is_query_node_all_assets(request)
        if query_all:
            return queryset.filter(
                Q(nodes__key__istartswith=f'{node.key}:') |
                Q(nodes__key__startswith=f'{node.key}:') |
                Q(nodes__key=node.key)
            ).distinct()
        else:
            print("Query query origin: ", queryset.count())
            return queryset.filter(nodes__key=node.key).distinct()


class LabelFilterBackend(filters.BaseFilterBackend):
    sep = ':'
    query_arg = 'label'

    def get_schema_fields(self, view):
        example = self.sep.join(['os', 'linux'])
        return [
            coreapi.Field(
                name=self.query_arg, location='query', required=False,
                type='string', example=example, description=''
            )
        ]

    def get_query_labels(self, request):
        labels_query = request.query_params.getlist(self.query_arg)
        if not labels_query:
            return None

        q = None
        for kv in labels_query:
            if '#' in kv:
                self.sep = '#'
                break

        for kv in labels_query:
            if self.sep not in kv:
                continue
            key, value = kv.strip().split(self.sep)[:2]
            if not all([key, value]):
                continue
            if q:
                q |= Q(name=key, value=value)
            else:
                q = Q(name=key, value=value)
        if not q:
            return []
        labels = Label.objects.filter(q, is_active=True) \
            .values_list('id', flat=True)
        return labels

    def filter_queryset(self, request, queryset, view):
        labels = self.get_query_labels(request)
        if labels is None:
            return queryset
        if len(labels) == 0:
            return queryset.none()
        for label in labels:
            queryset = queryset.filter(labels=label)
        return queryset


class IpInFilterBackend(filters.BaseFilterBackend):
    def filter_queryset(self, request, queryset, view):
        ips = request.query_params.get('ips')
@@ -123,7 +123,7 @@ class Migration(migrations.Migration):
        migrations.AddField(
            model_name='asset',
            name='nodes',
            field=models.ManyToManyField(default=assets.models.asset.default_node, related_name='assets', to='assets.Node', verbose_name='Nodes'),
            field=models.ManyToManyField(default=assets.models.asset.default_node, related_name='assets', to='assets.Node', verbose_name='Node'),
        ),
        migrations.AddField(
            model_name='systemuser',

@@ -50,7 +50,7 @@ class Migration(migrations.Migration):
        migrations.AddField(
            model_name='asset',
            name='nodes',
            field=models.ManyToManyField(default=assets.models.default_node, related_name='assets', to='assets.Node', verbose_name='Nodes'),
            field=models.ManyToManyField(default=assets.models.default_node, related_name='assets', to='assets.Node', verbose_name='Node'),
        ),
        migrations.AddField(
            model_name='systemuser',

@@ -31,7 +31,7 @@ class Migration(migrations.Migration):
                ('type', models.CharField(max_length=16, verbose_name='Type')),
                ('is_active', models.BooleanField(default=True, verbose_name='Is active')),
                ('assets', models.ManyToManyField(blank=True, to='assets.Asset', verbose_name='Assets')),
                ('nodes', models.ManyToManyField(blank=True, to='assets.Node', verbose_name='Nodes')),
                ('nodes', models.ManyToManyField(blank=True, to='assets.Node', verbose_name='Node')),
            ],
            options={
                'verbose_name': 'Automation task',
18  apps/assets/migrations/0126_remove_asset_labels.py  Normal file
@@ -0,0 +1,18 @@
# Generated by Django 4.1.10 on 2023-11-22 07:33

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('assets', '0125_auto_20231011_1053'),
        ('labels', '0002_auto_20231103_1659'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='asset',
            name='labels',
        ),
    ]

55  apps/assets/migrations/0127_automation_remove_account.py  Normal file
@@ -0,0 +1,55 @@
# Generated by Django 4.1.10 on 2023-12-05 10:03
from functools import reduce

from django.db import migrations, models
from django.db.models import F


def migrate_automation_ansible_remove_account(apps, *args):
    automation_model = apps.get_model('assets', 'PlatformAutomation')
    automation_map = {
        ('oracle',): 'remove_account_oracle',
        ('windows',): 'remove_account_windows',
        ('mongodb',): 'remove_account_mongodb',
        ('linux', 'unix'): 'remove_account_posix',
        ('sqlserver',): 'remove_account_sqlserver',
        ('mysql', 'mariadb'): 'remove_account_mysql',
        ('postgresql',): 'remove_account_postgresql',
    }

    update_objs = []
    types = list(reduce(lambda x, y: x + y, automation_map.keys()))
    qs = automation_model.objects.filter(platform__type__in=types).annotate(tp=F('platform__type'))
    for automation in qs:
        for types, method in automation_map.items():
            if automation.tp in types:
                automation.remove_account_enabled = True
                automation.remove_account_method = method
                break
        update_objs.append(automation)
    automation_model.objects.bulk_update(update_objs, ['remove_account_enabled', 'remove_account_method'])


class Migration(migrations.Migration):
    dependencies = [
        ('assets', '0126_remove_asset_labels'),
    ]

    operations = [
        migrations.AddField(
            model_name='platformautomation',
            name='remove_account_enabled',
            field=models.BooleanField(default=False, verbose_name='Remove account enabled'),
        ),
        migrations.AddField(
            model_name='platformautomation',
            name='remove_account_method',
            field=models.TextField(blank=True, max_length=32, null=True, verbose_name='Remove account method'),
        ),
        migrations.AddField(
            model_name='platformautomation',
            name='remove_account_params',
            field=models.JSONField(default=dict, verbose_name='Remove account params'),
        ),
        migrations.RunPython(migrate_automation_ansible_remove_account)
    ]
@@ -13,7 +13,9 @@ from django.utils.translation import gettext_lazy as _
from assets import const
from common.db.fields import EncryptMixin
from common.utils import lazyproperty
from labels.mixins import LabeledMixin
from orgs.mixins.models import OrgManager, JMSOrgBaseModel
from rbac.models import ContentType
from ..base import AbsConnectivity
from ..platform import Platform

@@ -150,7 +152,7 @@ class JSONFilterMixin:
        return None


class Asset(NodesRelationMixin, AbsConnectivity, JSONFilterMixin, JMSOrgBaseModel):
class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin, JMSOrgBaseModel):
    Category = const.Category
    Type = const.AllTypes

@@ -160,9 +162,8 @@ class Asset(NodesRelationMixin, AbsConnectivity, JSONFilterMixin, JMSOrgBaseMode
    domain = models.ForeignKey("assets.Domain", null=True, blank=True, related_name='assets',
                               verbose_name=_("Domain"), on_delete=models.SET_NULL)
    nodes = models.ManyToManyField('assets.Node', default=default_node, related_name='assets',
                                   verbose_name=_("Nodes"))
                                   verbose_name=_("Node"))
    is_active = models.BooleanField(default=True, verbose_name=_('Is active'))
    labels = models.ManyToManyField('assets.Label', blank=True, related_name='assets', verbose_name=_("Labels"))
    gathered_info = models.JSONField(verbose_name=_('Gathered info'), default=dict, blank=True)  # 资产的一些信息,如 硬件信息
    custom_info = models.JSONField(verbose_name=_('Custom info'), default=dict)

@@ -171,6 +172,13 @@ class Asset(NodesRelationMixin, AbsConnectivity, JSONFilterMixin, JMSOrgBaseMode
    def __str__(self):
        return '{0.name}({0.address})'.format(self)

    def get_labels(self):
        from labels.models import Label, LabeledResource
        res_type = ContentType.objects.get_for_model(self.__class__)
        label_ids = LabeledResource.objects.filter(res_type=res_type, res_id=self.id) \
            .values_list('label_id', flat=True)
        return Label.objects.filter(id__in=label_ids)

    @staticmethod
    def get_spec_values(instance, fields):
        info = {}

@@ -15,7 +15,7 @@ from orgs.mixins.models import OrgModelMixin, JMSOrgBaseModel

class BaseAutomation(PeriodTaskModelMixin, JMSOrgBaseModel):
    accounts = models.JSONField(default=list, verbose_name=_("Accounts"))
    nodes = models.ManyToManyField('assets.Node', blank=True, verbose_name=_("Nodes"))
    nodes = models.ManyToManyField('assets.Node', blank=True, verbose_name=_("Node"))
    assets = models.ManyToManyField('assets.Asset', blank=True, verbose_name=_("Assets"))
    type = models.CharField(max_length=16, verbose_name=_('Type'))
    is_active = models.BooleanField(default=True, verbose_name=_("Is active"))

@@ -29,7 +29,7 @@ class CommandFilter(OrgModelMixin):
    )
    nodes = models.ManyToManyField(
        'assets.Node', related_name='cmd_filters', blank=True,
        verbose_name=_("Nodes")
        verbose_name=_("Node")
    )
    assets = models.ManyToManyField(
        'assets.Asset', related_name='cmd_filters', blank=True,
@@ -6,6 +6,7 @@ from django.db import models
from django.utils.translation import gettext_lazy as _

from common.utils import get_logger
from labels.mixins import LabeledMixin
from orgs.mixins.models import JMSOrgBaseModel
from .gateway import Gateway

@@ -14,7 +15,7 @@ logger = get_logger(__file__)
__all__ = ['Domain']


class Domain(JMSOrgBaseModel):
class Domain(LabeledMixin, JMSOrgBaseModel):
    name = models.CharField(max_length=128, verbose_name=_('Name'))

    class Meta:
@@ -13,7 +13,7 @@ from django.db.transaction import atomic
from django.utils.translation import gettext_lazy as _, gettext

from common.db.models import output_as_string
from common.utils import get_logger
from common.utils import get_logger, timeit
from common.utils.lock import DistributedLock
from orgs.mixins.models import OrgManager, JMSOrgBaseModel
from orgs.models import Organization

@@ -195,11 +195,6 @@ class FamilyMixin:
        ancestor_keys = self.get_ancestor_keys(with_self=with_self)
        return self.__class__.objects.filter(key__in=ancestor_keys)

    # @property
    # def parent_key(self):
    #     parent_key = ":".join(self.key.split(":")[:-1])
    #     return parent_key

    def compute_parent_key(self):
        return compute_parent_key(self.key)

@@ -349,29 +344,26 @@ class NodeAllAssetsMappingMixin:
        return 'ASSETS_ORG_NODE_ALL_ASSET_ids_MAPPING_{}'.format(org_id)

    @classmethod
    @timeit
    def generate_node_all_asset_ids_mapping(cls, org_id):
        from .asset import Asset

        logger.info(f'Generate node asset mapping: '
                    f'thread={threading.get_ident()} '
                    f'org_id={org_id}')
        logger.info(f'Generate node asset mapping: org_id={org_id}')
        t1 = time.time()
        with tmp_to_org(org_id):
            node_ids_key = Node.objects.annotate(
                char_id=output_as_string('id')
            ).values_list('char_id', 'key')

            # * 直接取出全部. filter(node__org_id=org_id)(大规模下会更慢)
            nodes_asset_ids = Asset.nodes.through.objects.all() \
                .annotate(char_node_id=output_as_string('node_id')) \
                .annotate(char_asset_id=output_as_string('asset_id')) \
                .values_list('char_node_id', 'char_asset_id')

            node_id_ancestor_keys_mapping = {
                node_id: cls.get_node_ancestor_keys(node_key, with_self=True)
                for node_id, node_key in node_ids_key
            }

            # * 直接取出全部. filter(node__org_id=org_id)(大规模下会更慢)
            nodes_asset_ids = cls.assets.through.objects.all() \
                .annotate(char_node_id=output_as_string('node_id')) \
                .annotate(char_asset_id=output_as_string('asset_id')) \
                .values_list('char_node_id', 'char_asset_id')

            nodeid_assetsid_mapping = defaultdict(set)
            for node_id, asset_id in nodes_asset_ids:
                nodeid_assetsid_mapping[node_id].add(asset_id)

@@ -386,7 +378,7 @@ class NodeAllAssetsMappingMixin:
                mapping[ancestor_key].update(asset_ids)

        t3 = time.time()
        logger.info('t1-t2(DB Query): {} s, t3-t2(Generate mapping): {} s'.format(t2 - t1, t3 - t2))
        logger.info('Generate asset nodes mapping, DB query: {:.2f}s, mapping: {:.2f}s'.format(t2 - t1, t3 - t2))
        return mapping

@@ -436,6 +428,7 @@ class NodeAssetsMixin(NodeAllAssetsMappingMixin):
        return asset_ids

    @classmethod
    @timeit
    def get_nodes_all_assets(cls, *nodes):
        from .asset import Asset
        node_ids = set()

@@ -559,11 +552,6 @@ class Node(JMSOrgBaseModel, SomeNodesMixin, FamilyMixin, NodeAssetsMixin):
    def __str__(self):
        return self.full_value

    # def __eq__(self, other):
    #     if not other:
    #         return False
    #     return self.id == other.id
    #
    def __gt__(self, other):
        self_key = [int(k) for k in self.key.split(':')]
        other_key = [int(k) for k in other.key.split(':')]
@@ -9,6 +9,7 @@ from common.db.models import JMSBaseModel
__all__ = ['Platform', 'PlatformProtocol', 'PlatformAutomation']

from common.utils import lazyproperty
from labels.mixins import LabeledMixin


class PlatformProtocol(models.Model):

@@ -71,10 +72,16 @@ class PlatformAutomation(models.Model):
        max_length=32, blank=True, null=True, verbose_name=_("Gather facts method")
    )
    gather_accounts_params = models.JSONField(default=dict, verbose_name=_("Gather facts params"))

    remove_account_enabled = models.BooleanField(default=False, verbose_name=_("Remove account enabled"))
    remove_account_method = models.TextField(
        max_length=32, blank=True, null=True, verbose_name=_("Remove account method")
    )
    remove_account_params = models.JSONField(default=dict, verbose_name=_("Remove account params"))
    platform = models.OneToOneField('Platform', on_delete=models.CASCADE, related_name='automation', null=True)


class Platform(JMSBaseModel):
class Platform(LabeledMixin, JMSBaseModel):
    """
    对资产提供 约束和默认值
    对资产进行抽象
@@ -1,8 +1,8 @@
from rest_framework.pagination import LimitOffsetPagination
from rest_framework.request import Request

from common.utils import get_logger
from assets.models import Node
from common.utils import get_logger

logger = get_logger(__name__)

@@ -28,6 +28,7 @@ class AssetPaginationBase(LimitOffsetPagination):
            'key', 'all', 'show_current_asset',
            'cache_policy', 'display', 'draw',
            'order', 'node', 'node_id', 'fields_size',
            'asset'
        }
        for k, v in self._request.query_params.items():
            if k not in exclude_query_params and v is not None:
@@ -2,11 +2,10 @@
#

from .asset import *
from .label import *
from .node import *
from .gateway import *
from .automations import *
from .cagegory import *
from .domain import *
from .favorite_asset import *
from .gateway import *
from .node import *
from .platform import *
from .cagegory import *
from .automations import *
@@ -11,13 +11,14 @@ from accounts.serializers import AccountSerializer
from common.const import UUID_PATTERN
from common.serializers import (
    WritableNestedModelSerializer, SecretReadableMixin,
    CommonModelSerializer, MethodSerializer
    CommonModelSerializer, MethodSerializer, ResourceLabelsMixin
)
from common.serializers.common import DictSerializer
from common.serializers.fields import LabeledChoiceField
from labels.models import Label
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from ...const import Category, AllTypes
from ...models import Asset, Node, Platform, Label, Protocol
from ...models import Asset, Node, Platform, Protocol

__all__ = [
    'AssetSerializer', 'AssetSimpleSerializer', 'MiniAssetSerializer',

@@ -99,7 +100,10 @@ class AssetAccountSerializer(AccountSerializer):
    class Meta(AccountSerializer.Meta):
        fields = [
            f for f in AccountSerializer.Meta.fields
            if f not in ['spec_info']
            if f not in [
                'spec_info', 'connectivity', 'labels', 'created_by',
                'date_update', 'date_created'
            ]
        ]
        extra_kwargs = {
            **AccountSerializer.Meta.extra_kwargs,

@@ -117,10 +121,9 @@ class AccountSecretSerializer(SecretReadableMixin, CommonModelSerializer):
        }


class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSerializer):
class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, WritableNestedModelSerializer):
    category = LabeledChoiceField(choices=Category.choices, read_only=True, label=_('Category'))
    type = LabeledChoiceField(choices=AllTypes.choices(), read_only=True, label=_('Type'))
    labels = AssetLabelSerializer(many=True, required=False, label=_('Label'))
    protocols = AssetProtocolsSerializer(many=True, required=False, label=_('Protocols'), default=())
    accounts = AssetAccountSerializer(many=True, required=False, allow_null=True, write_only=True, label=_('Account'))
    nodes_display = serializers.ListField(read_only=False, required=False, label=_("Node path"))

@@ -201,10 +204,14 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali
    @classmethod
    def setup_eager_loading(cls, queryset):
        """ Perform necessary eager loading of data. """
        queryset = queryset.prefetch_related('domain', 'nodes', 'labels', 'protocols') \
        queryset = queryset.prefetch_related('domain', 'nodes', 'protocols', ) \
            .prefetch_related('platform', 'platform__automation') \
            .annotate(category=F("platform__category")) \
            .annotate(type=F("platform__type"))
        if queryset.model is Asset:
            queryset = queryset.prefetch_related('labels__label', 'labels')
        else:
            queryset = queryset.prefetch_related('asset_ptr__labels__label', 'asset_ptr__labels')
        return queryset

    @staticmethod

@@ -374,7 +381,6 @@ class AssetSerializer(BulkOrgResourceModelSerializer, WritableNestedModelSeriali


class DetailMixin(serializers.Serializer):
    accounts = AssetAccountSerializer(many=True, required=False, label=_('Accounts'))
    spec_info = MethodSerializer(label=_('Spec info'), read_only=True)
    gathered_info = MethodSerializer(label=_('Gathered info'), read_only=True)
    auto_config = serializers.DictField(read_only=True, label=_('Auto info'))

@@ -389,8 +395,7 @@ class DetailMixin(serializers.Serializer):
    def get_field_names(self, declared_fields, info):
        names = super().get_field_names(declared_fields, info)
        names.extend([
            'accounts', 'gathered_info', 'spec_info',
            'auto_config',
            'gathered_info', 'spec_info', 'auto_config',
        ])
        return names
@@ -1,23 +1,22 @@
 # -*- coding: utf-8 -*-
 #
+from django.db.models import Count
 from django.utils.translation import gettext_lazy as _
 from rest_framework import serializers

+from common.serializers import ResourceLabelsMixin
 from common.serializers.fields import ObjectRelatedField
 from orgs.mixins.serializers import BulkOrgResourceModelSerializer
 from .gateway import GatewayWithAccountSecretSerializer
-from ..models import Domain, Asset
+from ..models import Domain

-__all__ = ['DomainSerializer', 'DomainWithGatewaySerializer']
+__all__ = ['DomainSerializer', 'DomainWithGatewaySerializer', 'DomainListSerializer']


-class DomainSerializer(BulkOrgResourceModelSerializer):
+class DomainSerializer(ResourceLabelsMixin, BulkOrgResourceModelSerializer):
     gateways = ObjectRelatedField(
         many=True, required=False, label=_('Gateway'), read_only=True,
     )
-    assets = ObjectRelatedField(
-        many=True, required=False, queryset=Asset.objects, label=_('Asset')
-    )

     class Meta:
         model = Domain
@@ -29,7 +28,9 @@ class DomainSerializer(BulkOrgResourceModelSerializer):

     def to_representation(self, instance):
         data = super().to_representation(instance)
-        assets = data['assets']
+        assets = data.get('assets')
+        if assets is None:
+            return data
         gateway_ids = [str(i['id']) for i in data['gateways']]
         data['assets'] = [i for i in assets if str(i['id']) not in gateway_ids]
         return data
@@ -41,6 +42,26 @@ class DomainSerializer(BulkOrgResourceModelSerializer):
         instance = super().update(instance, validated_data)
         return instance

+    @classmethod
+    def setup_eager_loading(cls, queryset):
+        queryset = queryset \
+            .prefetch_related('labels', 'labels__label')
+        return queryset
+
+
+class DomainListSerializer(DomainSerializer):
+    assets_amount = serializers.IntegerField(label=_('Assets amount'), read_only=True)
+
+    class Meta(DomainSerializer.Meta):
+        fields = list(set(DomainSerializer.Meta.fields + ['assets_amount']) - {'assets'})
+
+    @classmethod
+    def setup_eager_loading(cls, queryset):
+        queryset = queryset.annotate(
+            assets_amount=Count('assets'),
+        )
+        return queryset
+

 class DomainWithGatewaySerializer(serializers.ModelSerializer):
     gateways = GatewayWithAccountSecretSerializer(many=True, read_only=True)
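
DomainListSerializer above trades the full assets relation for a single aggregate. The same pattern in isolation (a sketch; any model with an assets many-to-many works):

from django.db.models import Count


def with_assets_amount(queryset):
    # One GROUP BY aggregate instead of serializing every related asset per row.
    return queryset.annotate(assets_amount=Count('assets'))
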
@@ -1,47 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-from django.db.models import Count
-from django.utils.translation import gettext_lazy as _
-from rest_framework import serializers
-
-from orgs.mixins.serializers import BulkOrgResourceModelSerializer
-from ..models import Label
-
-
-class LabelSerializer(BulkOrgResourceModelSerializer):
-    asset_count = serializers.ReadOnlyField(label=_("Assets amount"))
-
-    class Meta:
-        model = Label
-        fields_mini = ['id', 'name']
-        fields_small = fields_mini + [
-            'value', 'category', 'is_active',
-            'date_created', 'comment',
-        ]
-        fields_m2m = ['asset_count', 'assets']
-        fields = fields_small + fields_m2m
-        read_only_fields = (
-            'category', 'date_created', 'asset_count',
-        )
-        extra_kwargs = {
-            'assets': {'required': False, 'label': _('Asset')}
-        }
-
-    @classmethod
-    def setup_eager_loading(cls, queryset):
-        queryset = queryset.prefetch_related('assets') \
-            .annotate(asset_count=Count('assets'))
-        return queryset
-
-
-class LabelDistinctSerializer(BulkOrgResourceModelSerializer):
-    value = serializers.SerializerMethodField()
-
-    class Meta:
-        model = Label
-        fields = ("name", "value")
-
-    @staticmethod
-    def get_value(obj):
-        labels = Label.objects.filter(name=obj["name"])
-        return ', '.join([label.value for label in labels])
@@ -5,7 +5,7 @@ from rest_framework.validators import UniqueValidator

 from common.serializers import (
     WritableNestedModelSerializer, type_field_map, MethodSerializer,
-    DictSerializer, create_serializer_class
+    DictSerializer, create_serializer_class, ResourceLabelsMixin
 )
 from common.serializers.fields import LabeledChoiceField
 from common.utils import lazyproperty
@@ -123,7 +123,7 @@ class PlatformCustomField(serializers.Serializer):
     choices = serializers.ListField(default=list, label=_("Choices"), required=False)


-class PlatformSerializer(WritableNestedModelSerializer):
+class PlatformSerializer(ResourceLabelsMixin, WritableNestedModelSerializer):
     SU_METHOD_CHOICES = [
         ("sudo", "sudo su -"),
         ("su", "su - "),
@@ -160,6 +160,7 @@ class PlatformSerializer(WritableNestedModelSerializer):
         fields = fields_small + [
             "protocols", "domain_enabled", "su_enabled",
             "su_method", "automation", "comment", "custom_fields",
+            "labels"
         ] + read_only_fields
         extra_kwargs = {
             "su_enabled": {"label": _('Su enabled')},
@@ -190,7 +191,6 @@ class PlatformSerializer(WritableNestedModelSerializer):
     def add_type_choices(self, name, label):
         tp = self.fields['type']
         tp.choices[name] = label
-        tp.choice_mapper[name] = label
         tp.choice_strings_to_values[name] = label

     @lazyproperty
@@ -199,13 +199,6 @@ class PlatformSerializer(WritableNestedModelSerializer):
         constraints = AllTypes.get_constraints(category, tp)
         return constraints

-    @classmethod
-    def setup_eager_loading(cls, queryset):
-        queryset = queryset.prefetch_related(
-            'protocols', 'automation'
-        )
-        return queryset
-
     def validate_protocols(self, protocols):
         if not protocols:
             raise serializers.ValidationError(_("Protocols is required"))
@@ -80,10 +80,11 @@ RELATED_NODE_IDS = '_related_node_ids'

 @receiver(pre_delete, sender=Asset)
 def on_asset_delete(instance: Asset, using, **kwargs):
-    logger.debug("Asset pre delete signal recv: {}".format(instance))
     node_ids = Node.objects.filter(assets=instance) \
         .distinct().values_list('id', flat=True)
-    setattr(instance, RELATED_NODE_IDS, node_ids)
+    node_ids = list(node_ids)
+    logger.debug("Asset pre delete signal recv: {}, node_ids: {}".format(instance, node_ids))
+    setattr(instance, RELATED_NODE_IDS, list(node_ids))
     m2m_changed.send(
         sender=Asset.nodes.through, instance=instance,
         reverse=False, model=Node, pk_set=node_ids,
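
The list(node_ids) step above matters because values_list() returns a lazy queryset; once the asset and its node links are deleted, evaluating it would yield nothing. A small sketch of the pitfall, assuming the Asset/Node models referenced in the hunk (import path assumed):

from assets.models import Node


def collect_related_node_ids(asset):
    # Lazy queryset: no SQL has run yet.
    lazy_ids = Node.objects.filter(assets=asset).values_list('id', flat=True)
    # Evaluate while the asset's m2m rows still exist; after asset.delete()
    # the same queryset would evaluate to an empty list.
    return list(lazy_ids)
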
@@ -93,8 +94,8 @@ def on_asset_delete(instance: Asset, using, **kwargs):

 @receiver(post_delete, sender=Asset)
 def on_asset_post_delete(instance: Asset, using, **kwargs):
-    logger.debug("Asset post delete signal recv: {}".format(instance))
     node_ids = getattr(instance, RELATED_NODE_IDS, [])
+    logger.debug("Asset post delete signal recv: {}, node_ids: {}".format(instance, node_ids))
     if node_ids:
         m2m_changed.send(
             sender=Asset.nodes.through, instance=instance, reverse=False,
@@ -15,8 +15,8 @@ from ..tasks import check_node_assets_amount_task
 logger = get_logger(__file__)


-@on_transaction_commit
 @receiver(m2m_changed, sender=Asset.nodes.through)
+@on_transaction_commit
 def on_node_asset_change(sender, action, instance, reverse, pk_set, **kwargs):
     # `pre_clear` is not allowed, because that signal carries no `pk_set`
     # [docs](https://docs.djangoproject.com/en/3.1/ref/signals/#m2m-changed)
@@ -37,7 +37,7 @@ def on_node_asset_change(sender, action, instance, reverse, pk_set, **kwargs):
     update_nodes_assets_amount(node_ids=node_ids)


-@merge_delay_run(ttl=5)
+@merge_delay_run(ttl=30)
 def update_nodes_assets_amount(node_ids=()):
     nodes = Node.objects.filter(id__in=node_ids)
     nodes = Node.get_ancestor_queryset(nodes)
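
merge_delay_run(ttl=...) batches bursts of calls so the recount runs once per window; raising ttl from 5 to 30 seconds widens that window. The decorator itself is not shown in this diff, so the following is only a generic, thread-based sketch of the debouncing idea, not JumpServer's implementation:

import threading


def merge_delay_run_sketch(ttl):
    """Merge the arguments of calls made within `ttl` seconds and run once."""
    def decorator(func):
        pending = set()
        lock = threading.Lock()
        timer = None

        def flush():
            nonlocal timer
            with lock:
                items = tuple(pending)
                pending.clear()
                timer = None
            if items:
                func(items)

        def wrapper(items=()):
            nonlocal timer
            with lock:
                pending.update(items)
                if timer is None:
                    timer = threading.Timer(ttl, flush)
                    timer.start()

        return wrapper
    return decorator


@merge_delay_run_sketch(ttl=30)
def update_nodes_assets_amount(node_ids):
    print('recount once for', sorted(node_ids))


update_nodes_assets_amount({1, 2})
update_nodes_assets_amount({2, 3})  # merged: one recount for {1, 2, 3} after ~30 s
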
@@ -21,7 +21,7 @@ logger = get_logger(__name__)
 node_assets_mapping_pub_sub = lazy(lambda: RedisPubSub('fm.node_asset_mapping'), RedisPubSub)()


-@merge_delay_run(ttl=5)
+@merge_delay_run(ttl=30)
 def expire_node_assets_mapping(org_ids=()):
     logger.debug("Recv asset nodes changed signal, expire memery node asset mapping")
     # every process clears its own in-memory mapping
@@ -53,8 +53,9 @@ def on_node_post_delete(sender, instance, **kwargs):


 @receiver(m2m_changed, sender=Asset.nodes.through)
-def on_node_asset_change(sender, instance, **kwargs):
-    expire_node_assets_mapping(org_ids=(instance.org_id,))
+def on_node_asset_change(sender, instance, action='pre_remove', **kwargs):
+    if action.startswith('post'):
+        expire_node_assets_mapping(org_ids=(instance.org_id,))


 @receiver(django_ready)
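
The action guard above leans on how m2m_changed behaves: every change fires pre_* and post_* variants, and only the post_* ones see the final database state (the clear variants also carry no pk_set). A generic receiver sketch; handle_change() is a hypothetical callback:

from django.db.models.signals import m2m_changed
from django.dispatch import receiver

from assets.models import Asset


@receiver(m2m_changed, sender=Asset.nodes.through)
def on_nodes_changed(sender, instance, action, pk_set, **kwargs):
    # Run once, after the rows are written; skip the pre_* phase entirely.
    if not action.startswith('post'):
        return
    handle_change(instance, pk_set or set())  # hypothetical downstream work
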
@@ -2,6 +2,7 @@
 from django.urls import path
 from rest_framework_bulk.routes import BulkRouter

+from labels.api import LabelViewSet
 from .. import api

 app_name = 'assets'
@@ -17,12 +18,12 @@ router.register(r'clouds', api.CloudViewSet, 'cloud')
 router.register(r'gpts', api.GPTViewSet, 'gpt')
 router.register(r'customs', api.CustomViewSet, 'custom')
 router.register(r'platforms', api.AssetPlatformViewSet, 'platform')
-router.register(r'labels', api.LabelViewSet, 'label')
 router.register(r'nodes', api.NodeViewSet, 'node')
 router.register(r'domains', api.DomainViewSet, 'domain')
 router.register(r'gateways', api.GatewayViewSet, 'gateway')
 router.register(r'favorite-assets', api.FavoriteAssetViewSet, 'favorite-asset')
 router.register(r'protocol-settings', api.PlatformProtocolViewSet, 'protocol-setting')
+router.register(r'labels', LabelViewSet, 'label')

 urlpatterns = [
     # path('assets/<uuid:pk>/gateways/', api.AssetGatewayListApi.as_view(), name='asset-gateway-list'),
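
The router change above only re-points the existing labels route at the viewset exported by the new labels app. For reference, registering a viewset from another app looks like this (a sketch using DRF's stock router; the explicit basename keeps reversed URL names such as 'label-list' stable):

from rest_framework.routers import DefaultRouter

from labels.api import LabelViewSet

router = DefaultRouter()
router.register(r'labels', LabelViewSet, basename='label')

urlpatterns = router.urls
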
@@ -4,7 +4,6 @@ from urllib.parse import urlencode, urlparse
 from kubernetes import client
 from kubernetes.client import api_client
 from kubernetes.client.api import core_v1_api
-from kubernetes.client.exceptions import ApiException
 from sshtunnel import SSHTunnelForwarder, BaseSSHTunnelForwarderError

 from common.utils import get_logger
@@ -88,8 +87,9 @@ class KubernetesClient:
         if hasattr(self, func_name):
             try:
                 data = getattr(self, func_name)(*args)
-            except ApiException as e:
-                logger.error(e.reason)
+            except Exception as e:
+                logger.error(e)
+                raise e

         if self.server:
             self.server.stop()
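
The broadened except clause above logs and re-raises instead of swallowing non-API errors. A generic 'call, log, re-raise' helper in the same spirit, written as a sketch with the tunnel cleanup placed in finally (names are illustrative only):

import logging

logger = logging.getLogger(__name__)


def call_and_cleanup(func, *args, server=None):
    """Run func(*args); log and re-raise failures; always stop the tunnel."""
    try:
        return func(*args)
    except Exception as e:
        logger.error(e)
        raise
    finally:
        if server is not None:
            server.stop()
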
@@ -5,6 +5,7 @@ from importlib import import_module

 from django.conf import settings
 from django.db.models import F, Value, CharField, Q
+from django.db.models.functions import Cast
 from django.http import HttpResponse, FileResponse
 from django.utils.encoding import escape_uri_path
 from rest_framework import generics
@@ -40,6 +41,7 @@ from .serializers import (
     PasswordChangeLogSerializer, ActivityUnionLogSerializer,
     FileSerializer, UserSessionSerializer
 )
+from .utils import construct_userlogin_usernames

 logger = get_logger(__name__)

@@ -125,15 +127,16 @@ class UserLoginCommonMixin:

 class UserLoginLogViewSet(UserLoginCommonMixin, OrgReadonlyModelViewSet):
     @staticmethod
-    def get_org_members():
-        users = current_org.get_members().values_list('username', flat=True)
+    def get_org_member_usernames():
+        user_queryset = current_org.get_members()
+        users = construct_userlogin_usernames(user_queryset)
         return users

     def get_queryset(self):
         queryset = super().get_queryset()
         if current_org.is_root():
             return queryset
-        users = self.get_org_members()
+        users = self.get_org_member_usernames()
         queryset = queryset.filter(username__in=users)
         return queryset

@@ -163,7 +166,7 @@ class ResourceActivityAPIView(generics.ListAPIView):
         q |= Q(user=str(user))
         queryset = OperateLog.objects.filter(q, org_q).annotate(
             r_type=Value(ActivityChoices.operate_log, CharField()),
-            r_detail_id=F('id'), r_detail=Value(None, CharField()),
+            r_detail_id=Cast(F('id'), CharField()), r_detail=Value(None, CharField()),
             r_user=F('user'), r_action=F('action'),
         ).values(*fields)[:limit]
         return queryset
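
Cast(F('id'), CharField()) above normalises the primary key to text so operate-log rows line up with the other activity sources that already expose string detail ids. The same annotation in isolation (a sketch reusing the OperateLog model named in the hunk):

from django.db.models import CharField, F, Value
from django.db.models.functions import Cast


def activity_rows(queryset, limit=30):
    # Cast the pk to text so every source yields identical column types
    # when the rows are merged into one activity feed.
    return queryset.annotate(
        r_detail_id=Cast(F('id'), CharField()),
        r_detail=Value(None, CharField()),
    ).values('r_detail_id', 'r_detail')[:limit]
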
Some files were not shown because too many files have changed in this diff.