Compare commits


33 Commits
dev ... v4.8.0

Author SHA1 Message Date
Bai
6d66ba5288 perf: keep top 2025-03-27 17:51:22 +08:00
Bryan
ad5460dab8
Merge pull request #15086 from jumpserver/dev
v4.8.0
2025-03-20 18:44:44 +08:00
Bryan
4d37dca0de
Merge pull request #14901 from jumpserver/dev
v4.7.0
2025-02-20 10:21:16 +08:00
Bryan
2ca4002624
Merge pull request #14813 from jumpserver/dev
v4.6.0
2025-01-15 14:38:17 +08:00
Bryan
053d640e4c
Merge pull request #14699 from jumpserver/dev
v4.5.0
2024-12-19 16:04:45 +08:00
Bryan
f3acc28ded
Merge pull request #14697 from jumpserver/dev
v4.5.0
2024-12-19 15:57:11 +08:00
Bryan
25987545db
Merge pull request #14511 from jumpserver/dev
v4.4.0
2024-11-21 19:00:35 +08:00
Bryan
6720ecc6e0
Merge pull request #14319 from jumpserver/dev
v4.3.0
2024-10-17 14:55:38 +08:00
老广
0b3a7bb020
Merge pull request #14203 from jumpserver/dev
merge: from dev to master
2024-09-19 19:37:19 +08:00
Bryan
56373e362b
Merge pull request #13988 from jumpserver/dev
v4.1.0
2024-08-16 18:40:35 +08:00
Bryan
02fc045370
Merge pull request #13600 from jumpserver/dev
v4.0.0
2024-07-03 19:04:35 +08:00
Bryan
e4ac73896f
Merge pull request #13452 from jumpserver/dev
v3.10.11-lts
2024-06-19 16:01:26 +08:00
Bryan
1518f792d6
Merge pull request #13236 from jumpserver/dev
v3.10.10-lts
2024-05-16 16:04:07 +08:00
Bai
67277dd622 fix: fix dashboard session ranking counts all showing 1 2024-04-22 19:42:33 +08:00
Bryan
82e7f020ea
Merge pull request #13094 from jumpserver/dev
v3.10.9 (dev to master)
2024-04-22 19:39:53 +08:00
Bryan
f20b9e01ab
Merge pull request #13062 from jumpserver/dev
v3.10.8 dev to master
2024-04-18 18:01:20 +08:00
Bryan
8cf8a3701b
Merge pull request #13059 from jumpserver/dev
v3.10.8
2024-04-18 17:16:37 +08:00
Bryan
7ba24293d1
Merge pull request #12736 from jumpserver/pr@dev@master_fix
fix: resolve conflicts
2024-02-29 16:38:43 +08:00
Bai
f10114c9ed fix: resolve conflicts 2024-02-29 16:37:10 +08:00
Bryan
cf31cbfb07
Merge pull request #12729 from jumpserver/dev
v3.10.4
2024-02-29 16:19:59 +08:00
wangruidong
0edad24d5d fix: asset expiration notification failing to send 2024-02-04 11:41:48 +08:00
ibuler
1f1c1a9157 fix: fix the scheduled user-activity check task failing to run 2024-01-23 09:28:38 +00:00
feng
6c9d271ae1 fix: celery beat failing to start when the Redis password contains special characters 2024-01-22 06:18:34 +00:00
Bai
6ff852e225 perf: fix missing deduplication in Count 2024-01-22 06:16:25 +00:00
Bryan
baa75dc735
Merge pull request #12566 from jumpserver/master
v3.10.2
2024-01-17 07:34:28 -04:00
Bryan
8a9f0436b8
Merge pull request #12565 from jumpserver/dev
v3.10.2
2024-01-17 07:23:30 -04:00
Bryan
a9620a3cbe
Merge pull request #12461 from jumpserver/master
v3.10.1
2023-12-29 11:33:05 +05:00
Bryan
769e7dc8a0
Merge pull request #12460 from jumpserver/dev
v3.10.1
2023-12-29 11:20:36 +05:00
Bryan
2a70449411
Merge pull request #12458 from jumpserver/dev
v3.10.1
2023-12-29 11:01:13 +05:00
Bryan
8df720f19e
Merge pull request #12401 from jumpserver/dev
v3.10
2023-12-21 15:14:19 +05:00
老广
dabbb45f6e
Merge pull request #12144 from jumpserver/dev
v3.9.0
2023-11-16 18:23:05 +08:00
Bryan
ce24c1c3fd
Merge pull request #11914 from jumpserver/dev
v3.8.0
2023-10-19 03:37:39 -05:00
Bryan
3c54c82ce9
Merge pull request #11636 from jumpserver/dev
v3.7.0
2023-09-21 17:02:48 +08:00
271 changed files with 16352 additions and 25570 deletions

View File

@ -9,5 +9,3 @@ celerybeat.pid
apps/xpack/.git apps/xpack/.git
.history/ .history/
.idea .idea
.venv/
.env

4
.gitattributes vendored
View File

@ -0,0 +1,4 @@
*.mmdb filter=lfs diff=lfs merge=lfs -text
*.mo filter=lfs diff=lfs merge=lfs -text
*.ipdb filter=lfs diff=lfs merge=lfs -text
leak_passwords.db filter=lfs diff=lfs merge=lfs -text

View File

@ -1,10 +0,0 @@
version: 2
updates:
- package-ecosystem: "uv"
directory: "/"
schedule:
interval: "weekly"
day: "monday"
time: "09:30"
timezone: "Asia/Shanghai"
target-branch: dev

View File

@ -2,14 +2,10 @@ name: Translate README
on: on:
workflow_dispatch: workflow_dispatch:
inputs: inputs:
source_readme:
description: "Source README"
required: false
default: "./readmes/README.en.md"
target_langs: target_langs:
description: "Target Languages" description: "Target Languages"
required: false required: false
default: "zh-hans,zh-hant,ja,pt-br,es,ru" default: "zh-hans,zh-hant,ja,pt-br"
gen_dir_path: gen_dir_path:
description: "Generate Dir Name" description: "Generate Dir Name"
required: false required: false
@ -38,7 +34,6 @@ jobs:
GITHUB_TOKEN: ${{ secrets.PRIVATE_TOKEN }} GITHUB_TOKEN: ${{ secrets.PRIVATE_TOKEN }}
OPENAI_API_KEY: ${{ secrets.GPT_API_TOKEN }} OPENAI_API_KEY: ${{ secrets.GPT_API_TOKEN }}
GPT_MODE: ${{ github.event.inputs.gpt_mode }} GPT_MODE: ${{ github.event.inputs.gpt_mode }}
SOURCE_README: ${{ github.event.inputs.source_readme }}
TARGET_LANGUAGES: ${{ github.event.inputs.target_langs }} TARGET_LANGUAGES: ${{ github.event.inputs.target_langs }}
PUSH_BRANCH: ${{ github.event.inputs.push_branch }} PUSH_BRANCH: ${{ github.event.inputs.push_branch }}
GEN_DIR_PATH: ${{ github.event.inputs.gen_dir_path }} GEN_DIR_PATH: ${{ github.event.inputs.gen_dir_path }}

3
.gitignore vendored
View File

@ -46,6 +46,3 @@ test.py
.test/ .test/
*.mo *.mo
apps.iml apps.iml
*.db
*.mmdb
*.ipdb

View File

@ -1,4 +1,4 @@
FROM jumpserver/core-base:20250427_062456 AS stage-build FROM jumpserver/core-base:20250224_065619 AS stage-build
ARG VERSION ARG VERSION

View File

@ -1,6 +1,6 @@
FROM python:3.11-slim-bullseye FROM python:3.11-slim-bullseye
ARG TARGETARCH ARG TARGETARCH
COPY --from=ghcr.io/astral-sh/uv:0.6.14 /uv /uvx /usr/local/bin/
# Install APT dependencies # Install APT dependencies
ARG DEPENDENCIES=" \ ARG DEPENDENCIES=" \
ca-certificates \ ca-certificates \
@ -43,19 +43,18 @@ WORKDIR /opt/jumpserver
ARG PIP_MIRROR=https://pypi.org/simple ARG PIP_MIRROR=https://pypi.org/simple
ENV POETRY_PYPI_MIRROR_URL=${PIP_MIRROR} ENV POETRY_PYPI_MIRROR_URL=${PIP_MIRROR}
ENV ANSIBLE_COLLECTIONS_PATHS=/opt/py3/lib/python3.11/site-packages/ansible_collections ENV ANSIBLE_COLLECTIONS_PATHS=/opt/py3/lib/python3.11/site-packages/ansible_collections
ENV LANG=en_US.UTF-8 \
PATH=/opt/py3/bin:$PATH
ENV UV_LINK_MODE=copy
RUN --mount=type=cache,target=/root/.cache \ RUN --mount=type=cache,target=/root/.cache \
--mount=type=bind,source=poetry.lock,target=poetry.lock \
--mount=type=bind,source=pyproject.toml,target=pyproject.toml \ --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
--mount=type=bind,source=requirements/clean_site_packages.sh,target=clean_site_packages.sh \ --mount=type=bind,source=utils/clean_site_packages.sh,target=clean_site_packages.sh \
--mount=type=bind,source=requirements/collections.yml,target=collections.yml \ --mount=type=bind,source=requirements/collections.yml,target=collections.yml \
--mount=type=bind,source=requirements/static_files.sh,target=utils/static_files.sh \
set -ex \ set -ex \
&& uv venv \ && python3 -m venv /opt/py3 \
&& uv pip install -i${PIP_MIRROR} -r pyproject.toml \ && pip install poetry poetry-plugin-pypi-mirror -i ${PIP_MIRROR} \
&& ln -sf $(pwd)/.venv /opt/py3 \ && . /opt/py3/bin/activate \
&& bash utils/static_files.sh \ && poetry config virtualenvs.create false \
&& bash clean_site_packages.sh && poetry install --no-cache --only main \
&& ansible-galaxy collection install -r collections.yml --force --ignore-certs \
&& bash clean_site_packages.sh \
&& poetry cache clear pypi --all

View File

@ -24,7 +24,11 @@ RUN set -ex \
WORKDIR /opt/jumpserver WORKDIR /opt/jumpserver
ARG PIP_MIRROR=https://pypi.org/simple ARG PIP_MIRROR=https://pypi.org/simple
ENV POETRY_PYPI_MIRROR_URL=${PIP_MIRROR}
COPY poetry.lock pyproject.toml ./
RUN set -ex \ RUN set -ex \
&& uv pip install -i${PIP_MIRROR} --group xpack && . /opt/py3/bin/activate \
&& pip install poetry poetry-plugin-pypi-mirror -i ${PIP_MIRROR} \
&& poetry install --only xpack \
&& poetry cache clear pypi --all

View File

@ -1,18 +1,16 @@
<div align="center"> <div align="center">
<a name="readme-top"></a> <a name="readme-top"></a>
<a href="https://jumpserver.com" target="_blank"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a> <a href="https://jumpserver.org/index-en.html"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a>
## An open-source PAM tool (Bastion Host) ## An open-source PAM tool (Bastion Host)
[![][license-shield]][license-link] [![][license-shield]][license-link]
[![][docs-shield]][docs-link]
[![][deepwiki-shield]][deepwiki-link]
[![][discord-shield]][discord-link] [![][discord-shield]][discord-link]
[![][docker-shield]][docker-link] [![][docker-shield]][docker-link]
[![][github-release-shield]][github-release-link] [![][github-release-shield]][github-release-link]
[![][github-stars-shield]][github-stars-link] [![][github-stars-shield]][github-stars-link]
[English](/README.md) · [中文(简体)](/readmes/README.zh-hans.md) · [中文(繁體)](/readmes/README.zh-hant.md) · [日本語](/readmes/README.ja.md) · [Português (Brasil)](/readmes/README.pt-br.md) · [Español](/readmes/README.es.md) · [Русский](/readmes/README.ru.md) [English](/README.md) · [中文(简体)](/readmes/README.zh-hans.md) · [中文(繁體)](/readmes/README.zh-hant.md) · [日本語](/readmes/README.ja.md) · [Português (Brasil)](/readmes/README.pt-br.md)
</div> </div>
<br/> <br/>
@ -21,13 +19,7 @@
JumpServer is an open-source Privileged Access Management (PAM) tool that provides DevOps and IT teams with on-demand and secure access to SSH, RDP, Kubernetes, Database and RemoteApp endpoints through a web browser. JumpServer is an open-source Privileged Access Management (PAM) tool that provides DevOps and IT teams with on-demand and secure access to SSH, RDP, Kubernetes, Database and RemoteApp endpoints through a web browser.
![JumpServer Overview](https://github.com/jumpserver/jumpserver/assets/32935519/35a371cb-8590-40ed-88ec-f351f8cf9045)
<picture>
<source media="(prefers-color-scheme: light)" srcset="https://github.com/user-attachments/assets/dd612f3d-c958-4f84-b164-f31b75454d7f">
<source media="(prefers-color-scheme: dark)" srcset="https://github.com/user-attachments/assets/28676212-2bc4-4a9f-ae10-3be9320647e3">
<img src="https://github.com/user-attachments/assets/dd612f3d-c958-4f84-b164-f31b75454d7f" alt="Theme-based Image">
</picture>
## Quickstart ## Quickstart
@ -44,19 +36,18 @@ Access JumpServer in your browser at `http://your-jumpserver-ip/`
[![JumpServer Quickstart](https://github.com/user-attachments/assets/0f32f52b-9935-485e-8534-336c63389612)](https://www.youtube.com/watch?v=UlGYRbKrpgY "JumpServer Quickstart") [![JumpServer Quickstart](https://github.com/user-attachments/assets/0f32f52b-9935-485e-8534-336c63389612)](https://www.youtube.com/watch?v=UlGYRbKrpgY "JumpServer Quickstart")
## Screenshots ## Screenshots
<table style="border-collapse: collapse; border: 1px solid black;"> <table style="border-collapse: collapse; border: 1px solid black;">
<tr> <tr>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/99fabe5b-0475-4a53-9116-4c370a1426c4" alt="JumpServer Console" /></td> <td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/99fabe5b-0475-4a53-9116-4c370a1426c4" alt="JumpServer Console" /></td>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/user-attachments/assets/7c1f81af-37e8-4f07-8ac9-182895e1062e" alt="JumpServer PAM" /></td>    
</tr>
<tr>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/a424d731-1c70-4108-a7d8-5bbf387dda9a" alt="JumpServer Audits" /></td> <td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/a424d731-1c70-4108-a7d8-5bbf387dda9a" alt="JumpServer Audits" /></td>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/393d2c27-a2d0-4dea-882d-00ed509e00c9" alt="JumpServer Workbench" /></td>
</tr> </tr>
<tr> <tr>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/user-attachments/assets/eaa41f66-8cc8-4f01-a001-0d258501f1c9" alt="JumpServer RBAC" /></td>      <td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/393d2c27-a2d0-4dea-882d-00ed509e00c9" alt="JumpServer Workbench" /></td>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/3a2611cd-8902-49b8-b82b-2a6dac851f3e" alt="JumpServer Settings" /></td> <td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/3a2611cd-8902-49b8-b82b-2a6dac851f3e" alt="JumpServer Settings" /></td>
</tr> </tr>
<tr> <tr>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/1e236093-31f7-4563-8eb1-e36d865f1568" alt="JumpServer SSH" /></td> <td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/1e236093-31f7-4563-8eb1-e36d865f1568" alt="JumpServer SSH" /></td>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/69373a82-f7ab-41e8-b763-bbad2ba52167" alt="JumpServer RDP" /></td> <td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/69373a82-f7ab-41e8-b763-bbad2ba52167" alt="JumpServer RDP" /></td>
@ -78,9 +69,9 @@ JumpServer consists of multiple key components, which collectively form the func
| [KoKo](https://github.com/jumpserver/koko) | <a href="https://github.com/jumpserver/koko/releases"><img alt="Koko release" src="https://img.shields.io/github/release/jumpserver/koko.svg" /></a> | JumpServer Character Protocol Connector | | [KoKo](https://github.com/jumpserver/koko) | <a href="https://github.com/jumpserver/koko/releases"><img alt="Koko release" src="https://img.shields.io/github/release/jumpserver/koko.svg" /></a> | JumpServer Character Protocol Connector |
| [Lion](https://github.com/jumpserver/lion) | <a href="https://github.com/jumpserver/lion/releases"><img alt="Lion release" src="https://img.shields.io/github/release/jumpserver/lion.svg" /></a> | JumpServer Graphical Protocol Connector | | [Lion](https://github.com/jumpserver/lion) | <a href="https://github.com/jumpserver/lion/releases"><img alt="Lion release" src="https://img.shields.io/github/release/jumpserver/lion.svg" /></a> | JumpServer Graphical Protocol Connector |
| [Chen](https://github.com/jumpserver/chen) | <a href="https://github.com/jumpserver/chen/releases"><img alt="Chen release" src="https://img.shields.io/github/release/jumpserver/chen.svg" /> | JumpServer Web DB | | [Chen](https://github.com/jumpserver/chen) | <a href="https://github.com/jumpserver/chen/releases"><img alt="Chen release" src="https://img.shields.io/github/release/jumpserver/chen.svg" /> | JumpServer Web DB |
| [Tinker](https://github.com/jumpserver/tinker) | <img alt="Tinker" src="https://img.shields.io/badge/release-private-red" /> | JumpServer Remote Application Connector (Windows) |
| [Panda](https://github.com/jumpserver/Panda) | <img alt="Panda" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Remote Application Connector (Linux) |
| [Razor](https://github.com/jumpserver/razor) | <img alt="Chen" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE RDP Proxy Connector | | [Razor](https://github.com/jumpserver/razor) | <img alt="Chen" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE RDP Proxy Connector |
| [Tinker](https://github.com/jumpserver/tinker) | <img alt="Tinker" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Remote Application Connector (Windows) |
| [Panda](https://github.com/jumpserver/Panda) | <img alt="Panda" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Remote Application Connector (Linux) |
| [Magnus](https://github.com/jumpserver/magnus) | <img alt="Magnus" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Database Proxy Connector | | [Magnus](https://github.com/jumpserver/magnus) | <img alt="Magnus" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Database Proxy Connector |
| [Nec](https://github.com/jumpserver/nec) | <img alt="Nec" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE VNC Proxy Connector | | [Nec](https://github.com/jumpserver/nec) | <img alt="Nec" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE VNC Proxy Connector |
| [Facelive](https://github.com/jumpserver/facelive) | <img alt="Facelive" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Facial Recognition | | [Facelive](https://github.com/jumpserver/facelive) | <img alt="Facelive" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Facial Recognition |
@ -90,6 +81,12 @@ JumpServer consists of multiple key components, which collectively form the func
Welcome to submit PR to contribute. Please refer to [CONTRIBUTING.md][contributing-link] for guidelines. Welcome to submit PR to contribute. Please refer to [CONTRIBUTING.md][contributing-link] for guidelines.
## Security
JumpServer is a mission critical product. Please refer to the Basic Security Recommendations for installation and deployment. If you encounter any security-related issues, please contact us directly:
- Email: support@fit2cloud.com
## License ## License
Copyright (c) 2014-2025 FIT2CLOUD, All rights reserved. Copyright (c) 2014-2025 FIT2CLOUD, All rights reserved.
@ -103,7 +100,6 @@ Unless required by applicable law or agreed to in writing, software distributed
<!-- JumpServer official link --> <!-- JumpServer official link -->
[docs-link]: https://jumpserver.com/docs [docs-link]: https://jumpserver.com/docs
[discord-link]: https://discord.com/invite/W6vYXmAQG2 [discord-link]: https://discord.com/invite/W6vYXmAQG2
[deepwiki-link]: https://deepwiki.com/jumpserver/jumpserver/
[contributing-link]: https://github.com/jumpserver/jumpserver/blob/dev/CONTRIBUTING.md [contributing-link]: https://github.com/jumpserver/jumpserver/blob/dev/CONTRIBUTING.md
<!-- JumpServer Other link--> <!-- JumpServer Other link-->
@ -114,10 +110,10 @@ Unless required by applicable law or agreed to in writing, software distributed
[github-issues-link]: https://github.com/jumpserver/jumpserver/issues [github-issues-link]: https://github.com/jumpserver/jumpserver/issues
<!-- Shield link--> <!-- Shield link-->
[docs-shield]: https://img.shields.io/badge/documentation-148F76
[github-release-shield]: https://img.shields.io/github/v/release/jumpserver/jumpserver [github-release-shield]: https://img.shields.io/github/v/release/jumpserver/jumpserver
[github-stars-shield]: https://img.shields.io/github/stars/jumpserver/jumpserver?color=%231890FF&style=flat-square    [github-stars-shield]: https://img.shields.io/github/stars/jumpserver/jumpserver?color=%231890FF&style=flat-square
[docker-shield]: https://img.shields.io/docker/pulls/jumpserver/jms_all.svg [docker-shield]: https://img.shields.io/docker/pulls/jumpserver/jms_all.svg
[license-shield]: https://img.shields.io/github/license/jumpserver/jumpserver [license-shield]: https://img.shields.io/github/license/jumpserver/jumpserver
[deepwiki-shield]: https://img.shields.io/badge/deepwiki-devin?color=blue
[discord-shield]: https://img.shields.io/discord/1194233267294052363?style=flat&logo=discord&logoColor=%23f5f5f5&labelColor=%235462eb&color=%235462eb [discord-shield]: https://img.shields.io/discord/1194233267294052363?style=flat&logo=discord&logoColor=%23f5f5f5&labelColor=%235462eb&color=%235462eb
<!-- Image link -->

View File

@ -5,7 +5,8 @@ JumpServer 是一款正在成长的安全产品, 请参考 [基本安全建议
如果你发现安全问题,请直接联系我们,我们携手让世界更好: 如果你发现安全问题,请直接联系我们,我们携手让世界更好:
- ibuler@fit2cloud.com - ibuler@fit2cloud.com
- support@lxware.hk - support@fit2cloud.com
- 400-052-0755
# Security Policy # Security Policy
@ -15,5 +16,6 @@ JumpServer is a security product, The installation and development should follow
All security bugs should be reported to the contact as below: All security bugs should be reported to the contact as below:
- ibuler@fit2cloud.com - ibuler@fit2cloud.com
- support@lxware.hk - support@fit2cloud.com
- 400-052-0755

View File

@ -46,16 +46,6 @@ class AccountViewSet(OrgBulkModelViewSet):
} }
export_as_zip = True export_as_zip = True
def get_queryset(self):
queryset = super().get_queryset()
asset_id = self.request.query_params.get('asset') or self.request.query_params.get('asset_id')
if not asset_id:
return queryset
asset = get_object_or_404(Asset, pk=asset_id)
queryset = asset.all_accounts.all()
return queryset
@action(methods=['get'], detail=False, url_path='su-from-accounts') @action(methods=['get'], detail=False, url_path='su-from-accounts')
def su_from_accounts(self, request, *args, **kwargs): def su_from_accounts(self, request, *args, **kwargs):
account_id = request.query_params.get('account') account_id = request.query_params.get('account')
@ -127,7 +117,7 @@ class AccountViewSet(OrgBulkModelViewSet):
self.model.objects.create(**account_data) self.model.objects.create(**account_data)
success_count += 1 success_count += 1
except Exception as e: except Exception as e:
logger.debug(f'{"Move" if move else "Copy"} to assets error: {e}') logger.debug(f'{ "Move" if move else "Copy" } to assets error: {e}')
creation_results[asset] = {'error': _('Account already exists'), 'state': 'error'} creation_results[asset] = {'error': _('Account already exists'), 'state': 'error'}
results = [{'asset': str(asset), **res} for asset, res in creation_results.items()] results = [{'asset': str(asset), **res} for asset, res in creation_results.items()]
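Aside on the hunk above: the copy/move endpoint records a per-asset outcome and keeps going when one asset fails, rather than aborting the whole batch. A minimal, framework-free sketch of that pattern (the in-memory store and helper names are illustrative, not the project's API):

    existing = {("web-1", "root")}  # (asset, username) pairs that already exist

    def create_account(asset, username):
        key = (asset, username)
        if key in existing:
            raise ValueError("duplicate account")
        existing.add(key)

    def copy_to_assets(username, assets, move=False):
        results = {}
        for asset in assets:
            try:
                create_account(asset, username)
                results[asset] = {"state": "created"}
            except Exception as e:
                # mirror the logger.debug(...) line above: report and continue
                print(f'{"Move" if move else "Copy"} to assets error: {e}')
                results[asset] = {"error": "Account already exists", "state": "error"}
        return [{"asset": asset, **res} for asset, res in results.items()]

    print(copy_to_assets("root", ["web-1", "web-2"]))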

View File

@ -62,7 +62,8 @@ class IntegrationApplicationViewSet(OrgBulkModelViewSet):
) )
def get_once_secret(self, request, *args, **kwargs): def get_once_secret(self, request, *args, **kwargs):
instance = self.get_object() instance = self.get_object()
return Response(data={'id': instance.id, 'secret': instance.secret}) secret = instance.get_secret()
return Response(data={'id': instance.id, 'secret': secret})
@action(['GET'], detail=False, url_path='account-secret', @action(['GET'], detail=False, url_path='account-secret',
permission_classes=[RBACPermission]) permission_classes=[RBACPermission])

View File

@ -17,7 +17,7 @@ from orgs.mixins import generics
__all__ = [ __all__ = [
'AutomationAssetsListApi', 'AutomationRemoveAssetApi', 'AutomationAssetsListApi', 'AutomationRemoveAssetApi',
'AutomationAddAssetApi', 'AutomationNodeAddRemoveApi', 'AutomationAddAssetApi', 'AutomationNodeAddRemoveApi',
'AutomationExecutionViewSet' 'AutomationExecutionViewSet', 'RecordListMixin'
] ]
@ -39,10 +39,9 @@ class AutomationAssetsListApi(generics.ListAPIView):
return assets return assets
class AutomationRemoveAssetApi(generics.UpdateAPIView): class AutomationRemoveAssetApi(generics.RetrieveUpdateAPIView):
model = BaseAutomation model = BaseAutomation
serializer_class = serializers.UpdateAssetSerializer serializer_class = serializers.UpdateAssetSerializer
http_method_names = ['patch']
def update(self, request, *args, **kwargs): def update(self, request, *args, **kwargs):
instance = self.get_object() instance = self.get_object()
@ -57,10 +56,9 @@ class AutomationRemoveAssetApi(generics.UpdateAPIView):
return Response({'msg': 'ok'}) return Response({'msg': 'ok'})
class AutomationAddAssetApi(generics.UpdateAPIView): class AutomationAddAssetApi(generics.RetrieveUpdateAPIView):
model = BaseAutomation model = BaseAutomation
serializer_class = serializers.UpdateAssetSerializer serializer_class = serializers.UpdateAssetSerializer
http_method_names = ['patch']
def update(self, request, *args, **kwargs): def update(self, request, *args, **kwargs):
instance = self.get_object() instance = self.get_object()
@ -74,10 +72,9 @@ class AutomationAddAssetApi(generics.UpdateAPIView):
return Response({"error": serializer.errors}) return Response({"error": serializer.errors})
class AutomationNodeAddRemoveApi(generics.UpdateAPIView): class AutomationNodeAddRemoveApi(generics.RetrieveUpdateAPIView):
model = BaseAutomation model = BaseAutomation
serializer_class = serializers.UpdateNodeSerializer serializer_class = serializers.UpdateNodeSerializer
http_method_names = ['patch']
def update(self, request, *args, **kwargs): def update(self, request, *args, **kwargs):
action_params = ['add', 'remove'] action_params = ['add', 'remove']
@ -127,3 +124,12 @@ class AutomationExecutionViewSet(
execution = self.get_object() execution = self.get_object()
report = execution.manager.gen_report() report = execution.manager.gen_report()
return HttpResponse(report) return HttpResponse(report)
class RecordListMixin:
def list(self, request, *args, **kwargs):
try:
response = super().list(request, *args, **kwargs)
except Exception as e:
response = Response({'detail': str(e)}, status=status.HTTP_400_BAD_REQUEST)
return response
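The RecordListMixin introduced on the dev side wraps list() so that an exception raised while building the record list comes back as a 400 payload instead of a server error. A framework-neutral sketch of the same wrap-and-degrade idea (plain classes standing in for the DRF views):

    class BaseListView:
        def list(self, request):
            # pretend a filter backend rejected the query
            raise ValueError("Enter a valid UUID.")

    class SafeListMixin:
        def list(self, request):
            try:
                return super().list(request)
            except Exception as e:
                # in DRF: Response({'detail': str(e)}, status=status.HTTP_400_BAD_REQUEST)
                return {"status": 400, "detail": str(e)}

    class RecordView(SafeListMixin, BaseListView):
        pass

    print(RecordView().list(request=None))  # {'status': 400, 'detail': 'Enter a valid UUID.'}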

View File

@ -16,7 +16,7 @@ from orgs.mixins.api import OrgBulkModelViewSet, OrgGenericViewSet
from rbac.permissions import RBACPermission from rbac.permissions import RBACPermission
from .base import ( from .base import (
AutomationAssetsListApi, AutomationRemoveAssetApi, AutomationAddAssetApi, AutomationAssetsListApi, AutomationRemoveAssetApi, AutomationAddAssetApi,
AutomationNodeAddRemoveApi, AutomationExecutionViewSet AutomationNodeAddRemoveApi, AutomationExecutionViewSet, RecordListMixin
) )
__all__ = [ __all__ = [
@ -35,7 +35,7 @@ class ChangeSecretAutomationViewSet(OrgBulkModelViewSet):
serializer_class = serializers.ChangeSecretAutomationSerializer serializer_class = serializers.ChangeSecretAutomationSerializer
class ChangeSecretRecordViewSet(mixins.ListModelMixin, OrgGenericViewSet): class ChangeSecretRecordViewSet(RecordListMixin, mixins.ListModelMixin, OrgGenericViewSet):
filterset_class = ChangeSecretRecordFilterSet filterset_class = ChangeSecretRecordFilterSet
permission_classes = [RBACPermission, IsValidLicense] permission_classes = [RBACPermission, IsValidLicense]
search_fields = ('asset__address', 'account__username') search_fields = ('asset__address', 'account__username')

View File

@ -147,7 +147,6 @@ class CheckAccountEngineViewSet(JMSModelViewSet):
serializer_class = serializers.CheckAccountEngineSerializer serializer_class = serializers.CheckAccountEngineSerializer
permission_classes = [RBACPermission, IsValidLicense] permission_classes = [RBACPermission, IsValidLicense]
perm_model = CheckAccountEngine perm_model = CheckAccountEngine
http_method_names = ['get', 'options']
def get_queryset(self): def get_queryset(self):
return CheckAccountEngine.get_default_engines() return CheckAccountEngine.get_default_engines()

View File

@ -9,7 +9,7 @@ from accounts.models import PushAccountAutomation, PushSecretRecord
from orgs.mixins.api import OrgBulkModelViewSet, OrgGenericViewSet from orgs.mixins.api import OrgBulkModelViewSet, OrgGenericViewSet
from .base import ( from .base import (
AutomationAssetsListApi, AutomationRemoveAssetApi, AutomationAddAssetApi, AutomationAssetsListApi, AutomationRemoveAssetApi, AutomationAddAssetApi,
AutomationNodeAddRemoveApi, AutomationExecutionViewSet AutomationNodeAddRemoveApi, AutomationExecutionViewSet, RecordListMixin
) )
__all__ = [ __all__ = [
@ -42,7 +42,7 @@ class PushAccountExecutionViewSet(AutomationExecutionViewSet):
return queryset return queryset
class PushAccountRecordViewSet(mixins.ListModelMixin, OrgGenericViewSet): class PushAccountRecordViewSet(RecordListMixin, mixins.ListModelMixin, OrgGenericViewSet):
filterset_class = PushAccountRecordFilterSet filterset_class = PushAccountRecordFilterSet
search_fields = ('asset__address', 'account__username') search_fields = ('asset__address', 'account__username')
ordering_fields = ('date_finished',) ordering_fields = ('date_finished',)

View File

@ -69,7 +69,7 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
return return
asset = privilege_account.asset asset = privilege_account.asset
accounts = asset.all_accounts.all() accounts = asset.accounts.all()
accounts = accounts.filter(id__in=self.account_ids, secret_reset=True) accounts = accounts.filter(id__in=self.account_ids, secret_reset=True)
if self.secret_type: if self.secret_type:
@ -94,7 +94,6 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
h['account'] = { h['account'] = {
'name': account.name, 'name': account.name,
'username': account.username, 'username': account.username,
'full_username': account.full_username,
'secret_type': secret_type, 'secret_type': secret_type,
'secret': account.escape_jinja2_syntax(new_secret), 'secret': account.escape_jinja2_syntax(new_secret),
'private_key_path': private_key_path, 'private_key_path': private_key_path,

View File

@ -41,7 +41,6 @@
password: "{{ account.secret | password_hash('des') }}" password: "{{ account.secret | password_hash('des') }}"
update_password: always update_password: always
ignore_errors: true ignore_errors: true
register: change_secret_result
when: account.secret_type == "password" when: account.secret_type == "password"
- name: "Get home directory for {{ account.username }}" - name: "Get home directory for {{ account.username }}"
@ -84,7 +83,6 @@
user: "{{ account.username }}" user: "{{ account.username }}"
key: "{{ account.secret }}" key: "{{ account.secret }}"
exclusive: "{{ ssh_params.exclusive }}" exclusive: "{{ ssh_params.exclusive }}"
register: change_secret_result
when: account.secret_type == "ssh_key" when: account.secret_type == "ssh_key"
- name: Refresh connection - name: Refresh connection
@ -103,9 +101,7 @@
become_password: "{{ account.become.ansible_password | default('') }}" become_password: "{{ account.become.ansible_password | default('') }}"
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}" become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}" old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: when: account.secret_type == "password" and check_conn_after_change
- account.secret_type == "password"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost delegate_to: localhost
- name: "Verify {{ account.username }} SSH KEY (paramiko)" - name: "Verify {{ account.username }} SSH KEY (paramiko)"
@ -116,7 +112,5 @@
login_private_key_path: "{{ account.private_key_path }}" login_private_key_path: "{{ account.private_key_path }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}" gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}" old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: when: account.secret_type == "ssh_key" and check_conn_after_change
- account.secret_type == "ssh_key"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost delegate_to: localhost

View File

@ -41,7 +41,6 @@
password: "{{ account.secret | password_hash('sha512') }}" password: "{{ account.secret | password_hash('sha512') }}"
update_password: always update_password: always
ignore_errors: true ignore_errors: true
register: change_secret_result
when: account.secret_type == "password" when: account.secret_type == "password"
- name: "Get home directory for {{ account.username }}" - name: "Get home directory for {{ account.username }}"
@ -84,7 +83,6 @@
user: "{{ account.username }}" user: "{{ account.username }}"
key: "{{ account.secret }}" key: "{{ account.secret }}"
exclusive: "{{ ssh_params.exclusive }}" exclusive: "{{ ssh_params.exclusive }}"
register: change_secret_result
when: account.secret_type == "ssh_key" when: account.secret_type == "ssh_key"
- name: Refresh connection - name: Refresh connection
@ -103,9 +101,7 @@
become_password: "{{ account.become.ansible_password | default('') }}" become_password: "{{ account.become.ansible_password | default('') }}"
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}" become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}" old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: when: account.secret_type == "password" and check_conn_after_change
- account.secret_type == "password"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost delegate_to: localhost
- name: "Verify {{ account.username }} SSH KEY (paramiko)" - name: "Verify {{ account.username }} SSH KEY (paramiko)"
@ -116,7 +112,5 @@
login_private_key_path: "{{ account.private_key_path }}" login_private_key_path: "{{ account.private_key_path }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}" gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}" old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: when: account.secret_type == "ssh_key" and check_conn_after_change
- account.secret_type == "ssh_key"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost delegate_to: localhost

View File

@ -1,27 +0,0 @@
- hosts: demo
gather_facts: no
tasks:
- name: Test privileged account
ansible.windows.win_ping:
- name: Change password
community.windows.win_domain_user:
name: "{{ account.username }}"
password: "{{ account.secret }}"
update_password: always
password_never_expires: yes
state: present
groups: "{{ params.groups }}"
groups_action: add
ignore_errors: true
when: account.secret_type == "password"
- name: Refresh connection
ansible.builtin.meta: reset_connection
- name: Verify password
ansible.windows.win_ping:
vars:
ansible_user: "{{ account.full_username }}"
ansible_password: "{{ account.secret }}"
when: account.secret_type == "password" and check_conn_after_change

View File

@ -1,27 +0,0 @@
id: change_secret_ad_windows
name: "{{ 'Windows account change secret' | trans }}"
version: 1
method: change_secret
category:
- ds
type:
- windows_ad
params:
- name: groups
type: str
label: '用户组'
default: 'Users,Remote Desktop Users'
help_text: "{{ 'Params groups help text' | trans }}"
i18n:
Windows account change secret:
zh: '使用 Ansible 模块 win_domain_user 执行 Windows 账号改密'
ja: 'Ansible win_domain_user モジュールを使用して Windows アカウントのパスワード変更'
en: 'Using Ansible module win_domain_user to change Windows account secret'
Params groups help text:
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a2805a0264fc07ae597704841ab060edef8bf74654f525bc778cb9195d8cad0e
size 2547712

View File

@ -13,7 +13,6 @@ def parse_date(date_str, default=None):
formats = [ formats = [
'%Y/%m/%d %H:%M:%S', '%Y/%m/%d %H:%M:%S',
'%Y-%m-%dT%H:%M:%S', '%Y-%m-%dT%H:%M:%S',
'%Y-%m-%d %H:%M:%S',
'%d-%m-%Y %H:%M:%S', '%d-%m-%Y %H:%M:%S',
'%Y/%m/%d', '%Y/%m/%d',
'%d-%m-%Y', '%d-%m-%Y',
@ -27,6 +26,7 @@ def parse_date(date_str, default=None):
return default return default
# TODO 后期会挪到 playbook 中
class GatherAccountsFilter: class GatherAccountsFilter:
def __init__(self, tp): def __init__(self, tp):
self.tp = tp self.tp = tp
@ -208,35 +208,14 @@ class GatherAccountsFilter:
key, value = parts key, value = parts
user_info[key.strip()] = value.strip() user_info[key.strip()] = value.strip()
detail = {'groups': user_info.get('Global Group memberships', ''), } detail = {'groups': user_info.get('Global Group memberships', ''), }
user = {
username = user_info.get('User name') 'username': user_info.get('User name', ''),
if not username: 'date_password_change': parse_date(user_info.get('Password last set', '')),
continue 'date_password_expired': parse_date(user_info.get('Password expires', '')),
'date_last_login': parse_date(user_info.get('Last logon', '')),
result[username] = {
'username': username,
'date_password_change': parse_date(user_info.get('Password last set')),
'date_password_expired': parse_date(user_info.get('Password expires')),
'date_last_login': parse_date(user_info.get('Last logon')),
'groups': detail,
}
return result
@staticmethod
def windows_ad_filter(info):
result = {}
for user_info in info['user_details']:
detail = {'groups': user_info.get('GlobalGroupMemberships', ''), }
username = user_info.get('SamAccountName')
if not username:
continue
result[username] = {
'username': username,
'date_password_change': parse_date(user_info.get('PasswordLastSet')),
'date_password_expired': parse_date(user_info.get('PasswordExpires')),
'date_last_login': parse_date(user_info.get('LastLogonDate')),
'groups': detail, 'groups': detail,
} }
result[user['username']] = user
return result return result
@staticmethod @staticmethod
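Both sides of this filter rely on the parse_date helper shown at the top of the file, which walks a list of candidate timestamp formats. A standalone sketch of that approach, with an abbreviated format list rather than the project's exact one:

    from datetime import datetime

    def parse_date(date_str, default=None):
        # Try each candidate format in order; fall back to `default` on failure.
        formats = ['%Y/%m/%d %H:%M:%S', '%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M:%S', '%Y/%m/%d']
        if not date_str:
            return default
        for fmt in formats:
            try:
                return datetime.strptime(date_str.strip(), fmt)
            except ValueError:
                continue
        return default

    print(parse_date('2024/05/16 16:04:07'))   # 2024-05-16 16:04:07
    print(parse_date('never'))                 # None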

View File

@ -4,7 +4,6 @@
- name: Run net user command to get all users - name: Run net user command to get all users
win_shell: net user win_shell: net user
register: user_list_output register: user_list_output
failed_when: false
- name: Parse all users from net user command - name: Parse all users from net user command
set_fact: set_fact:

View File

@ -2,13 +2,10 @@ id: gather_accounts_windows
name: "{{ 'Windows account gather' | trans }}" name: "{{ 'Windows account gather' | trans }}"
version: 1 version: 1
method: gather_accounts method: gather_accounts
category: category: host
- host
type: type:
- windows - windows
i18n: i18n:
Windows account gather: Windows account gather:
zh: 使用命令 net user 收集 Windows 账号 zh: 使用命令 net user 收集 Windows 账号

View File

@ -1,74 +0,0 @@
- hosts: demo
gather_facts: no
tasks:
- name: Import ActiveDirectory module
win_shell: Import-Module ActiveDirectory
args:
warn: false
- name: Get the SamAccountName list of all AD users
win_shell: |
Import-Module ActiveDirectory
Get-ADUser -Filter * | Select-Object -ExpandProperty SamAccountName
register: ad_user_list
- name: Set the all_users variable
set_fact:
all_users: "{{ ad_user_list.stdout_lines }}"
- name: Get detailed information for each user
win_shell: |
Import-Module ActiveDirectory
$user = Get-ADUser -Identity {{ item }} -Properties Name, SamAccountName, Enabled, LastLogonDate, PasswordLastSet, msDS-UserPasswordExpiryTimeComputed, MemberOf
$globalGroups = @()
if ($user.MemberOf) {
$globalGroups = $user.MemberOf | ForEach-Object {
try {
$group = Get-ADGroup $_ -ErrorAction Stop
if ($group.GroupScope -eq 'Global') { $group.Name }
} catch {
}
}
}
$passwordExpiry = $null
$expiryRaw = $user.'msDS-UserPasswordExpiryTimeComputed'
if ($expiryRaw) {
try {
$passwordExpiry = [datetime]::FromFileTime($expiryRaw)
} catch {
$passwordExpiry = $null
}
}
$output = [PSCustomObject]@{
Name = $user.Name
SamAccountName = $user.SamAccountName
Enabled = $user.Enabled
LastLogonDate = if ($user.LastLogonDate) { $user.LastLogonDate.ToString("yyyy-MM-dd HH:mm:ss") } else { $null }
PasswordLastSet = if ($user.PasswordLastSet) { $user.PasswordLastSet.ToString("yyyy-MM-dd HH:mm:ss") } else { $null }
PasswordExpires = if ($passwordExpiry) { $passwordExpiry.ToString("yyyy-MM-dd HH:mm:ss") } else { $null }
GlobalGroupMemberships = $globalGroups
}
$output | ConvertTo-Json -Depth 3
loop: "{{ all_users }}"
register: ad_user_details
ignore_errors: yes
- set_fact:
info:
user_details: >-
{{
ad_user_details.results
| selectattr('rc', 'equalto', 0)
| map(attribute='stdout')
| select('truthy')
| map('from_json')
}}
- debug:
var: info

View File

@ -1,15 +0,0 @@
id: gather_accounts_windows_ad
name: "{{ 'Windows account gather' | trans }}"
version: 1
method: gather_accounts
category:
- ds
type:
- windows_ad
i18n:
Windows account gather:
zh: 使用命令 Get-ADUser 收集 Windows 账号
ja: コマンド Get-ADUser を使用して Windows アカウントを収集する
en: Using command Get-ADUser to gather accounts

View File

@ -1,6 +1,6 @@
import time
from collections import defaultdict from collections import defaultdict
import time
from django.utils import timezone from django.utils import timezone
from accounts.const import AutomationTypes from accounts.const import AutomationTypes
@ -222,7 +222,6 @@ class GatherAccountsManager(AccountBasePlaybookManager):
def _collect_asset_account_info(self, asset, info): def _collect_asset_account_info(self, asset, info):
result = self._filter_success_result(asset.type, info) result = self._filter_success_result(asset.type, info)
accounts = [] accounts = []
for username, info in result.items(): for username, info in result.items():
self.asset_usernames_mapper[str(asset.id)].add(username) self.asset_usernames_mapper[str(asset.id)].add(username)
@ -374,7 +373,6 @@ class GatherAccountsManager(AccountBasePlaybookManager):
for asset, accounts_data in self.asset_account_info.items(): for asset, accounts_data in self.asset_account_info.items():
ori_users = self.ori_asset_usernames[str(asset.id)] ori_users = self.ori_asset_usernames[str(asset.id)]
need_analyser_gather_account = []
with tmp_to_org(asset.org_id): with tmp_to_org(asset.org_id):
for d in accounts_data: for d in accounts_data:
username = d["username"] username = d["username"]
@ -387,11 +385,10 @@ class GatherAccountsManager(AccountBasePlaybookManager):
ga = ori_account ga = ori_account
self.update_gathered_account(ori_account, d) self.update_gathered_account(ori_account, d)
ori_found = username in ori_users ori_found = username in ori_users
need_analyser_gather_account.append((asset, ga, d, ori_found)) risk_analyser.analyse_risk(asset, ga, d, ori_found)
self.create_gathered_account.finish() self.create_gathered_account.finish()
self.update_gathered_account.finish() self.update_gathered_account.finish()
for analysis_data in need_analyser_gather_account:
risk_analyser.analyse_risk(*analysis_data)
self.update_gather_accounts_status(asset) self.update_gather_accounts_status(asset)
if not self.is_sync_account: if not self.is_sync_account:
continue continue
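In this hunk the dev side buffers (asset, gathered account, data, ori_found) tuples and only runs the risk analyser after the bulk create/update has finished, while the release side analyses each record inline. A stripped-down sketch of that collect-then-flush ordering (names are illustrative, not the manager's actual API):

    class BulkWriter:
        def __init__(self):
            self.rows = []
        def add(self, row):
            self.rows.append(row)
        def finish(self):
            print(f"flushed {len(self.rows)} gathered accounts")

    def analyse_risk(asset, username, ori_found):
        print(f"analyse {username}@{asset} (seen before: {ori_found})")

    writer, pending = BulkWriter(), []
    for asset, username, ori_found in [("web-1", "root", True), ("web-1", "deploy", False)]:
        writer.add((asset, username))
        pending.append((asset, username, ori_found))

    writer.finish()              # rows are persisted before any analysis runs
    for args in pending:
        analyse_risk(*args)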

View File

@ -41,7 +41,6 @@
password: "{{ account.secret | password_hash('des') }}" password: "{{ account.secret | password_hash('des') }}"
update_password: always update_password: always
ignore_errors: true ignore_errors: true
register: change_secret_result
when: account.secret_type == "password" when: account.secret_type == "password"
- name: "Get home directory for {{ account.username }}" - name: "Get home directory for {{ account.username }}"
@ -84,7 +83,6 @@
user: "{{ account.username }}" user: "{{ account.username }}"
key: "{{ account.secret }}" key: "{{ account.secret }}"
exclusive: "{{ ssh_params.exclusive }}" exclusive: "{{ ssh_params.exclusive }}"
register: change_secret_result
when: account.secret_type == "ssh_key" when: account.secret_type == "ssh_key"
- name: Refresh connection - name: Refresh connection
@ -103,9 +101,7 @@
become_password: "{{ account.become.ansible_password | default('') }}" become_password: "{{ account.become.ansible_password | default('') }}"
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}" become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}" old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: when: account.secret_type == "password" and check_conn_after_change
- account.secret_type == "password"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost delegate_to: localhost
- name: "Verify {{ account.username }} SSH KEY (paramiko)" - name: "Verify {{ account.username }} SSH KEY (paramiko)"
@ -116,8 +112,6 @@
login_private_key_path: "{{ account.private_key_path }}" login_private_key_path: "{{ account.private_key_path }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}" gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}" old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: when: account.secret_type == "ssh_key" and check_conn_after_change
- account.secret_type == "ssh_key"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost delegate_to: localhost

View File

@ -41,7 +41,6 @@
password: "{{ account.secret | password_hash('sha512') }}" password: "{{ account.secret | password_hash('sha512') }}"
update_password: always update_password: always
ignore_errors: true ignore_errors: true
register: change_secret_result
when: account.secret_type == "password" when: account.secret_type == "password"
- name: "Get home directory for {{ account.username }}" - name: "Get home directory for {{ account.username }}"
@ -84,7 +83,6 @@
user: "{{ account.username }}" user: "{{ account.username }}"
key: "{{ account.secret }}" key: "{{ account.secret }}"
exclusive: "{{ ssh_params.exclusive }}" exclusive: "{{ ssh_params.exclusive }}"
register: change_secret_result
when: account.secret_type == "ssh_key" when: account.secret_type == "ssh_key"
- name: Refresh connection - name: Refresh connection
@ -103,9 +101,7 @@
become_password: "{{ account.become.ansible_password | default('') }}" become_password: "{{ account.become.ansible_password | default('') }}"
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}" become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}" old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: when: account.secret_type == "password" and check_conn_after_change
- account.secret_type == "password"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost delegate_to: localhost
- name: "Verify {{ account.username }} SSH KEY (paramiko)" - name: "Verify {{ account.username }} SSH KEY (paramiko)"
@ -116,8 +112,6 @@
login_private_key_path: "{{ account.private_key_path }}" login_private_key_path: "{{ account.private_key_path }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}" gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}" old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when: when: account.secret_type == "ssh_key" and check_conn_after_change
- account.secret_type == "ssh_key"
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost delegate_to: localhost

View File

@ -1,27 +0,0 @@
- hosts: demo
gather_facts: no
tasks:
- name: Test privileged account
ansible.windows.win_ping:
- name: Push user password
community.windows.win_domain_user:
name: "{{ account.username }}"
password: "{{ account.secret }}"
update_password: always
password_never_expires: yes
state: present
groups: "{{ params.groups }}"
groups_action: add
ignore_errors: true
when: account.secret_type == "password"
- name: Refresh connection
ansible.builtin.meta: reset_connection
- name: Verify password
ansible.windows.win_ping:
vars:
ansible_user: "{{ account.full_username }}"
ansible_password: "{{ account.secret }}"
when: account.secret_type == "password" and check_conn_after_change

View File

@ -1,25 +0,0 @@
id: push_account_ad_windows
name: "{{ 'Windows account push' | trans }}"
version: 1
method: push_account
category:
- ds
type:
- windows_ad
params:
- name: groups
type: str
label: '用户组'
default: 'Users,Remote Desktop Users'
help_text: "{{ 'Params groups help text' | trans }}"
i18n:
Windows account push:
zh: '使用 Ansible 模块 win_domain_user 执行 Windows 账号推送'
ja: 'Ansible win_domain_user モジュールを使用して Windows アカウントをプッシュする'
en: 'Using Ansible module win_domain_user to push account'
Params groups help text:
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

View File

@ -11,5 +11,4 @@
login_host: "{{ jms_asset.address }}" login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}" login_port: "{{ jms_asset.port }}"
name: "{{ jms_asset.spec_info.db_name }}" name: "{{ jms_asset.spec_info.db_name }}"
script: "DROP LOGIN {{ account.username }}; select @@version" script: "DROP USER {{ account.username }}"

View File

@ -1,9 +0,0 @@
- hosts: windows
gather_facts: no
tasks:
- name: "Remove account"
ansible.windows.win_domain_user:
name: "{{ account.username }}"
state: absent

View File

@ -1,14 +0,0 @@
id: remove_account_ad_windows
name: "{{ 'Windows account remove' | trans }}"
version: 1
method: remove_account
category:
- ds
type:
- windows_ad
i18n:
Windows account remove:
zh: 使用 Ansible 模块 win_domain_user 删除账号
ja: Ansible モジュール win_domain_user を使用してアカウントを削除する
en: Use the Ansible module win_domain_user to delete an account

View File

@ -10,6 +10,6 @@
rdp_ping: rdp_ping:
login_host: "{{ jms_asset.address }}" login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}" login_port: "{{ jms_asset.port }}"
login_user: "{{ account.full_username }}" login_user: "{{ account.username }}"
login_password: "{{ account.secret }}" login_password: "{{ account.secret }}"
login_secret_type: "{{ account.secret_type }}" login_secret_type: "{{ account.secret_type }}"

View File

@ -2,10 +2,8 @@ id: verify_account_by_rdp
name: "{{ 'Windows rdp account verify' | trans }}" name: "{{ 'Windows rdp account verify' | trans }}"
category: category:
- host - host
- ds
type: type:
- windows - windows
- windows_ad
method: verify_account method: verify_account
protocol: rdp protocol: rdp
priority: 1 priority: 1

View File

@ -7,5 +7,5 @@
- name: Verify account - name: Verify account
ansible.windows.win_ping: ansible.windows.win_ping:
vars: vars:
ansible_user: "{{ account.full_username }}" ansible_user: "{{ account.username }}"
ansible_password: "{{ account.secret }}" ansible_password: "{{ account.secret }}"

View File

@ -2,12 +2,9 @@ id: verify_account_windows
name: "{{ 'Windows account verify' | trans }}" name: "{{ 'Windows account verify' | trans }}"
version: 1 version: 1
method: verify_account method: verify_account
category: category: host
- host
- ds
type: type:
- windows - windows
- windows_ad
i18n: i18n:
Windows account verify: Windows account verify:

View File

@ -42,7 +42,7 @@ class VerifyAccountManager(AccountBasePlaybookManager):
if host.get('error'): if host.get('error'):
return host return host
accounts = asset.all_accounts.all() accounts = asset.accounts.all()
accounts = self.get_accounts(account, accounts) accounts = self.get_accounts(account, accounts)
inventory_hosts = [] inventory_hosts = []
@ -64,7 +64,6 @@ class VerifyAccountManager(AccountBasePlaybookManager):
h['account'] = { h['account'] = {
'name': account.name, 'name': account.name,
'username': account.username, 'username': account.username,
'full_username': account.full_username,
'secret_type': account.secret_type, 'secret_type': account.secret_type,
'secret': account.escape_jinja2_syntax(secret), 'secret': account.escape_jinja2_syntax(secret),
'private_key_path': private_key_path, 'private_key_path': private_key_path,
@ -85,7 +84,6 @@ class VerifyAccountManager(AccountBasePlaybookManager):
def on_host_error(self, host, error, result): def on_host_error(self, host, error, result):
account = self.host_account_mapper.get(host) account = self.host_account_mapper.get(host)
try: try:
error_tp = account.get_err_connectivity(error) account.set_connectivity(Connectivity.ERR)
account.set_connectivity(error_tp)
except Exception as e: except Exception as e:
print(f'\033[31m Update account {account.name} connectivity failed: {e} \033[0m\n') print(f'\033[31m Update account {account.name} connectivity failed: {e} \033[0m\n')
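On the dev side the verify manager derives a connectivity state from the error before saving it, instead of always recording a generic ERR. A toy sketch of such an error-to-state mapping (the state names here are made up for illustration, not the project's Connectivity choices):

    def get_err_connectivity(error):
        text = (error or "").lower()
        if "authentication" in text or "permission denied" in text:
            return "auth_err"
        if "timed out" in text or "unreachable" in text:
            return "unreachable"
        return "err"

    print(get_err_connectivity("Authentication failed."))  # auth_err
    print(get_err_connectivity("connection timed out"))    # unreachable
    print(get_err_connectivity(None))                      # err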

View File

@ -5,6 +5,7 @@ import uuid
import django_filters import django_filters
from django.db.models import Q from django.db.models import Q
from django.utils import timezone from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from django_filters import rest_framework as drf_filters from django_filters import rest_framework as drf_filters
from rest_framework import filters from rest_framework import filters
from rest_framework.compat import coreapi from rest_framework.compat import coreapi
@ -12,26 +13,11 @@ from rest_framework.compat import coreapi
from assets.models import Node from assets.models import Node
from assets.utils import get_node_from_request from assets.utils import get_node_from_request
from common.drf.filters import BaseFilterSet from common.drf.filters import BaseFilterSet
from common.utils import get_logger
from common.utils.timezone import local_zero_hour, local_now from common.utils.timezone import local_zero_hour, local_now
from .const.automation import ChangeSecretRecordStatusChoice from .const.automation import ChangeSecretRecordStatusChoice
from .models import Account, GatheredAccount, ChangeSecretRecord, PushSecretRecord, IntegrationApplication, \ from .models import Account, GatheredAccount, ChangeSecretRecord, PushSecretRecord, IntegrationApplication, \
AutomationExecution AutomationExecution
logger = get_logger(__file__)
class UUIDFilterMixin:
@staticmethod
def filter_uuid(queryset, name, value):
try:
uuid.UUID(value)
except ValueError:
logger.warning(f"Invalid UUID: {value}")
return queryset.none()
return queryset.filter(**{name: value})
class NodeFilterBackend(filters.BaseFilterBackend): class NodeFilterBackend(filters.BaseFilterBackend):
fields = ['node_id'] fields = ['node_id']
@ -57,15 +43,14 @@ class NodeFilterBackend(filters.BaseFilterBackend):
return queryset return queryset
class AccountFilterSet(UUIDFilterMixin, BaseFilterSet): class AccountFilterSet(BaseFilterSet):
ip = drf_filters.CharFilter(field_name="address", lookup_expr="exact") ip = drf_filters.CharFilter(field_name="address", lookup_expr="exact")
name = drf_filters.CharFilter(field_name="name", lookup_expr="exact")
hostname = drf_filters.CharFilter(field_name="name", lookup_expr="exact") hostname = drf_filters.CharFilter(field_name="name", lookup_expr="exact")
username = drf_filters.CharFilter(field_name="username", lookup_expr="exact") username = drf_filters.CharFilter(field_name="username", lookup_expr="exact")
address = drf_filters.CharFilter(field_name="asset__address", lookup_expr="exact") address = drf_filters.CharFilter(field_name="asset__address", lookup_expr="exact")
asset_name = drf_filters.CharFilter(field_name="asset__name", lookup_expr="exact") asset_id = drf_filters.CharFilter(field_name="asset", lookup_expr="exact")
asset_id = drf_filters.CharFilter(field_name="asset", method="filter_uuid") asset = drf_filters.CharFilter(field_name="asset", lookup_expr="exact")
assets = drf_filters.CharFilter(field_name="asset_id", method="filter_uuid") assets = drf_filters.CharFilter(field_name="asset_id", lookup_expr="exact")
has_secret = drf_filters.BooleanFilter(method="filter_has_secret") has_secret = drf_filters.BooleanFilter(method="filter_has_secret")
platform = drf_filters.CharFilter( platform = drf_filters.CharFilter(
field_name="asset__platform_id", lookup_expr="exact" field_name="asset__platform_id", lookup_expr="exact"
@ -150,9 +135,8 @@ class AccountFilterSet(UUIDFilterMixin, BaseFilterSet):
kwargs.update({"date_change_secret__gt": date}) kwargs.update({"date_change_secret__gt": date})
if name == "latest_secret_change_failed": if name == "latest_secret_change_failed":
queryset = ( queryset = queryset.filter(date_change_secret__gt=date).exclude(
queryset.filter(date_change_secret__gt=date) change_secret_status=ChangeSecretRecordStatusChoice.success
.exclude(change_secret_status=ChangeSecretRecordStatusChoice.success)
) )
if kwargs: if kwargs:
@ -162,8 +146,8 @@ class AccountFilterSet(UUIDFilterMixin, BaseFilterSet):
class Meta: class Meta:
model = Account model = Account
fields = [ fields = [
"id", "source_id", "secret_type", "category", "type", "id", "asset", "source_id", "secret_type", "category",
"privileged", "secret_reset", "connectivity", "is_active" "type", "privileged", "secret_reset", "connectivity", 'is_active'
] ]
@ -201,6 +185,16 @@ class SecretRecordMixin(drf_filters.FilterSet):
return queryset.filter(date_finished__gte=dt) return queryset.filter(date_finished__gte=dt)
class UUIDExecutionFilterMixin:
@staticmethod
def filter_execution(queryset, name, value):
try:
uuid.UUID(value)
except ValueError:
raise ValueError(_('Enter a valid UUID.'))
return queryset.filter(**{name: value})
class DaysExecutionFilterMixin: class DaysExecutionFilterMixin:
days = drf_filters.NumberFilter(method="filter_days") days = drf_filters.NumberFilter(method="filter_days")
field: str field: str
@ -215,10 +209,10 @@ class DaysExecutionFilterMixin:
class ChangeSecretRecordFilterSet( class ChangeSecretRecordFilterSet(
SecretRecordMixin, UUIDFilterMixin, SecretRecordMixin, UUIDExecutionFilterMixin,
DaysExecutionFilterMixin, BaseFilterSet DaysExecutionFilterMixin, BaseFilterSet
): ):
execution_id = django_filters.CharFilter(method="filter_uuid") execution_id = django_filters.CharFilter(method="filter_execution")
days = drf_filters.NumberFilter(method="filter_days") days = drf_filters.NumberFilter(method="filter_days")
field = 'date_finished' field = 'date_finished'
@ -236,8 +230,8 @@ class AutomationExecutionFilterSet(DaysExecutionFilterMixin, BaseFilterSet):
fields = ["days", 'trigger', 'automation_id', 'automation__name'] fields = ["days", 'trigger', 'automation_id', 'automation__name']
class PushAccountRecordFilterSet(SecretRecordMixin, UUIDFilterMixin, BaseFilterSet): class PushAccountRecordFilterSet(SecretRecordMixin, UUIDExecutionFilterMixin, BaseFilterSet):
execution_id = django_filters.CharFilter(method="filter_uuid") execution_id = django_filters.CharFilter(method="filter_execution")
class Meta: class Meta:
model = PushSecretRecord model = PushSecretRecord

View File

@ -1,15 +1,65 @@
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework import status from rest_framework import status
from django.db.models import Model
from django.utils import translation from django.utils import translation
from django.utils.translation import gettext_noop
from audits.const import ActionChoices from audits.const import ActionChoices
from audits.handler import create_or_update_operate_log from common.views.mixins import RecordViewLogMixin
from common.utils import i18n_fmt
class AccountRecordViewLogMixin(object): class AccountRecordViewLogMixin(RecordViewLogMixin):
get_object: callable get_object: callable
model: Model get_queryset: callable
@staticmethod
def _filter_params(params):
new_params = {}
need_pop_params = ('format', 'order')
for key, value in params.items():
if key in need_pop_params:
continue
if isinstance(value, list):
value = list(filter(None, value))
if value:
new_params[key] = value
return new_params
def get_resource_display(self, request):
query_params = dict(request.query_params)
params = self._filter_params(query_params)
spm_filter = params.pop("spm", None)
if not params and not spm_filter:
display_message = gettext_noop("Export all")
elif spm_filter:
display_message = gettext_noop("Export only selected items")
else:
query = ",".join(
["%s=%s" % (key, value) for key, value in params.items()]
)
display_message = i18n_fmt(gettext_noop("Export filtered: %s"), query)
return display_message
@property
def detail_msg(self):
return i18n_fmt(
gettext_noop('User %s view/export secret'), self.request.user
)
def list(self, request, *args, **kwargs):
list_func = getattr(super(), 'list')
if not callable(list_func):
return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
response = list_func(request, *args, **kwargs)
with translation.override('en'):
resource_display = self.get_resource_display(request)
ids = [q.id for q in self.get_queryset()]
self.record_logs(
ids, ActionChoices.view, self.detail_msg, resource_display=resource_display
)
return response
def retrieve(self, request, *args, **kwargs): def retrieve(self, request, *args, **kwargs):
retrieve_func = getattr(super(), 'retrieve') retrieve_func = getattr(super(), 'retrieve')
@ -17,9 +67,9 @@ class AccountRecordViewLogMixin(object):
return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED) return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
response = retrieve_func(request, *args, **kwargs) response = retrieve_func(request, *args, **kwargs)
with translation.override('en'): with translation.override('en'):
create_or_update_operate_log( resource = self.get_object()
ActionChoices.view, self.model._meta.verbose_name, self.record_logs(
force=True, resource=self.get_object(), [resource.id], ActionChoices.view, self.detail_msg, resource=resource
) )
return response return response
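The _filter_params helper above is plain Python, so its behavior can be checked in isolation; this sketch reproduces it to show how the "Export filtered" audit message is derived (the query dict is illustrative):

    def filter_params(params, need_pop_params=('format', 'order')):
        # Drop formatting/ordering params and empty values, as _filter_params
        # does before the resource display message is built.
        new_params = {}
        for key, value in params.items():
            if key in need_pop_params:
                continue
            if isinstance(value, list):
                value = list(filter(None, value))
            if value:
                new_params[key] = value
        return new_params

    query_params = {'format': ['json'], 'username': ['admin'], 'asset': ['']}
    params = filter_params(query_params)
    print(params)  # {'username': ['admin']}
    print(",".join("%s=%s" % (key, value) for key, value in params.items()))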

View File

@ -131,46 +131,9 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
@lazyproperty @lazyproperty
def alias(self): def alias(self):
"""
Alias: virtual accounts are @INPUT / @MANUAL / @USER, otherwise it is the account id
"""
if self.username.startswith('@'): if self.username.startswith('@'):
return self.username return self.username
return str(self.id) return self.name
def is_virtual(self):
"""
Do not judge by username, because this may be a constructed account object that was given the username of an account with the same name,
"""
return self.alias.startswith('@')
def is_ds_account(self):
if self.is_virtual():
return ''
if not self.asset.is_directory_service:
return False
return True
@lazyproperty
def ds(self):
if not self.is_ds_account():
return None
return self.asset.ds
@lazyproperty
def ds_domain(self):
"""这个不能去掉perm_account 会动态设置这个值,以更改 full_username"""
if self.is_virtual():
return ''
if self.ds and self.ds.domain_name:
return self.ds.domain_name
return ''
@property
def full_username(self):
if self.ds_domain:
return '{}@{}'.format(self.username, self.ds_domain)
return self.username
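A toy restatement of the full_username property from this hunk, runnable without the ORM; the sample domain name is made up:

    def full_username(username, ds_domain=''):
        # An account on a directory-service asset is qualified as user@domain;
        # otherwise the plain username is returned unchanged.
        if ds_domain:
            return '{}@{}'.format(username, ds_domain)
        return username

    print(full_username('administrator', 'corp.example.com'))  # administrator@corp.example.com
    print(full_username('root'))                               # root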
@lazyproperty @lazyproperty
def has_secret(self): def has_secret(self):

View File

@ -92,9 +92,8 @@ class VirtualAccount(JMSOrgBaseModel):
from .account import Account from .account import Account
username = user.username username = user.username
alias = AliasAccount.USER.value
with tmp_to_org(asset.org): with tmp_to_org(asset.org):
same_account = cls.objects.filter(alias=alias).first() same_account = cls.objects.filter(alias='@USER').first()
secret = '' secret = ''
if same_account and same_account.secret_from_login: if same_account and same_account.secret_from_login:
@ -102,6 +101,4 @@ class VirtualAccount(JMSOrgBaseModel):
if not secret and not from_permed: if not secret and not from_permed:
secret = input_secret secret = input_secret
account = Account(name=AliasAccount.USER.label, username=username, secret=secret) return Account(name=AliasAccount.USER.label, username=username, secret=secret)
account.alias = alias
return account
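The secret selection for the @USER virtual account can be restated standalone. This sketch assumes the elided branch copies the user's login password when secret_from_login is set, which is what the field name suggests; it is an illustration, not the project's code:

    def pick_user_secret(login_secret, input_secret, secret_from_login, from_permed):
        # Prefer the cached login password when the same-alias virtual account
        # opts in; otherwise fall back to the manually entered secret, but only
        # when the request did not come through a permed (pre-authorized) path.
        secret = login_secret if secret_from_login else ''
        if not secret and not from_permed:
            secret = input_secret
        return secret

    print(pick_user_secret('login-pass', 'typed-pass', True, False))   # login-pass
    print(pick_user_secret('login-pass', 'typed-pass', False, False))  # typed-pass
    print(pick_user_secret('login-pass', 'typed-pass', False, True))   # ''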

View File

@ -233,7 +233,6 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
required=False, queryset=Account.objects, allow_null=True, allow_empty=True, required=False, queryset=Account.objects, allow_null=True, allow_empty=True,
label=_('Su from'), attrs=('id', 'name', 'username') label=_('Su from'), attrs=('id', 'name', 'username')
) )
ds = ObjectRelatedField(read_only=True, label=_('Directory service'), attrs=('id', 'name', 'domain_name'))
class Meta(BaseAccountSerializer.Meta): class Meta(BaseAccountSerializer.Meta):
model = Account model = Account
@ -242,7 +241,7 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
'date_change_secret', 'change_secret_status' 'date_change_secret', 'change_secret_status'
] ]
fields = BaseAccountSerializer.Meta.fields + [ fields = BaseAccountSerializer.Meta.fields + [
'su_from', 'asset', 'version', 'ds', 'su_from', 'asset', 'version',
'source', 'source_id', 'secret_reset', 'source', 'source_id', 'secret_reset',
] + AccountCreateUpdateSerializerMixin.Meta.fields + automation_fields ] + AccountCreateUpdateSerializerMixin.Meta.fields + automation_fields
read_only_fields = BaseAccountSerializer.Meta.read_only_fields + automation_fields read_only_fields = BaseAccountSerializer.Meta.read_only_fields + automation_fields
@ -259,7 +258,7 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
queryset = queryset.prefetch_related( queryset = queryset.prefetch_related(
'asset', 'asset__platform', 'asset', 'asset__platform',
'asset__platform__automation' 'asset__platform__automation'
) ).prefetch_related('labels', 'labels__label')
return queryset return queryset

View File

@ -1,11 +1,9 @@
from django.templatetags.static import static
from django.utils.translation import gettext_lazy as _ from django.utils.translation import gettext_lazy as _
from rest_framework import serializers from rest_framework import serializers
from accounts.models import IntegrationApplication from accounts.models import IntegrationApplication
from acls.serializers.rules import ip_group_child_validator, ip_group_help_text from acls.serializers.rules import ip_group_child_validator, ip_group_help_text
from common.serializers.fields import JSONManyToManyField from common.serializers.fields import JSONManyToManyField
from common.utils import random_string
from orgs.mixins.serializers import BulkOrgResourceModelSerializer from orgs.mixins.serializers import BulkOrgResourceModelSerializer
@ -29,18 +27,13 @@ class IntegrationApplicationSerializer(BulkOrgResourceModelSerializer):
'name': {'label': _('Name')}, 'name': {'label': _('Name')},
'accounts_amount': {'label': _('Accounts amount')}, 'accounts_amount': {'label': _('Accounts amount')},
'is_active': {'default': True}, 'is_active': {'default': True},
'logo': {'required': False},
} }
def to_representation(self, instance): def __init__(self, *args, **kwargs):
data = super().to_representation(instance) super().__init__(*args, **kwargs)
if not data.get('logo'): request_method = self.context.get('request').method
data['logo'] = static('img/logo.png') if request_method == 'PUT':
return data self.fields['logo'].required = False
def validate(self, attrs):
attrs['secret'] = random_string(36)
return attrs
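The new validate() fills the application secret with random_string(36). A comparable standalone generator using only the stdlib is shown below; the alphanumeric alphabet is an assumption, not taken from common.utils:

    import secrets
    import string

    def random_string(length=36, alphabet=string.ascii_letters + string.digits):
        # Cryptographically random token of the requested length.
        return ''.join(secrets.choice(alphabet) for _ in range(length))

    print(len(random_string()))  # 36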
class IntegrationAccountSecretSerializer(serializers.Serializer): class IntegrationAccountSecretSerializer(serializers.Serializer):

View File

@ -129,7 +129,7 @@
</tbody> </tbody>
</table> </table>
{% else %} {% else %}
<p class="no-data">{% trans 'No lost accounts found' %}</p> <p class="no-data">{% trans 'No new accounts found' %}</p>
{% endif %} {% endif %}
</div> </div>
</section> </section>

View File

@ -8,6 +8,6 @@ class ActionChoices(models.TextChoices):
review = 'review', _('Review') review = 'review', _('Review')
warning = 'warning', _('Warn') warning = 'warning', _('Warn')
notice = 'notice', _('Notify') notice = 'notice', _('Notify')
notify_and_warn = 'notify_and_warn', _('Prompt and warn') notify_and_warn = 'notify_and_warn', _('Notify and warn')
face_verify = 'face_verify', _('Face Verify') face_verify = 'face_verify', _('Face Verify')
face_online = 'face_online', _('Face Online') face_online = 'face_online', _('Face Online')

View File

@ -18,12 +18,7 @@ class LoginACLSerializer(BaseUserACLSerializer, BulkOrgResourceModelSerializer):
class Meta(BaseUserACLSerializer.Meta): class Meta(BaseUserACLSerializer.Meta):
model = LoginACL model = LoginACL
fields = BaseUserACLSerializer.Meta.fields + ['rules', ] fields = BaseUserACLSerializer.Meta.fields + ['rules', ]
action_choices_exclude = [ action_choices_exclude = [ActionChoices.face_online, ActionChoices.face_verify]
ActionChoices.warning,
ActionChoices.notify_and_warn,
ActionChoices.face_online,
ActionChoices.face_verify
]
def get_rules_serializer(self): def get_rules_serializer(self):
return RuleSerializer() return RuleSerializer()

View File

@ -1,10 +1,10 @@
from .asset import * from .asset import *
from .category import * from .category import *
from .domain import *
from .favorite_asset import * from .favorite_asset import *
from .mixin import * from .mixin import *
from .my_asset import *
from .node import * from .node import *
from .platform import * from .platform import *
from .protocol import * from .protocol import *
from .tree import * from .tree import *
from .zone import * from .my_asset import *

View File

@ -3,7 +3,6 @@ from .cloud import *
from .custom import * from .custom import *
from .database import * from .database import *
from .device import * from .device import *
from .ds import *
from .gpt import * from .gpt import *
from .host import * from .host import *
from .permission import * from .permission import *

View File

@ -11,7 +11,6 @@ from rest_framework.decorators import action
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework.status import HTTP_200_OK from rest_framework.status import HTTP_200_OK
from accounts.serializers import AccountSerializer
from accounts.tasks import push_accounts_to_assets_task, verify_accounts_connectivity_task from accounts.tasks import push_accounts_to_assets_task, verify_accounts_connectivity_task
from assets import serializers from assets import serializers
from assets.exceptions import NotSupportedTemporarilyError from assets.exceptions import NotSupportedTemporarilyError
@ -37,12 +36,12 @@ class AssetFilterSet(BaseFilterSet):
platform = drf_filters.CharFilter(method='filter_platform') platform = drf_filters.CharFilter(method='filter_platform')
is_gateway = drf_filters.BooleanFilter(method='filter_is_gateway') is_gateway = drf_filters.BooleanFilter(method='filter_is_gateway')
exclude_platform = drf_filters.CharFilter(field_name="platform__name", lookup_expr='exact', exclude=True) exclude_platform = drf_filters.CharFilter(field_name="platform__name", lookup_expr='exact', exclude=True)
zone = drf_filters.CharFilter(method='filter_zone') domain = drf_filters.CharFilter(method='filter_domain')
type = drf_filters.CharFilter(field_name="platform__type", lookup_expr="exact") type = drf_filters.CharFilter(field_name="platform__type", lookup_expr="exact")
category = drf_filters.CharFilter(field_name="platform__category", lookup_expr="exact") category = drf_filters.CharFilter(field_name="platform__category", lookup_expr="exact")
protocols = drf_filters.CharFilter(method='filter_protocols') protocols = drf_filters.CharFilter(method='filter_protocols')
gateway_enabled = drf_filters.BooleanFilter( domain_enabled = drf_filters.BooleanFilter(
field_name="platform__gateway_enabled", lookup_expr="exact" field_name="platform__domain_enabled", lookup_expr="exact"
) )
ping_enabled = drf_filters.BooleanFilter( ping_enabled = drf_filters.BooleanFilter(
field_name="platform__automation__ping_enabled", lookup_expr="exact" field_name="platform__automation__ping_enabled", lookup_expr="exact"
@ -85,11 +84,11 @@ class AssetFilterSet(BaseFilterSet):
return queryset return queryset
@staticmethod @staticmethod
def filter_zone(queryset, name, value): def filter_domain(queryset, name, value):
if is_uuid(value): if is_uuid(value):
return queryset.filter(zone_id=value) return queryset.filter(domain_id=value)
else: else:
return queryset.filter(zone__name__contains=value) return queryset.filter(domain__name__contains=value)
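filter_zone (filter_domain before the rename) branches on whether the value looks like a UUID; a standalone sketch of that branching, with a local is_uuid helper standing in for the project's utility:

    import uuid

    def is_uuid(value):
        try:
            uuid.UUID(str(value))
            return True
        except ValueError:
            return False

    def zone_lookup(value):
        # UUID-looking values match the zone primary key; anything else is
        # treated as a name substring.
        if is_uuid(value):
            return {'zone_id': value}
        return {'zone__name__contains': value}

    print(zone_lookup('0b4a7a3c-5f52-4d9a-9a7d-2f4f9b1c2d3e'))  # {'zone_id': ...}
    print(zone_lookup('prod'))  # {'zone__name__contains': 'prod'}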
@staticmethod @staticmethod
def filter_protocols(queryset, name, value): def filter_protocols(queryset, name, value):
@ -97,7 +96,7 @@ class AssetFilterSet(BaseFilterSet):
return queryset.filter(protocols__name__in=value).distinct() return queryset.filter(protocols__name__in=value).distinct()
class BaseAssetViewSet(OrgBulkModelViewSet): class AssetViewSet(SuggestionMixin, OrgBulkModelViewSet):
""" """
API endpoint that allows Asset to be viewed or edited. API endpoint that allows Asset to be viewed or edited.
""" """
@ -110,19 +109,18 @@ class BaseAssetViewSet(OrgBulkModelViewSet):
("platform", serializers.PlatformSerializer), ("platform", serializers.PlatformSerializer),
("suggestion", serializers.MiniAssetSerializer), ("suggestion", serializers.MiniAssetSerializer),
("gateways", serializers.GatewaySerializer), ("gateways", serializers.GatewaySerializer),
("accounts", AccountSerializer),
) )
rbac_perms = ( rbac_perms = (
("match", "assets.match_asset"), ("match", "assets.match_asset"),
("platform", "assets.view_platform"), ("platform", "assets.view_platform"),
("gateways", "assets.view_gateway"), ("gateways", "assets.view_gateway"),
("accounts", "assets.view_account"),
("spec_info", "assets.view_asset"), ("spec_info", "assets.view_asset"),
("gathered_info", "assets.view_asset"), ("gathered_info", "assets.view_asset"),
("sync_platform_protocols", "assets.change_asset"), ("sync_platform_protocols", "assets.change_asset"),
) )
extra_filter_backends = [ extra_filter_backends = [
IpInFilterBackend, NodeFilterBackend, AttrRulesFilterBackend IpInFilterBackend,
NodeFilterBackend, AttrRulesFilterBackend
] ]
def perform_destroy(self, instance): def perform_destroy(self, instance):
@ -143,25 +141,6 @@ class BaseAssetViewSet(OrgBulkModelViewSet):
return retrieve_cls return retrieve_cls
return cls return cls
def paginate_queryset(self, queryset):
page = super().paginate_queryset(queryset)
if page:
page = Asset.compute_all_accounts_amount(page)
return page
def create(self, request, *args, **kwargs):
if request.path.find('/api/v1/assets/assets/') > -1:
error = _('Cannot create asset directly, you should create a host or other')
return Response({'error': error}, status=400)
if not settings.XPACK_LICENSE_IS_VALID and self.model.objects.order_by().count() >= 5000:
error = _('The number of assets exceeds the limit of 5000')
return Response({'error': error}, status=400)
return super().create(request, *args, **kwargs)
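The create() guard that moved in this hunk amounts to two plain checks; restated outside DRF for clarity (the endpoint path, limit and error strings come straight from the diff):

    ASSET_LIMIT = 5000

    def can_create_asset(path, license_valid, current_count):
        # Direct POSTs to the generic asset endpoint are refused: a concrete
        # endpoint (host, database, ...) must be used instead.
        if '/api/v1/assets/assets/' in path:
            return False, 'Cannot create asset directly, you should create a host or other'
        # Without a valid XPack license the asset count is capped at 5000.
        if not license_valid and current_count >= ASSET_LIMIT:
            return False, 'The number of assets exceeds the limit of 5000'
        return True, ''

    print(can_create_asset('/api/v1/assets/assets/', True, 10))
    print(can_create_asset('/api/v1/assets/hosts/', False, 6000))
    print(can_create_asset('/api/v1/assets/hosts/', False, 10))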
class AssetViewSet(SuggestionMixin, BaseAssetViewSet):
@action(methods=["GET"], detail=True, url_path="platform") @action(methods=["GET"], detail=True, url_path="platform")
def platform(self, *args, **kwargs): def platform(self, *args, **kwargs):
asset = super().get_object() asset = super().get_object()
@ -171,10 +150,10 @@ class AssetViewSet(SuggestionMixin, BaseAssetViewSet):
@action(methods=["GET"], detail=True, url_path="gateways") @action(methods=["GET"], detail=True, url_path="gateways")
def gateways(self, *args, **kwargs): def gateways(self, *args, **kwargs):
asset = self.get_object() asset = self.get_object()
if not asset.zone: if not asset.domain:
gateways = Gateway.objects.none() gateways = Gateway.objects.none()
else: else:
gateways = asset.zone.gateways gateways = asset.domain.gateways
return self.get_paginated_response_from_queryset(gateways) return self.get_paginated_response_from_queryset(gateways)
@action(methods=['post'], detail=False, url_path='sync-platform-protocols') @action(methods=['post'], detail=False, url_path='sync-platform-protocols')
@ -210,6 +189,17 @@ class AssetViewSet(SuggestionMixin, BaseAssetViewSet):
Protocol.objects.bulk_create(objs) Protocol.objects.bulk_create(objs)
return Response(status=status.HTTP_200_OK) return Response(status=status.HTTP_200_OK)
def create(self, request, *args, **kwargs):
if request.path.find('/api/v1/assets/assets/') > -1:
error = _('Cannot create asset directly, you should create a host or other')
return Response({'error': error}, status=400)
if not settings.XPACK_LICENSE_IS_VALID and self.model.objects.order_by().count() >= 5000:
error = _('The number of assets exceeds the limit of 5000')
return Response({'error': error}, status=400)
return super().create(request, *args, **kwargs)
def filter_bulk_update_data(self): def filter_bulk_update_data(self):
bulk_data = [] bulk_data = []
skip_assets = [] skip_assets = []

View File

@ -1,12 +1,12 @@
from assets.models import Cloud, Asset from assets.models import Cloud, Asset
from assets.serializers import CloudSerializer from assets.serializers import CloudSerializer
from .asset import BaseAssetViewSet from .asset import AssetViewSet
__all__ = ['CloudViewSet'] __all__ = ['CloudViewSet']
class CloudViewSet(BaseAssetViewSet): class CloudViewSet(AssetViewSet):
model = Cloud model = Cloud
perm_model = Asset perm_model = Asset

View File

@ -1,12 +1,12 @@
from assets.models import Custom, Asset from assets.models import Custom, Asset
from assets.serializers import CustomSerializer from assets.serializers import CustomSerializer
from .asset import BaseAssetViewSet from .asset import AssetViewSet
__all__ = ['CustomViewSet'] __all__ = ['CustomViewSet']
class CustomViewSet(BaseAssetViewSet): class CustomViewSet(AssetViewSet):
model = Custom model = Custom
perm_model = Asset perm_model = Asset

View File

@ -1,12 +1,12 @@
from assets.models import Database, Asset from assets.models import Database, Asset
from assets.serializers import DatabaseSerializer from assets.serializers import DatabaseSerializer
from .asset import BaseAssetViewSet from .asset import AssetViewSet
__all__ = ['DatabaseViewSet'] __all__ = ['DatabaseViewSet']
class DatabaseViewSet(BaseAssetViewSet): class DatabaseViewSet(AssetViewSet):
model = Database model = Database
perm_model = Asset perm_model = Asset

View File

@ -1,11 +1,11 @@
from assets.models import Device, Asset
from assets.serializers import DeviceSerializer from assets.serializers import DeviceSerializer
from .asset import BaseAssetViewSet from assets.models import Device, Asset
from .asset import AssetViewSet
__all__ = ['DeviceViewSet'] __all__ = ['DeviceViewSet']
class DeviceViewSet(BaseAssetViewSet): class DeviceViewSet(AssetViewSet):
model = Device model = Device
perm_model = Asset perm_model = Asset

View File

@ -1,16 +0,0 @@
from assets.models import DirectoryService, Asset
from assets.serializers import DSSerializer
from .asset import BaseAssetViewSet
__all__ = ['DSViewSet']
class DSViewSet(BaseAssetViewSet):
model = DirectoryService
perm_model = Asset
def get_serializer_classes(self):
serializer_classes = super().get_serializer_classes()
serializer_classes['default'] = DSSerializer
return serializer_classes

View File

@ -1,12 +1,12 @@
from assets.models import GPT, Asset from assets.models import GPT, Asset
from assets.serializers import GPTSerializer from assets.serializers import GPTSerializer
from .asset import BaseAssetViewSet from .asset import AssetViewSet
__all__ = ['GPTViewSet'] __all__ = ['GPTViewSet']
class GPTViewSet(BaseAssetViewSet): class GPTViewSet(AssetViewSet):
model = GPT model = GPT
perm_model = Asset perm_model = Asset

View File

@ -1,11 +1,11 @@
from assets.models import Host, Asset from assets.models import Host, Asset
from assets.serializers import HostSerializer from assets.serializers import HostSerializer
from .asset import BaseAssetViewSet from .asset import AssetViewSet
__all__ = ['HostViewSet'] __all__ = ['HostViewSet']
class HostViewSet(BaseAssetViewSet): class HostViewSet(AssetViewSet):
model = Host model = Host
perm_model = Asset perm_model = Asset

View File

@ -1,12 +1,12 @@
from assets.models import Web, Asset from assets.models import Web, Asset
from assets.serializers import WebSerializer from assets.serializers import WebSerializer
from .asset import BaseAssetViewSet from .asset import AssetViewSet
__all__ = ['WebViewSet'] __all__ = ['WebViewSet']
class WebViewSet(BaseAssetViewSet): class WebViewSet(AssetViewSet):
model = Web model = Web
perm_model = Asset perm_model = Asset

View File

@ -9,24 +9,24 @@ from common.utils import get_logger
from orgs.mixins.api import OrgBulkModelViewSet from orgs.mixins.api import OrgBulkModelViewSet
from .asset import HostViewSet from .asset import HostViewSet
from .. import serializers from .. import serializers
from ..models import Zone, Gateway from ..models import Domain, Gateway
logger = get_logger(__file__) logger = get_logger(__file__)
__all__ = ['ZoneViewSet', 'GatewayViewSet', "GatewayTestConnectionApi"] __all__ = ['DomainViewSet', 'GatewayViewSet', "GatewayTestConnectionApi"]
class ZoneViewSet(OrgBulkModelViewSet): class DomainViewSet(OrgBulkModelViewSet):
model = Zone model = Domain
filterset_fields = ("name",) filterset_fields = ("name",)
search_fields = filterset_fields search_fields = filterset_fields
serializer_classes = { serializer_classes = {
'default': serializers.ZoneSerializer, 'default': serializers.DomainSerializer,
'list': serializers.ZoneListSerializer, 'list': serializers.DomainListSerializer,
} }
def get_serializer_class(self): def get_serializer_class(self):
if self.request.query_params.get('gateway'): if self.request.query_params.get('gateway'):
return serializers.ZoneWithGatewaySerializer return serializers.DomainWithGatewaySerializer
return super().get_serializer_class() return super().get_serializer_class()
def partial_update(self, request, *args, **kwargs): def partial_update(self, request, *args, **kwargs):
@ -36,8 +36,8 @@ class ZoneViewSet(OrgBulkModelViewSet):
class GatewayViewSet(HostViewSet): class GatewayViewSet(HostViewSet):
perm_model = Gateway perm_model = Gateway
filterset_fields = ("zone__name", "name", "zone") filterset_fields = ("domain__name", "name", "domain")
search_fields = ("zone__name",) search_fields = ("domain__name",)
def get_serializer_classes(self): def get_serializer_classes(self):
serializer_classes = super().get_serializer_classes() serializer_classes = super().get_serializer_classes()
@ -45,7 +45,7 @@ class GatewayViewSet(HostViewSet):
return serializer_classes return serializer_classes
def get_queryset(self): def get_queryset(self):
queryset = Zone.get_gateway_queryset() queryset = Domain.get_gateway_queryset()
return queryset return queryset
@ -55,7 +55,7 @@ class GatewayTestConnectionApi(SingleObjectMixin, APIView):
} }
def get_queryset(self): def get_queryset(self):
queryset = Zone.get_gateway_queryset() queryset = Domain.get_gateway_queryset()
return queryset return queryset
def post(self, request, *args, **kwargs): def post(self, request, *args, **kwargs):

View File

@ -52,7 +52,7 @@ class AssetPlatformViewSet(JMSModelViewSet):
queryset = ( queryset = (
super().get_queryset() super().get_queryset()
.annotate(assets_amount=Coalesce(Subquery(asset_count_subquery), Value(0))) .annotate(assets_amount=Coalesce(Subquery(asset_count_subquery), Value(0)))
.prefetch_related('protocols', 'automation') .prefetch_related('protocols', 'automation', 'labels', 'labels__label')
) )
queryset = queryset.filter(type__in=AllTypes.get_types_values()) queryset = queryset.filter(type__in=AllTypes.get_types_values())
return queryset return queryset

View File

@ -3,10 +3,10 @@ import json
import logging import logging
import os import os
import shutil import shutil
import time
from collections import defaultdict from collections import defaultdict
from socket import gethostname from socket import gethostname
import time
import yaml import yaml
from django.conf import settings from django.conf import settings
from django.template.loader import render_to_string from django.template.loader import render_to_string
@ -334,8 +334,7 @@ class PlaybookPrepareMixin:
return sub_playbook_path return sub_playbook_path
def check_automation_enabled(self, platform, assets): def check_automation_enabled(self, platform, assets):
automation = getattr(platform, 'automation', None) if not platform.automation or not platform.automation.ansible_enabled:
if not (automation and getattr(automation, 'ansible_enabled', False)):
print(_(" - Platform {} ansible disabled").format(platform.name)) print(_(" - Platform {} ansible disabled").format(platform.name))
self.on_assets_not_ansible_enabled(assets) self.on_assets_not_ansible_enabled(assets)
return False return False
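The rewritten guard tolerates platforms without an automation relation by using getattr twice; a self-contained illustration with stub objects:

    class Automation:
        ansible_enabled = True

    class Platform:
        name = 'Linux'
        automation = Automation()

    def automation_enabled(platform):
        # Mirrors the new check: a missing automation relation or a disabled
        # ansible flag both short-circuit to False instead of raising.
        automation = getattr(platform, 'automation', None)
        return bool(automation and getattr(automation, 'ansible_enabled', False))

    print(automation_enabled(Platform()))  # True
    print(automation_enabled(object()))    # False (no automation attribute)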

View File

@ -1,5 +1,3 @@
from collections import Counter
__all__ = ['FormatAssetInfo'] __all__ = ['FormatAssetInfo']
@ -9,37 +7,13 @@ class FormatAssetInfo:
self.tp = tp self.tp = tp
@staticmethod @staticmethod
def get_cpu_model_count(cpus): def posix_format(info):
try: for cpu_model in info.get('cpu_model', []):
models = [cpus[i + 1] + " " + cpus[i + 2] for i in range(0, len(cpus), 3)] if cpu_model.endswith('GHz') or cpu_model.startswith("Intel"):
break
model_counts = Counter(models) else:
cpu_model = ''
result = ', '.join([f"{model} x{count}" for model, count in model_counts.items()]) info['cpu_model'] = cpu_model[:48]
except Exception as e:
print(f"Error processing CPU model list: {e}")
result = ''
return result
@staticmethod
def get_gpu_model_count(gpus):
try:
model_counts = Counter(gpus)
result = ', '.join([f"{model} x{count}" for model, count in model_counts.items()])
except Exception as e:
print(f"Error processing GPU model list: {e}")
result = ''
return result
def posix_format(self, info):
cpus = self.get_cpu_model_count(info.get('cpu_model', []))
gpus = self.get_gpu_model_count(info.get('gpu_model', []))
info['gpu_model'] = gpus
info['cpu_model'] = cpus
info['cpu_count'] = info.get('cpu_count', 0) info['cpu_count'] = info.get('cpu_count', 0)
return info return info
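The new Counter-based aggregation collapses duplicate CPU/GPU model strings into "<model> xN" entries; the GPU variant is reproduced below in simplified form (without the defensive try/except), with made-up model names:

    from collections import Counter

    def get_gpu_model_count(gpus):
        # Count identical model strings and render them as "model xN".
        model_counts = Counter(gpus)
        return ', '.join(f"{model} x{count}" for model, count in model_counts.items())

    print(get_gpu_model_count(['Tesla T4', 'Tesla T4', 'A100 80GB']))
    # Tesla T4 x2, A100 80GB x1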

View File

@ -23,16 +23,5 @@
arch: "{{ ansible_architecture }}" arch: "{{ ansible_architecture }}"
kernel: "{{ ansible_kernel }}" kernel: "{{ ansible_kernel }}"
- name: Get GPU info with nvidia-smi
shell: |
nvidia-smi --query-gpu=name,memory.total,driver_version --format=csv,noheader,nounits
register: gpu_info
ignore_errors: yes
- name: Merge GPU info into final info
set_fact:
info: "{{ info | combine({'gpu_model': gpu_info.stdout_lines | default([])}) }}"
- debug: - debug:
var: info var: info

View File

@ -2,12 +2,9 @@ id: gather_facts_windows
name: "{{ 'Gather facts windows' | trans }}" name: "{{ 'Gather facts windows' | trans }}"
version: 1 version: 1
method: gather_facts method: gather_facts
category: category: host
- host
- ds
type: type:
- windows - windows
- windows_ad
i18n: i18n:
Gather facts windows: Gather facts windows:
zh: '使用 Ansible 指令 gather_facts 从 Windows 获取设备信息' zh: '使用 Ansible 指令 gather_facts 从 Windows 获取设备信息'

View File

@ -3,10 +3,8 @@ name: "{{ 'Ping by pyfreerdp' | trans }}"
category: category:
- device - device
- host - host
- ds
type: type:
- windows - windows
- windows_ad
method: ping method: ping
protocol: rdp protocol: rdp
priority: 1 priority: 1

View File

@ -3,7 +3,6 @@ name: "{{ 'Ping by paramiko' | trans }}"
category: category:
- device - device
- host - host
- ds
type: type:
- all - all
method: ping method: ping

View File

@ -3,7 +3,6 @@ name: "{{ 'Ping by telnet' | trans }}"
category: category:
- device - device
- host - host
- ds
type: type:
- all - all
method: ping method: ping

View File

@ -2,12 +2,9 @@ id: win_ping
name: "{{ 'Windows ping' | trans }}" name: "{{ 'Windows ping' | trans }}"
version: 1 version: 1
method: ping method: ping
category: category: host
- host
- ds
type: type:
- windows - windows
- windows_ad
i18n: i18n:
Windows ping: Windows ping:
zh: 使用 Ansible 模块 内置模块 win_ping 来测试可连接性 zh: 使用 Ansible 模块 内置模块 win_ping 来测试可连接性

View File

@ -37,11 +37,10 @@ class PingManager(BasePlaybookManager):
def on_host_error(self, host, error, result): def on_host_error(self, host, error, result):
asset, account = self.host_asset_and_account_mapper.get(host) asset, account = self.host_asset_and_account_mapper.get(host)
try: try:
error_tp = asset.get_err_connectivity(error) asset.set_connectivity(Connectivity.ERR)
asset.set_connectivity(error_tp)
if not account: if not account:
return return
account.set_connectivity(error_tp) account.set_connectivity(Connectivity.ERR)
except Exception as e: except Exception as e:
print(f'\033[31m Update account {account.name} or ' print(f'\033[31m Update account {account.name} or '
f'update asset {asset.name} connectivity failed: {e} \033[0m\n') f'update asset {asset.name} connectivity failed: {e} \033[0m\n')
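get_err_connectivity itself is not part of this compare, so the following is only a hypothetical sketch of what such a mapping could look like, keyed on the finer-grained Connectivity values shown in the next file; the error substrings are illustrative, not taken from the source:

    ERROR_PATTERNS = {
        # illustrative substrings -> new Connectivity values
        'permission denied': 'auth_err',
        'sudo': 'sudo_err',
        'invalid password': 'password_err',
        'ntlm': 'ntlm_err',
    }

    def guess_err_connectivity(error, default='err'):
        error = (error or '').lower()
        for pattern, connectivity in ERROR_PATTERNS.items():
            if pattern in error:
                return connectivity
        return default

    print(guess_err_connectivity('Authentication failed: permission denied'))  # auth_err
    print(guess_err_connectivity('connection timed out'))                      # err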

View File

@ -7,12 +7,6 @@ class Connectivity(TextChoices):
NA = 'na', _('N/A') NA = 'na', _('N/A')
OK = 'ok', _('OK') OK = 'ok', _('OK')
ERR = 'err', _('Error') ERR = 'err', _('Error')
AUTH_ERR = 'auth_err', _('Authentication error')
SUDO_ERR = 'sudo_err', _('Sudo permission error')
PASSWORD_ERR = 'password_err', _('Invalid password error')
OPENSSH_KEY_ERR = 'openssh_key_err', _('OpenSSH key error')
NTLM_ERR = 'ntlm_err', _('NTLM credentials rejected error')
CREATE_DIR_ERR = 'create_dir_err', _('Create directory error')
class AutomationTypes(TextChoices): class AutomationTypes(TextChoices):

View File

@ -37,7 +37,7 @@ class FillType(models.TextChoices):
class BaseType(TextChoices): class BaseType(TextChoices):
""" """
Constraints should capture platform-level limits and avoid redundant options: e.g. mysql enabling ssh, Constraints should capture platform-level limits and avoid redundant options: e.g. mysql enabling ssh,
or options that are enabled but have no effect, e.g. k8s enabling gateway, which is not yet supported or options that are enabled but have no effect, e.g. k8s enabling domain, which is not yet supported
""" """
@classmethod @classmethod
@ -112,7 +112,8 @@ class BaseType(TextChoices):
@classmethod @classmethod
def get_choices(cls): def get_choices(cls):
choices = cls.choices if not settings.XPACK_LICENSE_IS_VALID:
if not settings.XPACK_LICENSE_IS_VALID and hasattr(cls, 'get_community_types'):
choices = [(tp.value, tp.label) for tp in cls.get_community_types()] choices = [(tp.value, tp.label) for tp in cls.get_community_types()]
else:
choices = cls.choices
return choices return choices
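A minimal standalone model of the hasattr-guarded fallback in get_choices(): without a valid XPack license, only the community subset is offered. A plain Enum stands in for Django's TextChoices, and the license flag is passed explicitly instead of read from settings:

    from enum import Enum

    class HostTypesDemo(Enum):
        LINUX = 'linux'
        WINDOWS = 'windows'
        UNIX = 'unix'
        OTHER = 'other'

        @classmethod
        def get_community_types(cls):
            return [cls.LINUX, cls.WINDOWS, cls.UNIX, cls.OTHER]

    def get_choices(tp_cls, license_valid):
        # Fall back to the community subset only when the class defines one.
        if not license_valid and hasattr(tp_cls, 'get_community_types'):
            return [(tp.value, tp.name) for tp in tp_cls.get_community_types()]
        return [(tp.value, tp.name) for tp in tp_cls]

    print(get_choices(HostTypesDemo, license_valid=False))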

View File

@ -12,7 +12,6 @@ class Category(ChoicesMixin, models.TextChoices):
DATABASE = 'database', _("Database") DATABASE = 'database', _("Database")
CLOUD = 'cloud', _("Cloud service") CLOUD = 'cloud', _("Cloud service")
WEB = 'web', _("Web") WEB = 'web', _("Web")
DS = 'ds', _("Directory service")
CUSTOM = 'custom', _("Custom type") CUSTOM = 'custom', _("Custom type")
@classmethod @classmethod

View File

@ -13,11 +13,11 @@ class CloudTypes(BaseType):
return { return {
'*': { '*': {
'charset_enabled': False, 'charset_enabled': False,
'gateway_enabled': False, 'domain_enabled': False,
'su_enabled': False, 'su_enabled': False,
}, },
cls.K8S: { cls.K8S: {
'gateway_enabled': True, 'domain_enabled': True,
} }
} }

View File

@ -20,7 +20,7 @@ class CustomTypes(BaseType):
return { return {
'*': { '*': {
'charset_enabled': False, 'charset_enabled': False,
'gateway_enabled': False, 'domain_enabled': False,
'su_enabled': False, 'su_enabled': False,
}, },
} }

View File

@ -20,7 +20,7 @@ class DatabaseTypes(BaseType):
return { return {
'*': { '*': {
'charset_enabled': False, 'charset_enabled': False,
'gateway_enabled': True, 'domain_enabled': True,
'su_enabled': False, 'su_enabled': False,
} }
} }

View File

@ -19,8 +19,7 @@ class DeviceTypes(BaseType):
return { return {
'*': { '*': {
'charset_enabled': False, 'charset_enabled': False,
'gateway_enabled': True, 'domain_enabled': True,
'ds_enabled': True,
'su_enabled': True, 'su_enabled': True,
'su_methods': ['enable', 'super', 'super_level'] 'su_methods': ['enable', 'super', 'super_level']
} }

View File

@ -1,70 +0,0 @@
from django.utils.translation import gettext_lazy as _
from .base import BaseType
class DirectoryTypes(BaseType):
GENERAL = 'general', _('General')
# LDAP = 'ldap', _('LDAP')
# AD = 'ad', _('Active Directory')
WINDOWS_AD = 'windows_ad', _('Windows Active Directory')
# AZURE_AD = 'azure_ad', _('Azure Active Directory')
@classmethod
def _get_base_constrains(cls) -> dict:
return {
'*': {
'charset_enabled': True,
'gateway_enabled': True,
'ds_enabled': False,
'su_enabled': True,
},
cls.WINDOWS_AD: {
'su_enabled': False,
}
}
@classmethod
def _get_automation_constrains(cls) -> dict:
constrains = {
'*': {
'ansible_enabled': False,
},
cls.WINDOWS_AD: {
'ansible_enabled': True,
'ping_enabled': True,
'gather_facts_enabled': True,
'verify_account_enabled': True,
'change_secret_enabled': True,
'push_account_enabled': True,
'gather_accounts_enabled': True,
'remove_account_enabled': True,
}
}
return constrains
@classmethod
def _get_protocol_constrains(cls) -> dict:
return {
cls.GENERAL: {
'choices': ['ssh']
},
cls.WINDOWS_AD: {
'choices': ['rdp', 'ssh', 'vnc', 'winrm']
},
}
@classmethod
def internal_platforms(cls):
return {
cls.WINDOWS_AD: [
{'name': 'Windows Active Directory'}
],
}
@classmethod
def get_community_types(cls):
return [
cls.GENERAL,
]

View File

@ -11,7 +11,7 @@ class GPTTypes(BaseType):
return { return {
'*': { '*': {
'charset_enabled': False, 'charset_enabled': False,
'gateway_enabled': False, 'domain_enabled': False,
'su_enabled': False, 'su_enabled': False,
} }
} }

View File

@ -18,9 +18,8 @@ class HostTypes(BaseType):
'*': { '*': {
'charset_enabled': True, 'charset_enabled': True,
'charset': 'utf-8', # default 'charset': 'utf-8', # default
'gateway_enabled': True, 'domain_enabled': True,
'su_enabled': True, 'su_enabled': True,
'ds_enabled': True,
'su_methods': ['sudo', 'su', 'only_sudo', 'only_su'], 'su_methods': ['sudo', 'su', 'only_sudo', 'only_su'],
}, },
cls.WINDOWS: { cls.WINDOWS: {
@ -57,6 +56,7 @@ class HostTypes(BaseType):
'change_secret_enabled': True, 'change_secret_enabled': True,
'push_account_enabled': True, 'push_account_enabled': True,
'remove_account_enabled': True, 'remove_account_enabled': True,
}, },
cls.WINDOWS: { cls.WINDOWS: {
'ansible_config': { 'ansible_config': {
@ -69,6 +69,7 @@ class HostTypes(BaseType):
'ping_enabled': False, 'ping_enabled': False,
'gather_facts_enabled': False, 'gather_facts_enabled': False,
'gather_accounts_enabled': False, 'gather_accounts_enabled': False,
'verify_account_enabled': False,
'change_secret_enabled': False, 'change_secret_enabled': False,
'push_account_enabled': False 'push_account_enabled': False
}, },
@ -81,7 +82,7 @@ class HostTypes(BaseType):
{'name': 'Linux'}, {'name': 'Linux'},
{ {
'name': GATEWAY_NAME, 'name': GATEWAY_NAME,
'gateway_enabled': True, 'domain_enabled': True,
} }
], ],
cls.UNIX: [ cls.UNIX: [
@ -125,5 +126,5 @@ class HostTypes(BaseType):
@classmethod @classmethod
def get_community_types(cls) -> list: def get_community_types(cls) -> list:
return [ return [
cls.LINUX, cls.WINDOWS, cls.UNIX, cls.OTHER_HOST cls.LINUX, cls.UNIX, cls.WINDOWS, cls.OTHER_HOST
] ]

View File

@ -13,7 +13,6 @@ from .cloud import CloudTypes
from .custom import CustomTypes from .custom import CustomTypes
from .database import DatabaseTypes from .database import DatabaseTypes
from .device import DeviceTypes from .device import DeviceTypes
from .ds import DirectoryTypes
from .gpt import GPTTypes from .gpt import GPTTypes
from .host import HostTypes from .host import HostTypes
from .web import WebTypes from .web import WebTypes
@ -23,8 +22,7 @@ class AllTypes(ChoicesMixin):
choices: list choices: list
includes = [ includes = [
HostTypes, DeviceTypes, DatabaseTypes, HostTypes, DeviceTypes, DatabaseTypes,
CloudTypes, WebTypes, CustomTypes, CloudTypes, WebTypes, CustomTypes, GPTTypes
DirectoryTypes, GPTTypes
] ]
_category_constrains = {} _category_constrains = {}
_automation_methods = None _automation_methods = None
@ -175,7 +173,6 @@ class AllTypes(ChoicesMixin):
(Category.DATABASE, DatabaseTypes), (Category.DATABASE, DatabaseTypes),
(Category.WEB, WebTypes), (Category.WEB, WebTypes),
(Category.CLOUD, CloudTypes), (Category.CLOUD, CloudTypes),
(Category.DS, DirectoryTypes),
(Category.CUSTOM, CustomTypes) (Category.CUSTOM, CustomTypes)
] ]
return types return types
@ -312,7 +309,7 @@ class AllTypes(ChoicesMixin):
'category': category, 'category': category,
'type': tp, 'internal': True, 'type': tp, 'internal': True,
'charset': constraints.get('charset', 'utf-8'), 'charset': constraints.get('charset', 'utf-8'),
'gateway_enabled': constraints.get('gateway_enabled', False), 'domain_enabled': constraints.get('domain_enabled', False),
'su_enabled': constraints.get('su_enabled', False), 'su_enabled': constraints.get('su_enabled', False),
} }
if data['su_enabled'] and data.get('su_methods'): if data['su_enabled'] and data.get('su_methods'):

View File

@ -11,7 +11,7 @@ class WebTypes(BaseType):
return { return {
'*': { '*': {
'charset_enabled': False, 'charset_enabled': False,
'gateway_enabled': False, 'domain_enabled': False,
'su_enabled': False, 'su_enabled': False,
} }
} }

View File

@ -1,11 +1,11 @@
# Generated by Django 4.1.13 on 2024-05-09 03:16 # Generated by Django 4.1.13 on 2024-05-09 03:16
import django.db.models.deletion import json
import assets.models.asset.common
from django.db.models import F, Q
from django.conf import settings from django.conf import settings
from django.db import migrations, models from django.db import migrations, models
from django.db.models import F import django.db.models.deletion
import assets.models.asset.common
class Migration(migrations.Migration): class Migration(migrations.Migration):
@ -39,26 +39,22 @@ class Migration(migrations.Migration):
migrations.AddField( migrations.AddField(
model_name='automationexecution', model_name='automationexecution',
name='automation', name='automation',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='executions', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='executions', to='assets.baseautomation', verbose_name='Automation task'),
to='assets.baseautomation', verbose_name='Automation task'),
), ),
migrations.AddField( migrations.AddField(
model_name='asset', model_name='asset',
name='domain', name='domain',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='assets', to='assets.domain', verbose_name='Zone'),
related_name='assets', to='assets.domain', verbose_name='Zone'),
), ),
migrations.AddField( migrations.AddField(
model_name='asset', model_name='asset',
name='nodes', name='nodes',
field=models.ManyToManyField(default=assets.models.asset.common.default_node, related_name='assets', field=models.ManyToManyField(default=assets.models.asset.common.default_node, related_name='assets', to='assets.node', verbose_name='Nodes'),
to='assets.node', verbose_name='Nodes'),
), ),
migrations.AddField( migrations.AddField(
model_name='asset', model_name='asset',
name='platform', name='platform',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='assets', field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='assets', to='assets.platform', verbose_name='Platform'),
to='assets.platform', verbose_name='Platform'),
), ),
migrations.CreateModel( migrations.CreateModel(
name='AssetBaseAutomation', name='AssetBaseAutomation',
@ -75,9 +71,7 @@ class Migration(migrations.Migration):
migrations.CreateModel( migrations.CreateModel(
name='GatherFactsAutomation', name='GatherFactsAutomation',
fields=[ fields=[
('baseautomation_ptr', ('baseautomation_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='assets.baseautomation')),
models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True,
primary_key=True, serialize=False, to='assets.baseautomation')),
], ],
options={ options={
'verbose_name': 'Gather asset facts', 'verbose_name': 'Gather asset facts',
@ -87,9 +81,7 @@ class Migration(migrations.Migration):
migrations.CreateModel( migrations.CreateModel(
name='PingAutomation', name='PingAutomation',
fields=[ fields=[
('baseautomation_ptr', ('baseautomation_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='assets.baseautomation')),
models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True,
primary_key=True, serialize=False, to='assets.baseautomation')),
], ],
options={ options={
'verbose_name': 'Ping asset', 'verbose_name': 'Ping asset',

View File

@ -18,7 +18,7 @@ platforms_data_json = '''[
"type": "linux", "type": "linux",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": true, "su_enabled": true,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -119,7 +119,7 @@ platforms_data_json = '''[
"type": "unix", "type": "unix",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": true, "su_enabled": true,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -209,7 +209,7 @@ platforms_data_json = '''[
"type": "unix", "type": "unix",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": true, "su_enabled": true,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -299,7 +299,7 @@ platforms_data_json = '''[
"type": "unix", "type": "unix",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": true, "su_enabled": true,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -389,7 +389,7 @@ platforms_data_json = '''[
"type": "windows", "type": "windows",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -481,7 +481,7 @@ platforms_data_json = '''[
"security": "any" "security": "any"
}, },
"internal": false, "internal": false,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -582,7 +582,7 @@ platforms_data_json = '''[
"type": "other", "type": "other",
"meta": {}, "meta": {},
"internal": false, "internal": false,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -684,7 +684,7 @@ platforms_data_json = '''[
"security": "rdp" "security": "rdp"
}, },
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -776,7 +776,7 @@ platforms_data_json = '''[
"security": "tls" "security": "tls"
}, },
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -866,7 +866,7 @@ platforms_data_json = '''[
"type": "unix", "type": "unix",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": true, "su_enabled": true,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -956,7 +956,7 @@ platforms_data_json = '''[
"type": "linux", "type": "linux",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": true, "su_enabled": true,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -1057,7 +1057,7 @@ platforms_data_json = '''[
"type": "windows", "type": "windows",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -1136,7 +1136,7 @@ platforms_data_json = '''[
"type": "general", "type": "general",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -1201,7 +1201,7 @@ platforms_data_json = '''[
"type": "general", "type": "general",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": true, "su_enabled": true,
"su_method": "enable", "su_method": "enable",
"custom_fields": [], "custom_fields": [],
@ -1266,7 +1266,7 @@ platforms_data_json = '''[
"type": "general", "type": "general",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": true, "su_enabled": true,
"su_method": "super", "su_method": "super",
"custom_fields": [], "custom_fields": [],
@ -1332,7 +1332,7 @@ platforms_data_json = '''[
"type": "general", "type": "general",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": true, "su_enabled": true,
"su_method": "super_level", "su_method": "super_level",
"custom_fields": [], "custom_fields": [],
@ -1397,7 +1397,7 @@ platforms_data_json = '''[
"type": "mysql", "type": "mysql",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -1449,7 +1449,7 @@ platforms_data_json = '''[
"type": "mariadb", "type": "mariadb",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -1501,7 +1501,7 @@ platforms_data_json = '''[
"type": "postgresql", "type": "postgresql",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -1553,7 +1553,7 @@ platforms_data_json = '''[
"type": "oracle", "type": "oracle",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -1605,7 +1605,7 @@ platforms_data_json = '''[
"type": "sqlserver", "type": "sqlserver",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -1657,7 +1657,7 @@ platforms_data_json = '''[
"type": "clickhouse", "type": "clickhouse",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -1709,7 +1709,7 @@ platforms_data_json = '''[
"type": "mongodb", "type": "mongodb",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -1761,7 +1761,7 @@ platforms_data_json = '''[
"type": "redis", "type": "redis",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -1815,7 +1815,7 @@ platforms_data_json = '''[
"type": "redis", "type": "redis",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -1869,7 +1869,7 @@ platforms_data_json = '''[
"type": "website", "type": "website",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": false, "domain_enabled": false,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -1924,7 +1924,7 @@ platforms_data_json = '''[
"type": "private", "type": "private",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": false, "domain_enabled": false,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -1979,7 +1979,7 @@ platforms_data_json = '''[
"type": "k8s", "type": "k8s",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": false, "domain_enabled": false,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -2029,7 +2029,7 @@ platforms_data_json = '''[
"type": "chatgpt", "type": "chatgpt",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": false, "domain_enabled": false,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -2081,7 +2081,7 @@ platforms_data_json = '''[
"type": "db2", "type": "db2",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],
@ -2131,7 +2131,7 @@ platforms_data_json = '''[
"type": "dameng", "type": "dameng",
"meta": {}, "meta": {},
"internal": true, "internal": true,
"gateway_enabled": true, "domain_enabled": true,
"su_enabled": false, "su_enabled": false,
"su_method": null, "su_method": null,
"custom_fields": [], "custom_fields": [],

View File

@ -1,57 +0,0 @@
# Generated by Django 4.1.13 on 2025-04-03 09:51
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("assets", "0015_automationexecution_type"),
]
operations = [
migrations.CreateModel(
name="DirectoryService",
fields=[
(
"asset_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="assets.asset",
),
),
(
"domain_name",
models.CharField(
blank=True,
default="",
max_length=128,
verbose_name="Domain name",
),
),
],
options={
"verbose_name": "Directory service",
"default_related_name": "ds"
},
bases=("assets.asset",),
),
migrations.AddField(
model_name="platform",
name="ds_enabled",
field=models.BooleanField(default=False, verbose_name="DS enabled"),
),
migrations.AddField(
model_name="asset",
name="directory_services",
field=models.ManyToManyField(
related_name="assets",
to="assets.directoryservice",
verbose_name="Directory services",
)
),
]

View File

@ -1,165 +0,0 @@
# Generated by Django 4.1.13 on 2025-04-07 03:24
import json
from django.db import migrations
from assets.const import AllTypes
def add_ds_platforms(apps, schema_editor):
data = """
[
{
"created_by": "system",
"updated_by": "system",
"comment": "",
"name": "WindowsActiveDirectory",
"category": "ds",
"type": "windows_ad",
"meta": {},
"internal": true,
"gateway_enabled": true,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
"automation": {
"ansible_enabled": true,
"ansible_config": {
"ansible_shell_type": "cmd",
"ansible_connection": "ssh"
},
"ping_enabled": true,
"ping_method": "ping_by_rdp",
"ping_params": {},
"gather_facts_enabled": true,
"gather_facts_method": "gather_facts_windows",
"gather_facts_params": {},
"change_secret_enabled": true,
"change_secret_method": "change_secret_ad_windows",
"change_secret_params": {
},
"push_account_enabled": true,
"push_account_method": "push_account_ad_windows",
"push_account_params": {},
"verify_account_enabled": true,
"verify_account_method": "verify_account_by_rdp",
"verify_account_params": {
},
"gather_accounts_enabled": true,
"gather_accounts_method": "gather_accounts_windows_ad",
"gather_accounts_params": {
},
"remove_account_enabled": true,
"remove_account_method": "remove_account_ad_windows",
"remove_account_params": {
}
},
"protocols": [
{
"name": "rdp",
"port": 3389,
"primary": true,
"required": false,
"default": false,
"public": true,
"setting": {
"console": false,
"security": "any"
}
},
{
"name": "ssh",
"port": 22,
"primary": false,
"required": false,
"default": false,
"public": true,
"setting": {
"sftp_enabled": true,
"sftp_home": "/tmp"
}
},
{
"name": "vnc",
"port": 5900,
"primary": false,
"required": false,
"default": false,
"public": true,
"setting": {
}
},
{
"name": "winrm",
"port": 5985,
"primary": false,
"required": false,
"default": false,
"public": false,
"setting": {
"use_ssl": false
}
}
]
},
{
"created_by": "system",
"updated_by": "system",
"comment": "",
"name": "General",
"category": "ds",
"type": "general",
"meta": {
},
"internal": true,
"gateway_enabled": false,
"su_enabled": false,
"su_method": null,
"custom_fields": [
],
"automation": {
"ansible_enabled": false,
"ansible_config": {
}
},
"protocols": [
{
"name": "ssh",
"port": 22,
"primary": true,
"required": false,
"default": false,
"public": true,
"setting": {
"sftp_enabled": true,
"sftp_home": "/tmp"
}
}
]
}
]
"""
platform_model = apps.get_model('assets', 'Platform')
automation_cls = apps.get_model('assets', 'PlatformAutomation')
platform_datas = json.loads(data)
for platform_data in platform_datas:
AllTypes.create_or_update_by_platform_data(
platform_data, platform_cls=platform_model,
automation_cls=automation_cls
)
class Migration(migrations.Migration):
dependencies = [
("assets", "0016_directory_service"),
]
operations = [
migrations.RunPython(add_ds_platforms)
]

View File

@ -1,26 +0,0 @@
# Generated by Django 4.1.13 on 2025-04-18 08:05
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("assets", "0017_auto_20250407_1124"),
]
operations = [
migrations.RenameField(
model_name="platform",
old_name="domain_enabled",
new_name="gateway_enabled",
),
migrations.RenameModel(
old_name="Domain",
new_name="Zone",
),
migrations.RenameField(
model_name="asset",
old_name="domain",
new_name="zone",
),
]

View File

@@ -1,10 +1,9 @@
-# noqa
 from .base import *
 from .platform import *
 from .asset import *
 from .label import Label
 from .gateway import *
-from .zone import *  # noqa
+from .domain import *
 from .node import *
 from .favorite_asset import *
 from .automations import *

View File

@@ -3,7 +3,6 @@ from .common import *
 from .custom import *
 from .database import *
 from .device import *
-from .ds import *
 from .gpt import *
 from .host import *
 from .web import *

View File

@@ -6,7 +6,7 @@ import logging
 from collections import defaultdict

 from django.db import models
-from django.db.models import Q, Count
+from django.db.models import Q
 from django.forms import model_to_dict
 from django.utils.translation import gettext_lazy as _

@@ -168,17 +168,13 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
     platform = models.ForeignKey(
         Platform, on_delete=models.PROTECT, verbose_name=_("Platform"), related_name='assets'
     )
-    zone = models.ForeignKey(
-        "assets.Zone", null=True, blank=True, related_name='assets',
+    domain = models.ForeignKey(
+        "assets.Domain", null=True, blank=True, related_name='assets',
         verbose_name=_("Zone"), on_delete=models.SET_NULL
     )
     nodes = models.ManyToManyField(
         'assets.Node', default=default_node, related_name='assets', verbose_name=_("Nodes")
     )
-    directory_services = models.ManyToManyField(
-        'assets.DirectoryService', related_name='assets',
-        verbose_name=_("Directory services")
-    )
     is_active = models.BooleanField(default=True, verbose_name=_('Active'))
     gathered_info = models.JSONField(verbose_name=_('Gathered info'), default=dict, blank=True)  # asset details, e.g. hardware info
     custom_info = models.JSONField(verbose_name=_('Custom info'), default=dict)
@@ -205,10 +201,6 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
             info[i.name] = v
         return info

-    @lazyproperty
-    def is_directory_service(self):
-        return self.category == const.Category.DS and hasattr(self, 'ds')
-
     @lazyproperty
     def spec_info(self):
         instance = getattr(self, self.category, None)
@@ -244,7 +236,7 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
         platform = self.platform
         auto_config = {
             'su_enabled': platform.su_enabled,
-            'gateway_enabled': platform.gateway_enabled,
+            'domain_enabled': platform.domain_enabled,
             'ansible_enabled': False
         }
         automation = getattr(self.platform, 'automation', None)
@@ -253,28 +245,9 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
             auto_config.update(model_to_dict(automation))
         return auto_config

-    @property
-    def all_accounts(self):
-        if not self.joined_dir_svcs:
-            queryset = self.accounts.all()
-        else:
-            queryset = self.accounts.model.objects.filter(asset__in=[self.id, *self.joined_dir_svcs])
-        return queryset
-
-    @property
-    def dc_accounts(self):
-        queryset = self.accounts.model.objects.filter(asset__in=[*self.joined_dir_svcs])
-        return queryset
-
-    @lazyproperty
-    def all_valid_accounts(self):
-        queryset = (self.all_accounts.filter(is_active=True)
-                    .prefetch_related('asset', 'asset__platform'))
-        return queryset
-
     @lazyproperty
     def accounts_amount(self):
-        return self.all_accounts.count()
+        return self.accounts.count()

     def get_target_ip(self):
         return self.address
@@ -286,41 +259,6 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
         protocol = self.protocols.all().filter(name=protocol).first()
         return protocol.port if protocol else 0

-    def is_dir_svc(self):
-        return self.category == const.Category.DS
-
-    @property
-    def joined_dir_svcs(self):
-        return self.directory_services.all()
-
-    @classmethod
-    def compute_all_accounts_amount(cls, assets):
-        from .ds import DirectoryService
-        asset_ids = [asset.id for asset in assets]
-
-        asset_id_dc_ids_mapper = defaultdict(list)
-        dc_ids = set()
-        asset_dc_relations = (
-            Asset.directory_services.through.objects
-            .filter(asset_id__in=asset_ids)
-            .values_list('asset_id', 'directoryservice_id')
-        )
-        for asset_id, ds_id in asset_dc_relations:
-            dc_ids.add(ds_id)
-            asset_id_dc_ids_mapper[asset_id].append(ds_id)
-
-        directory_services = (
-            DirectoryService.objects.filter(id__in=dc_ids)
-            .annotate(accounts_amount=Count('accounts'))
-        )
-        ds_accounts_amount_mapper = {ds.id: ds.accounts_amount for ds in directory_services}
-        for asset in assets:
-            asset_dc_ids = asset_id_dc_ids_mapper.get(asset.id, [])
-            for dc_id in asset_dc_ids:
-                ds_accounts = ds_accounts_amount_mapper.get(dc_id, 0)
-                asset.accounts_amount += ds_accounts
-        return assets
-
     @property
     def is_valid(self):
         warning = ''
@@ -362,11 +300,11 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,

     @lazyproperty
     def gateway(self):
-        if not self.zone_id:
+        if not self.domain_id:
             return
-        if not self.platform.gateway_enabled:
+        if not self.platform.domain_enabled:
             return
-        return self.zone.select_gateway()
+        return self.domain.select_gateway()

     def as_node(self):
         from assets.models import Node
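
The removed compute_all_accounts_amount classmethod folds directory-service accounts into a per-asset accounts_amount attribute in bulk, rather than issuing one query per asset. A minimal usage sketch, assuming the caller first annotates each asset with its direct account count (the same annotation name the serializer uses below):

from django.db.models import Count

from assets.models import Asset

# Annotate direct accounts first; the classmethod then adds the accounts
# contributed by each asset's joined directory services.
assets = list(Asset.objects.annotate(accounts_amount=Count('accounts'))[:100])
assets = Asset.compute_all_accounts_amount(assets)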

View File

@@ -1,14 +0,0 @@
-from django.db import models
-from django.utils.translation import gettext_lazy as _
-
-from .common import Asset
-
-__all__ = ['DirectoryService']
-
-
-class DirectoryService(Asset):
-    domain_name = models.CharField(max_length=128, blank=True, default='', verbose_name=_("Domain name"))
-
-    class Meta:
-        default_related_name = "ds"
-        verbose_name = _("Directory service")

View File

@@ -23,28 +23,6 @@ class AbsConnectivity(models.Model):
         self.date_verified = timezone.now()
         self.save(update_fields=['connectivity', 'date_verified'])

-    @staticmethod
-    def get_err_connectivity(msg=None):
-        msg = (msg or '').strip().lower()
-        error_map = {
-            'permission denied': Connectivity.AUTH_ERR,
-            'authentication failed': Connectivity.AUTH_ERR,
-            'authentication failure': Connectivity.AUTH_ERR,
-            'is not in the sudoers file': Connectivity.SUDO_ERR,
-            'expected openssh key': Connectivity.OPENSSH_KEY_ERR,
-            'invalid/incorrect password': Connectivity.PASSWORD_ERR,
-            'failed to create directory': Connectivity.CREATE_DIR_ERR,
-            'ntlm: the specified credentials were rejected by the server': Connectivity.NTLM_ERR,
-        }
-        for key, value in error_map.items():
-            if key in msg:
-                return value
-        return Connectivity.ERR
-
     @property
     def is_connective(self):
         if self.connectivity == Connectivity.OK:
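
The removed get_err_connectivity helper maps substrings of an automation error message onto Connectivity values so callbacks can record a precise failure reason. A hedged sketch of a call site, assuming the setter shown at the top of this hunk is named set_connectivity and that result is a dict holding the task's error text:

# Hypothetical call site for the removed helper.
err_msg = result.get('msg', '') or result.get('stderr', '')
connectivity = AbsConnectivity.get_err_connectivity(err_msg)
asset.set_connectivity(connectivity)  # saves connectivity and date_verified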

View File

@@ -12,10 +12,10 @@ from .gateway import Gateway

 logger = get_logger(__file__)

-__all__ = ['Zone']
+__all__ = ['Domain']


-class Zone(LabeledMixin, JMSOrgBaseModel):
+class Domain(LabeledMixin, JMSOrgBaseModel):
     name = models.CharField(max_length=128, verbose_name=_('Name'))

     class Meta:
@@ -49,7 +49,7 @@ class Zone(LabeledMixin, JMSOrgBaseModel):

     @property
     def gateways(self):
-        queryset = self.get_gateway_queryset().filter(zone=self)
+        queryset = self.get_gateway_queryset().filter(domain=self)
         return queryset

     @classmethod

View File

@@ -101,8 +101,7 @@ class Platform(LabeledMixin, JMSBaseModel):
         default=CharsetChoices.utf8, choices=CharsetChoices.choices,
         max_length=8, verbose_name=_("Charset")
     )
-    gateway_enabled = models.BooleanField(default=True, verbose_name=_("Gateway enabled"))
-    ds_enabled = models.BooleanField(default=False, verbose_name=_("DS enabled"))
+    domain_enabled = models.BooleanField(default=True, verbose_name=_("Gateway enabled"))
     # account-related
     su_enabled = models.BooleanField(default=False, verbose_name=_("Su enabled"))
     su_method = models.CharField(max_length=32, blank=True, null=True, verbose_name=_("Su method"))
@@ -116,11 +115,6 @@ class Platform(LabeledMixin, JMSBaseModel):
     def assets_amount(self):
         return self.assets.count()

-    def save(self, *args, **kwargs):
-        if not self.ds_enabled:
-            self.ds = None
-        super().save(*args, **kwargs)
-
     @classmethod
     def default(cls):
         linux, created = cls.objects.get_or_create(

View File

@@ -4,7 +4,6 @@ from .common import *
 from .custom import *
 from .database import *
 from .device import *
-from .ds import *
 from .gpt import *
 from .host import *
 from .web import *

View File

@@ -147,20 +147,18 @@ class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, Writa
     protocols = AssetProtocolsSerializer(many=True, required=False, label=_('Protocols'), default=())
     accounts = AssetAccountSerializer(many=True, required=False, allow_null=True, write_only=True, label=_('Accounts'))
     nodes_display = NodeDisplaySerializer(read_only=False, required=False, label=_("Node path"))
-    platform = ObjectRelatedField(queryset=Platform.objects, required=True, label=_('Platform'),
-                                  attrs=('id', 'name', 'type'))
+    platform = ObjectRelatedField(queryset=Platform.objects, required=True, label=_('Platform'), attrs=('id', 'name', 'type'))
     accounts_amount = serializers.IntegerField(read_only=True, label=_('Accounts amount'))
     _accounts = None

     class Meta:
         model = Asset
-        fields_fk = ['zone', 'platform']
+        fields_fk = ['domain', 'platform']
         fields_mini = ['id', 'name', 'address'] + fields_fk
         fields_small = fields_mini + ['is_active', 'comment']
         fields_m2m = [
             'nodes', 'labels', 'protocols',
             'nodes_display', 'accounts',
-            'directory_services',
         ]
         read_only_fields = [
             'accounts_amount', 'category', 'type', 'connectivity', 'auto_config',
@@ -174,11 +172,6 @@ class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, Writa
             'address': {'label': _('Address')},
             'nodes_display': {'label': _('Node path')},
             'nodes': {'allow_empty': True, 'label': _("Nodes")},
-            'directory_services': {
-                'required': False,
-                'allow_empty': True,
-                'default': list, 'label': _("Directory service")
-            },
         }

     def __init__(self, *args, **kwargs):
@@ -233,11 +226,15 @@ class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, Writa
     @classmethod
     def setup_eager_loading(cls, queryset):
         """ Perform necessary eager loading of data. """
-        queryset = queryset.prefetch_related('zone', 'nodes', 'protocols', 'directory_services') \
+        queryset = queryset.prefetch_related('domain', 'nodes', 'protocols', ) \
             .prefetch_related('platform', 'platform__automation') \
             .annotate(category=F("platform__category")) \
             .annotate(type=F("platform__type")) \
             .annotate(accounts_amount=Count('accounts'))
+        if queryset.model is Asset:
+            queryset = queryset.prefetch_related('labels__label', 'labels')
+        else:
+            queryset = queryset.prefetch_related('asset_ptr__labels__label', 'asset_ptr__labels')
         return queryset

     @staticmethod
@@ -271,9 +268,9 @@ class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, Writa
             raise serializers.ValidationError({'platform': _("Platform not exist")})
         return platform

-    def validate_zone(self, value):
+    def validate_domain(self, value):
         platform = self._asset_platform
-        if platform.gateway_enabled:
+        if platform.domain_enabled:
             return value
         else:
             return None
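
setup_eager_loading exists so list endpoints can prefetch related rows and annotate counts before serialization, keeping the response at a handful of queries instead of one per asset. An illustrative sketch of wiring it into a DRF view (the view class and import paths here are assumptions, not taken from this diff):

from rest_framework.generics import ListAPIView

from assets.models import Asset
from assets.serializers import AssetSerializer


class AssetListApi(ListAPIView):
    serializer_class = AssetSerializer

    def get_queryset(self):
        # Prefetch platform/nodes/protocols/labels and annotate accounts_amount
        # before the serializer touches each row.
        return AssetSerializer.setup_eager_loading(Asset.objects.all())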

View File

@@ -1,22 +0,0 @@
-from django.utils.translation import gettext_lazy as _
-
-from assets.models import DirectoryService
-from .common import AssetSerializer
-
-__all__ = ['DSSerializer']
-
-
-class DSSerializer(AssetSerializer):
-    class Meta(AssetSerializer.Meta):
-        model = DirectoryService
-        fields = AssetSerializer.Meta.fields + [
-            'domain_name',
-        ]
-        extra_kwargs = {
-            **AssetSerializer.Meta.extra_kwargs,
-            'domain_name': {
-                'help_text': _('The domain part used by the directory service (e.g., AD) and appended to '
-                               'the username during login, such as example.com in user@example.com.'),
-                'label': _('Domain name')
-            }
-        }

View File

@@ -6,7 +6,7 @@ class HostGatheredInfoSerializer(serializers.Serializer):
     vendor = serializers.CharField(max_length=64, required=False, allow_blank=True, label=_('Vendor'))
     model = serializers.CharField(max_length=54, required=False, allow_blank=True, label=_('Model'))
     sn = serializers.CharField(max_length=128, required=False, allow_blank=True, label=_('Serial number'))
-    cpu_model = serializers.CharField(allow_blank=True, required=False, label=_('CPU model'))
+    cpu_model = serializers.CharField(max_length=64, allow_blank=True, required=False, label=_('CPU model'))
     cpu_count = serializers.CharField(max_length=64, required=False, allow_blank=True, label=_('CPU count'))
     cpu_cores = serializers.CharField(max_length=64, required=False, allow_blank=True, label=_('CPU cores'))
     cpu_vcpus = serializers.CharField(max_length=64, required=False, allow_blank=True, label=_('CPU vcpus'))
@@ -17,10 +17,7 @@ class HostGatheredInfoSerializer(serializers.Serializer):
     distribution_version = serializers.CharField(max_length=16, allow_blank=True, required=False, label=_('OS version'))
     arch = serializers.CharField(max_length=16, allow_blank=True, required=False, label=_('OS arch'))
-    gpu_model = serializers.CharField(allow_blank=True, required=False, label=_('GPU model'))


 category_gathered_serializer_map = {
     'host': HostGatheredInfoSerializer,
-    'ds': HostGatheredInfoSerializer,
 }

Some files were not shown because too many files have changed in this diff.