Compare commits


33 Commits
dev ... v4.8.0

Author SHA1 Message Date
Bai
6d66ba5288 perf: keep top 2025-03-27 17:51:22 +08:00
Bryan
ad5460dab8
Merge pull request #15086 from jumpserver/dev
v4.8.0
2025-03-20 18:44:44 +08:00
Bryan
4d37dca0de
Merge pull request #14901 from jumpserver/dev
v4.7.0
2025-02-20 10:21:16 +08:00
Bryan
2ca4002624
Merge pull request #14813 from jumpserver/dev
v4.6.0
2025-01-15 14:38:17 +08:00
Bryan
053d640e4c
Merge pull request #14699 from jumpserver/dev
v4.5.0
2024-12-19 16:04:45 +08:00
Bryan
f3acc28ded
Merge pull request #14697 from jumpserver/dev
v4.5.0
2024-12-19 15:57:11 +08:00
Bryan
25987545db
Merge pull request #14511 from jumpserver/dev
v4.4.0
2024-11-21 19:00:35 +08:00
Bryan
6720ecc6e0
Merge pull request #14319 from jumpserver/dev
v4.3.0
2024-10-17 14:55:38 +08:00
老广
0b3a7bb020
Merge pull request #14203 from jumpserver/dev
merge: from dev to master
2024-09-19 19:37:19 +08:00
Bryan
56373e362b
Merge pull request #13988 from jumpserver/dev
v4.1.0
2024-08-16 18:40:35 +08:00
Bryan
02fc045370
Merge pull request #13600 from jumpserver/dev
v4.0.0
2024-07-03 19:04:35 +08:00
Bryan
e4ac73896f
Merge pull request #13452 from jumpserver/dev
v3.10.11-lts
2024-06-19 16:01:26 +08:00
Bryan
1518f792d6
Merge pull request #13236 from jumpserver/dev
v3.10.10-lts
2024-05-16 16:04:07 +08:00
Bai
67277dd622 fix: fix the issue where dashboard session ranking counts were all 1 2024-04-22 19:42:33 +08:00
Bryan
82e7f020ea
Merge pull request #13094 from jumpserver/dev
v3.10.9 (dev to master)
2024-04-22 19:39:53 +08:00
Bryan
f20b9e01ab
Merge pull request #13062 from jumpserver/dev
v3.10.8 dev to master
2024-04-18 18:01:20 +08:00
Bryan
8cf8a3701b
Merge pull request #13059 from jumpserver/dev
v3.10.8
2024-04-18 17:16:37 +08:00
Bryan
7ba24293d1
Merge pull request #12736 from jumpserver/pr@dev@master_fix
fix: resolve conflicts
2024-02-29 16:38:43 +08:00
Bai
f10114c9ed fix: resolve conflicts 2024-02-29 16:37:10 +08:00
Bryan
cf31cbfb07
Merge pull request #12729 from jumpserver/dev
v3.10.4
2024-02-29 16:19:59 +08:00
wangruidong
0edad24d5d fix: asset expiration notification message failed to send 2024-02-04 11:41:48 +08:00
ibuler
1f1c1a9157 fix: fix the scheduled task for checking whether users are active failing to run 2024-01-23 09:28:38 +00:00
feng
6c9d271ae1 fix: celery beat fails to start when the Redis password contains special characters 2024-01-22 06:18:34 +00:00
Bai
6ff852e225 perf: fix missing deduplication when counting 2024-01-22 06:16:25 +00:00
Bryan
baa75dc735
Merge pull request #12566 from jumpserver/master
v3.10.2
2024-01-17 07:34:28 -04:00
Bryan
8a9f0436b8
Merge pull request #12565 from jumpserver/dev
v3.10.2
2024-01-17 07:23:30 -04:00
Bryan
a9620a3cbe
Merge pull request #12461 from jumpserver/master
v3.10.1
2023-12-29 11:33:05 +05:00
Bryan
769e7dc8a0
Merge pull request #12460 from jumpserver/dev
v3.10.1
2023-12-29 11:20:36 +05:00
Bryan
2a70449411
Merge pull request #12458 from jumpserver/dev
v3.10.1
2023-12-29 11:01:13 +05:00
Bryan
8df720f19e
Merge pull request #12401 from jumpserver/dev
v3.10
2023-12-21 15:14:19 +05:00
老广
dabbb45f6e
Merge pull request #12144 from jumpserver/dev
v3.9.0
2023-11-16 18:23:05 +08:00
Bryan
ce24c1c3fd
Merge pull request #11914 from jumpserver/dev
v3.8.0
2023-10-19 03:37:39 -05:00
Bryan
3c54c82ce9
Merge pull request #11636 from jumpserver/dev
v3.7.0
2023-09-21 17:02:48 +08:00
271 changed files with 16352 additions and 25570 deletions

View File

@ -9,5 +9,3 @@ celerybeat.pid
apps/xpack/.git
.history/
.idea
.venv/
.env

.gitattributes vendored (4 changes)
View File

@ -0,0 +1,4 @@
*.mmdb filter=lfs diff=lfs merge=lfs -text
*.mo filter=lfs diff=lfs merge=lfs -text
*.ipdb filter=lfs diff=lfs merge=lfs -text
leak_passwords.db filter=lfs diff=lfs merge=lfs -text

View File

@ -1,10 +0,0 @@
version: 2
updates:
- package-ecosystem: "uv"
directory: "/"
schedule:
interval: "weekly"
day: "monday"
time: "09:30"
timezone: "Asia/Shanghai"
target-branch: dev

View File

@ -2,14 +2,10 @@ name: Translate README
on:
workflow_dispatch:
inputs:
source_readme:
description: "Source README"
required: false
default: "./readmes/README.en.md"
target_langs:
description: "Target Languages"
required: false
default: "zh-hans,zh-hant,ja,pt-br,es,ru"
default: "zh-hans,zh-hant,ja,pt-br"
gen_dir_path:
description: "Generate Dir Name"
required: false
@ -38,7 +34,6 @@ jobs:
GITHUB_TOKEN: ${{ secrets.PRIVATE_TOKEN }}
OPENAI_API_KEY: ${{ secrets.GPT_API_TOKEN }}
GPT_MODE: ${{ github.event.inputs.gpt_mode }}
SOURCE_README: ${{ github.event.inputs.source_readme }}
TARGET_LANGUAGES: ${{ github.event.inputs.target_langs }}
PUSH_BRANCH: ${{ github.event.inputs.push_branch }}
GEN_DIR_PATH: ${{ github.event.inputs.gen_dir_path }}

.gitignore vendored (3 changes)
View File

@ -46,6 +46,3 @@ test.py
.test/
*.mo
apps.iml
*.db
*.mmdb
*.ipdb

View File

@ -1,4 +1,4 @@
FROM jumpserver/core-base:20250427_062456 AS stage-build
FROM jumpserver/core-base:20250224_065619 AS stage-build
ARG VERSION

View File

@ -1,6 +1,6 @@
FROM python:3.11-slim-bullseye
ARG TARGETARCH
COPY --from=ghcr.io/astral-sh/uv:0.6.14 /uv /uvx /usr/local/bin/
# Install APT dependencies
ARG DEPENDENCIES=" \
ca-certificates \
@ -43,19 +43,18 @@ WORKDIR /opt/jumpserver
ARG PIP_MIRROR=https://pypi.org/simple
ENV POETRY_PYPI_MIRROR_URL=${PIP_MIRROR}
ENV ANSIBLE_COLLECTIONS_PATHS=/opt/py3/lib/python3.11/site-packages/ansible_collections
ENV LANG=en_US.UTF-8 \
PATH=/opt/py3/bin:$PATH
ENV UV_LINK_MODE=copy
RUN --mount=type=cache,target=/root/.cache \
--mount=type=bind,source=poetry.lock,target=poetry.lock \
--mount=type=bind,source=pyproject.toml,target=pyproject.toml \
--mount=type=bind,source=requirements/clean_site_packages.sh,target=clean_site_packages.sh \
--mount=type=bind,source=utils/clean_site_packages.sh,target=clean_site_packages.sh \
--mount=type=bind,source=requirements/collections.yml,target=collections.yml \
--mount=type=bind,source=requirements/static_files.sh,target=utils/static_files.sh \
set -ex \
&& uv venv \
&& uv pip install -i${PIP_MIRROR} -r pyproject.toml \
&& ln -sf $(pwd)/.venv /opt/py3 \
&& bash utils/static_files.sh \
&& bash clean_site_packages.sh
&& python3 -m venv /opt/py3 \
&& pip install poetry poetry-plugin-pypi-mirror -i ${PIP_MIRROR} \
&& . /opt/py3/bin/activate \
&& poetry config virtualenvs.create false \
&& poetry install --no-cache --only main \
&& ansible-galaxy collection install -r collections.yml --force --ignore-certs \
&& bash clean_site_packages.sh \
&& poetry cache clear pypi --all

View File

@ -24,7 +24,11 @@ RUN set -ex \
WORKDIR /opt/jumpserver
ARG PIP_MIRROR=https://pypi.org/simple
ENV POETRY_PYPI_MIRROR_URL=${PIP_MIRROR}
COPY poetry.lock pyproject.toml ./
RUN set -ex \
&& uv pip install -i${PIP_MIRROR} --group xpack
&& . /opt/py3/bin/activate \
&& pip install poetry poetry-plugin-pypi-mirror -i ${PIP_MIRROR} \
&& poetry install --only xpack \
&& poetry cache clear pypi --all

View File

@ -1,18 +1,16 @@
<div align="center">
<a name="readme-top"></a>
<a href="https://jumpserver.com" target="_blank"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a>
<a href="https://jumpserver.org/index-en.html"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a>
## An open-source PAM tool (Bastion Host)
[![][license-shield]][license-link]
[![][docs-shield]][docs-link]
[![][deepwiki-shield]][deepwiki-link]
[![][discord-shield]][discord-link]
[![][docker-shield]][docker-link]
[![][github-release-shield]][github-release-link]
[![][github-stars-shield]][github-stars-link]
[English](/README.md) · [中文(简体)](/readmes/README.zh-hans.md) · [中文(繁體)](/readmes/README.zh-hant.md) · [日本語](/readmes/README.ja.md) · [Português (Brasil)](/readmes/README.pt-br.md) · [Español](/readmes/README.es.md) · [Русский](/readmes/README.ru.md)
[English](/README.md) · [中文(简体)](/readmes/README.zh-hans.md) · [中文(繁體)](/readmes/README.zh-hant.md) · [日本語](/readmes/README.ja.md) · [Português (Brasil)](/readmes/README.pt-br.md)
</div>
<br/>
@ -21,13 +19,7 @@
JumpServer is an open-source Privileged Access Management (PAM) tool that provides DevOps and IT teams with on-demand and secure access to SSH, RDP, Kubernetes, Database and RemoteApp endpoints through a web browser.
<picture>
<source media="(prefers-color-scheme: light)" srcset="https://github.com/user-attachments/assets/dd612f3d-c958-4f84-b164-f31b75454d7f">
<source media="(prefers-color-scheme: dark)" srcset="https://github.com/user-attachments/assets/28676212-2bc4-4a9f-ae10-3be9320647e3">
<img src="https://github.com/user-attachments/assets/dd612f3d-c958-4f84-b164-f31b75454d7f" alt="Theme-based Image">
</picture>
![JumpServer Overview](https://github.com/jumpserver/jumpserver/assets/32935519/35a371cb-8590-40ed-88ec-f351f8cf9045)
## Quickstart
@ -44,19 +36,18 @@ Access JumpServer in your browser at `http://your-jumpserver-ip/`
[![JumpServer Quickstart](https://github.com/user-attachments/assets/0f32f52b-9935-485e-8534-336c63389612)](https://www.youtube.com/watch?v=UlGYRbKrpgY "JumpServer Quickstart")
## Screenshots
<table style="border-collapse: collapse; border: 1px solid black;">
<tr>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/99fabe5b-0475-4a53-9116-4c370a1426c4" alt="JumpServer Console" /></td>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/user-attachments/assets/7c1f81af-37e8-4f07-8ac9-182895e1062e" alt="JumpServer PAM" /></td>    
</tr>
<tr>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/a424d731-1c70-4108-a7d8-5bbf387dda9a" alt="JumpServer Audits" /></td>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/393d2c27-a2d0-4dea-882d-00ed509e00c9" alt="JumpServer Workbench" /></td>
</tr>
<tr>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/user-attachments/assets/eaa41f66-8cc8-4f01-a001-0d258501f1c9" alt="JumpServer RBAC" /></td>     
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/393d2c27-a2d0-4dea-882d-00ed509e00c9" alt="JumpServer Workbench" /></td>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/3a2611cd-8902-49b8-b82b-2a6dac851f3e" alt="JumpServer Settings" /></td>
</tr>
<tr>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/1e236093-31f7-4563-8eb1-e36d865f1568" alt="JumpServer SSH" /></td>
<td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/69373a82-f7ab-41e8-b763-bbad2ba52167" alt="JumpServer RDP" /></td>
@ -78,9 +69,9 @@ JumpServer consists of multiple key components, which collectively form the func
| [KoKo](https://github.com/jumpserver/koko) | <a href="https://github.com/jumpserver/koko/releases"><img alt="Koko release" src="https://img.shields.io/github/release/jumpserver/koko.svg" /></a> | JumpServer Character Protocol Connector |
| [Lion](https://github.com/jumpserver/lion) | <a href="https://github.com/jumpserver/lion/releases"><img alt="Lion release" src="https://img.shields.io/github/release/jumpserver/lion.svg" /></a> | JumpServer Graphical Protocol Connector |
| [Chen](https://github.com/jumpserver/chen) | <a href="https://github.com/jumpserver/chen/releases"><img alt="Chen release" src="https://img.shields.io/github/release/jumpserver/chen.svg" /> | JumpServer Web DB |
| [Tinker](https://github.com/jumpserver/tinker) | <img alt="Tinker" src="https://img.shields.io/badge/release-private-red" /> | JumpServer Remote Application Connector (Windows) |
| [Panda](https://github.com/jumpserver/Panda) | <img alt="Panda" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Remote Application Connector (Linux) |
| [Razor](https://github.com/jumpserver/razor) | <img alt="Chen" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE RDP Proxy Connector |
| [Tinker](https://github.com/jumpserver/tinker) | <img alt="Tinker" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Remote Application Connector (Windows) |
| [Panda](https://github.com/jumpserver/Panda) | <img alt="Panda" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Remote Application Connector (Linux) |
| [Magnus](https://github.com/jumpserver/magnus) | <img alt="Magnus" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Database Proxy Connector |
| [Nec](https://github.com/jumpserver/nec) | <img alt="Nec" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE VNC Proxy Connector |
| [Facelive](https://github.com/jumpserver/facelive) | <img alt="Facelive" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Facial Recognition |
@ -90,6 +81,12 @@ JumpServer consists of multiple key components, which collectively form the func
Welcome to submit PR to contribute. Please refer to [CONTRIBUTING.md][contributing-link] for guidelines.
## Security
JumpServer is a mission critical product. Please refer to the Basic Security Recommendations for installation and deployment. If you encounter any security-related issues, please contact us directly:
- Email: support@fit2cloud.com
## License
Copyright (c) 2014-2025 FIT2CLOUD, All rights reserved.
@ -103,7 +100,6 @@ Unless required by applicable law or agreed to in writing, software distributed
<!-- JumpServer official link -->
[docs-link]: https://jumpserver.com/docs
[discord-link]: https://discord.com/invite/W6vYXmAQG2
[deepwiki-link]: https://deepwiki.com/jumpserver/jumpserver/
[contributing-link]: https://github.com/jumpserver/jumpserver/blob/dev/CONTRIBUTING.md
<!-- JumpServer Other link-->
@ -114,10 +110,10 @@ Unless required by applicable law or agreed to in writing, software distributed
[github-issues-link]: https://github.com/jumpserver/jumpserver/issues
<!-- Shield link-->
[docs-shield]: https://img.shields.io/badge/documentation-148F76
[github-release-shield]: https://img.shields.io/github/v/release/jumpserver/jumpserver
[github-stars-shield]: https://img.shields.io/github/stars/jumpserver/jumpserver?color=%231890FF&style=flat-square   
[github-stars-shield]: https://img.shields.io/github/stars/jumpserver/jumpserver?color=%231890FF&style=flat-square
[docker-shield]: https://img.shields.io/docker/pulls/jumpserver/jms_all.svg
[license-shield]: https://img.shields.io/github/license/jumpserver/jumpserver
[deepwiki-shield]: https://img.shields.io/badge/deepwiki-devin?color=blue
[discord-shield]: https://img.shields.io/discord/1194233267294052363?style=flat&logo=discord&logoColor=%23f5f5f5&labelColor=%235462eb&color=%235462eb
<!-- Image link -->

View File

@ -5,7 +5,8 @@ JumpServer 是一款正在成长的安全产品, 请参考 [基本安全建议
如果你发现安全问题,请直接联系我们,我们携手让世界更好:
- ibuler@fit2cloud.com
- support@lxware.hk
- support@fit2cloud.com
- 400-052-0755
# Security Policy
@ -15,5 +16,6 @@ JumpServer is a security product, The installation and development should follow
All security bugs should be reported to the contact as below:
- ibuler@fit2cloud.com
- support@lxware.hk
- support@fit2cloud.com
- 400-052-0755

View File

@ -46,16 +46,6 @@ class AccountViewSet(OrgBulkModelViewSet):
}
export_as_zip = True
def get_queryset(self):
queryset = super().get_queryset()
asset_id = self.request.query_params.get('asset') or self.request.query_params.get('asset_id')
if not asset_id:
return queryset
asset = get_object_or_404(Asset, pk=asset_id)
queryset = asset.all_accounts.all()
return queryset
@action(methods=['get'], detail=False, url_path='su-from-accounts')
def su_from_accounts(self, request, *args, **kwargs):
account_id = request.query_params.get('account')
@ -127,7 +117,7 @@ class AccountViewSet(OrgBulkModelViewSet):
self.model.objects.create(**account_data)
success_count += 1
except Exception as e:
logger.debug(f'{"Move" if move else "Copy"} to assets error: {e}')
logger.debug(f'{ "Move" if move else "Copy" } to assets error: {e}')
creation_results[asset] = {'error': _('Account already exists'), 'state': 'error'}
results = [{'asset': str(asset), **res} for asset, res in creation_results.items()]

View File

@ -62,7 +62,8 @@ class IntegrationApplicationViewSet(OrgBulkModelViewSet):
)
def get_once_secret(self, request, *args, **kwargs):
instance = self.get_object()
return Response(data={'id': instance.id, 'secret': instance.secret})
secret = instance.get_secret()
return Response(data={'id': instance.id, 'secret': secret})
@action(['GET'], detail=False, url_path='account-secret',
permission_classes=[RBACPermission])

View File

@ -17,7 +17,7 @@ from orgs.mixins import generics
__all__ = [
'AutomationAssetsListApi', 'AutomationRemoveAssetApi',
'AutomationAddAssetApi', 'AutomationNodeAddRemoveApi',
'AutomationExecutionViewSet'
'AutomationExecutionViewSet', 'RecordListMixin'
]
@ -39,10 +39,9 @@ class AutomationAssetsListApi(generics.ListAPIView):
return assets
class AutomationRemoveAssetApi(generics.UpdateAPIView):
class AutomationRemoveAssetApi(generics.RetrieveUpdateAPIView):
model = BaseAutomation
serializer_class = serializers.UpdateAssetSerializer
http_method_names = ['patch']
def update(self, request, *args, **kwargs):
instance = self.get_object()
@ -57,10 +56,9 @@ class AutomationRemoveAssetApi(generics.UpdateAPIView):
return Response({'msg': 'ok'})
class AutomationAddAssetApi(generics.UpdateAPIView):
class AutomationAddAssetApi(generics.RetrieveUpdateAPIView):
model = BaseAutomation
serializer_class = serializers.UpdateAssetSerializer
http_method_names = ['patch']
def update(self, request, *args, **kwargs):
instance = self.get_object()
@ -74,10 +72,9 @@ class AutomationAddAssetApi(generics.UpdateAPIView):
return Response({"error": serializer.errors})
class AutomationNodeAddRemoveApi(generics.UpdateAPIView):
class AutomationNodeAddRemoveApi(generics.RetrieveUpdateAPIView):
model = BaseAutomation
serializer_class = serializers.UpdateNodeSerializer
http_method_names = ['patch']
def update(self, request, *args, **kwargs):
action_params = ['add', 'remove']
@ -127,3 +124,12 @@ class AutomationExecutionViewSet(
execution = self.get_object()
report = execution.manager.gen_report()
return HttpResponse(report)
class RecordListMixin:
def list(self, request, *args, **kwargs):
try:
response = super().list(request, *args, **kwargs)
except Exception as e:
response = Response({'detail': str(e)}, status=status.HTTP_400_BAD_REQUEST)
return response

View File

@ -16,7 +16,7 @@ from orgs.mixins.api import OrgBulkModelViewSet, OrgGenericViewSet
from rbac.permissions import RBACPermission
from .base import (
AutomationAssetsListApi, AutomationRemoveAssetApi, AutomationAddAssetApi,
AutomationNodeAddRemoveApi, AutomationExecutionViewSet
AutomationNodeAddRemoveApi, AutomationExecutionViewSet, RecordListMixin
)
__all__ = [
@ -35,7 +35,7 @@ class ChangeSecretAutomationViewSet(OrgBulkModelViewSet):
serializer_class = serializers.ChangeSecretAutomationSerializer
class ChangeSecretRecordViewSet(mixins.ListModelMixin, OrgGenericViewSet):
class ChangeSecretRecordViewSet(RecordListMixin, mixins.ListModelMixin, OrgGenericViewSet):
filterset_class = ChangeSecretRecordFilterSet
permission_classes = [RBACPermission, IsValidLicense]
search_fields = ('asset__address', 'account__username')

View File

@ -147,7 +147,6 @@ class CheckAccountEngineViewSet(JMSModelViewSet):
serializer_class = serializers.CheckAccountEngineSerializer
permission_classes = [RBACPermission, IsValidLicense]
perm_model = CheckAccountEngine
http_method_names = ['get', 'options']
def get_queryset(self):
return CheckAccountEngine.get_default_engines()

View File

@ -9,7 +9,7 @@ from accounts.models import PushAccountAutomation, PushSecretRecord
from orgs.mixins.api import OrgBulkModelViewSet, OrgGenericViewSet
from .base import (
AutomationAssetsListApi, AutomationRemoveAssetApi, AutomationAddAssetApi,
AutomationNodeAddRemoveApi, AutomationExecutionViewSet
AutomationNodeAddRemoveApi, AutomationExecutionViewSet, RecordListMixin
)
__all__ = [
@ -42,7 +42,7 @@ class PushAccountExecutionViewSet(AutomationExecutionViewSet):
return queryset
class PushAccountRecordViewSet(mixins.ListModelMixin, OrgGenericViewSet):
class PushAccountRecordViewSet(RecordListMixin, mixins.ListModelMixin, OrgGenericViewSet):
filterset_class = PushAccountRecordFilterSet
search_fields = ('asset__address', 'account__username')
ordering_fields = ('date_finished',)

View File

@ -69,7 +69,7 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
return
asset = privilege_account.asset
accounts = asset.all_accounts.all()
accounts = asset.accounts.all()
accounts = accounts.filter(id__in=self.account_ids, secret_reset=True)
if self.secret_type:
@ -94,7 +94,6 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
h['account'] = {
'name': account.name,
'username': account.username,
'full_username': account.full_username,
'secret_type': secret_type,
'secret': account.escape_jinja2_syntax(new_secret),
'private_key_path': private_key_path,

View File

@ -41,7 +41,6 @@
password: "{{ account.secret | password_hash('des') }}"
update_password: always
ignore_errors: true
register: change_secret_result
when: account.secret_type == "password"
- name: "Get home directory for {{ account.username }}"
@ -84,7 +83,6 @@
user: "{{ account.username }}"
key: "{{ account.secret }}"
exclusive: "{{ ssh_params.exclusive }}"
register: change_secret_result
when: account.secret_type == "ssh_key"
- name: Refresh connection
@ -103,9 +101,7 @@
become_password: "{{ account.become.ansible_password | default('') }}"
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when:
- account.secret_type == "password"
- check_conn_after_change or change_secret_result.failed | default(false)
when: account.secret_type == "password" and check_conn_after_change
delegate_to: localhost
- name: "Verify {{ account.username }} SSH KEY (paramiko)"
@ -116,7 +112,5 @@
login_private_key_path: "{{ account.private_key_path }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when:
- account.secret_type == "ssh_key"
- check_conn_after_change or change_secret_result.failed | default(false)
when: account.secret_type == "ssh_key" and check_conn_after_change
delegate_to: localhost

View File

@ -41,7 +41,6 @@
password: "{{ account.secret | password_hash('sha512') }}"
update_password: always
ignore_errors: true
register: change_secret_result
when: account.secret_type == "password"
- name: "Get home directory for {{ account.username }}"
@ -84,7 +83,6 @@
user: "{{ account.username }}"
key: "{{ account.secret }}"
exclusive: "{{ ssh_params.exclusive }}"
register: change_secret_result
when: account.secret_type == "ssh_key"
- name: Refresh connection
@ -103,9 +101,7 @@
become_password: "{{ account.become.ansible_password | default('') }}"
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when:
- account.secret_type == "password"
- check_conn_after_change or change_secret_result.failed | default(false)
when: account.secret_type == "password" and check_conn_after_change
delegate_to: localhost
- name: "Verify {{ account.username }} SSH KEY (paramiko)"
@ -116,7 +112,5 @@
login_private_key_path: "{{ account.private_key_path }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when:
- account.secret_type == "ssh_key"
- check_conn_after_change or change_secret_result.failed | default(false)
when: account.secret_type == "ssh_key" and check_conn_after_change
delegate_to: localhost

View File

@ -1,27 +0,0 @@
- hosts: demo
gather_facts: no
tasks:
- name: Test privileged account
ansible.windows.win_ping:
- name: Change password
community.windows.win_domain_user:
name: "{{ account.username }}"
password: "{{ account.secret }}"
update_password: always
password_never_expires: yes
state: present
groups: "{{ params.groups }}"
groups_action: add
ignore_errors: true
when: account.secret_type == "password"
- name: Refresh connection
ansible.builtin.meta: reset_connection
- name: Verify password
ansible.windows.win_ping:
vars:
ansible_user: "{{ account.full_username }}"
ansible_password: "{{ account.secret }}"
when: account.secret_type == "password" and check_conn_after_change

View File

@ -1,27 +0,0 @@
id: change_secret_ad_windows
name: "{{ 'Windows account change secret' | trans }}"
version: 1
method: change_secret
category:
- ds
type:
- windows_ad
params:
- name: groups
type: str
label: '用户组'
default: 'Users,Remote Desktop Users'
help_text: "{{ 'Params groups help text' | trans }}"
i18n:
Windows account change secret:
zh: '使用 Ansible 模块 win_domain_user 执行 Windows 账号改密'
ja: 'Ansible win_domain_user モジュールを使用して Windows アカウントのパスワード変更'
en: 'Using Ansible module win_domain_user to change Windows account secret'
Params groups help text:
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

View File

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a2805a0264fc07ae597704841ab060edef8bf74654f525bc778cb9195d8cad0e
size 2547712

View File

@ -13,7 +13,6 @@ def parse_date(date_str, default=None):
formats = [
'%Y/%m/%d %H:%M:%S',
'%Y-%m-%dT%H:%M:%S',
'%Y-%m-%d %H:%M:%S',
'%d-%m-%Y %H:%M:%S',
'%Y/%m/%d',
'%d-%m-%Y',
@ -27,6 +26,7 @@ def parse_date(date_str, default=None):
return default
# TODO 后期会挪到 playbook 中
class GatherAccountsFilter:
def __init__(self, tp):
self.tp = tp
@ -208,35 +208,14 @@ class GatherAccountsFilter:
key, value = parts
user_info[key.strip()] = value.strip()
detail = {'groups': user_info.get('Global Group memberships', ''), }
username = user_info.get('User name')
if not username:
continue
result[username] = {
'username': username,
'date_password_change': parse_date(user_info.get('Password last set')),
'date_password_expired': parse_date(user_info.get('Password expires')),
'date_last_login': parse_date(user_info.get('Last logon')),
'groups': detail,
}
return result
@staticmethod
def windows_ad_filter(info):
result = {}
for user_info in info['user_details']:
detail = {'groups': user_info.get('GlobalGroupMemberships', ''), }
username = user_info.get('SamAccountName')
if not username:
continue
result[username] = {
'username': username,
'date_password_change': parse_date(user_info.get('PasswordLastSet')),
'date_password_expired': parse_date(user_info.get('PasswordExpires')),
'date_last_login': parse_date(user_info.get('LastLogonDate')),
user = {
'username': user_info.get('User name', ''),
'date_password_change': parse_date(user_info.get('Password last set', '')),
'date_password_expired': parse_date(user_info.get('Password expires', '')),
'date_last_login': parse_date(user_info.get('Last logon', '')),
'groups': detail,
}
result[user['username']] = user
return result
@staticmethod

View File

@ -4,7 +4,6 @@
- name: Run net user command to get all users
win_shell: net user
register: user_list_output
failed_when: false
- name: Parse all users from net user command
set_fact:

View File

@ -2,13 +2,10 @@ id: gather_accounts_windows
name: "{{ 'Windows account gather' | trans }}"
version: 1
method: gather_accounts
category:
- host
category: host
type:
- windows
i18n:
Windows account gather:
zh: 使用命令 net user 收集 Windows 账号

View File

@ -1,74 +0,0 @@
- hosts: demo
gather_facts: no
tasks:
- name: Import ActiveDirectory module
win_shell: Import-Module ActiveDirectory
args:
warn: false
- name: Get the SamAccountName list of all AD users
win_shell: |
Import-Module ActiveDirectory
Get-ADUser -Filter * | Select-Object -ExpandProperty SamAccountName
register: ad_user_list
- name: Set the all_users variable
set_fact:
all_users: "{{ ad_user_list.stdout_lines }}"
- name: Get detailed information for each user
win_shell: |
Import-Module ActiveDirectory
$user = Get-ADUser -Identity {{ item }} -Properties Name, SamAccountName, Enabled, LastLogonDate, PasswordLastSet, msDS-UserPasswordExpiryTimeComputed, MemberOf
$globalGroups = @()
if ($user.MemberOf) {
$globalGroups = $user.MemberOf | ForEach-Object {
try {
$group = Get-ADGroup $_ -ErrorAction Stop
if ($group.GroupScope -eq 'Global') { $group.Name }
} catch {
}
}
}
$passwordExpiry = $null
$expiryRaw = $user.'msDS-UserPasswordExpiryTimeComputed'
if ($expiryRaw) {
try {
$passwordExpiry = [datetime]::FromFileTime($expiryRaw)
} catch {
$passwordExpiry = $null
}
}
$output = [PSCustomObject]@{
Name = $user.Name
SamAccountName = $user.SamAccountName
Enabled = $user.Enabled
LastLogonDate = if ($user.LastLogonDate) { $user.LastLogonDate.ToString("yyyy-MM-dd HH:mm:ss") } else { $null }
PasswordLastSet = if ($user.PasswordLastSet) { $user.PasswordLastSet.ToString("yyyy-MM-dd HH:mm:ss") } else { $null }
PasswordExpires = if ($passwordExpiry) { $passwordExpiry.ToString("yyyy-MM-dd HH:mm:ss") } else { $null }
GlobalGroupMemberships = $globalGroups
}
$output | ConvertTo-Json -Depth 3
loop: "{{ all_users }}"
register: ad_user_details
ignore_errors: yes
- set_fact:
info:
user_details: >-
{{
ad_user_details.results
| selectattr('rc', 'equalto', 0)
| map(attribute='stdout')
| select('truthy')
| map('from_json')
}}
- debug:
var: info

View File

@ -1,15 +0,0 @@
id: gather_accounts_windows_ad
name: "{{ 'Windows account gather' | trans }}"
version: 1
method: gather_accounts
category:
- ds
type:
- windows_ad
i18n:
Windows account gather:
zh: 使用命令 Get-ADUser 收集 Windows 账号
ja: コマンド Get-ADUser を使用して Windows アカウントを収集する
en: Using command Get-ADUser to gather accounts

View File

@ -1,6 +1,6 @@
import time
from collections import defaultdict
import time
from django.utils import timezone
from accounts.const import AutomationTypes
@ -222,7 +222,6 @@ class GatherAccountsManager(AccountBasePlaybookManager):
def _collect_asset_account_info(self, asset, info):
result = self._filter_success_result(asset.type, info)
accounts = []
for username, info in result.items():
self.asset_usernames_mapper[str(asset.id)].add(username)
@ -374,7 +373,6 @@ class GatherAccountsManager(AccountBasePlaybookManager):
for asset, accounts_data in self.asset_account_info.items():
ori_users = self.ori_asset_usernames[str(asset.id)]
need_analyser_gather_account = []
with tmp_to_org(asset.org_id):
for d in accounts_data:
username = d["username"]
@ -387,11 +385,10 @@ class GatherAccountsManager(AccountBasePlaybookManager):
ga = ori_account
self.update_gathered_account(ori_account, d)
ori_found = username in ori_users
need_analyser_gather_account.append((asset, ga, d, ori_found))
risk_analyser.analyse_risk(asset, ga, d, ori_found)
self.create_gathered_account.finish()
self.update_gathered_account.finish()
for analysis_data in need_analyser_gather_account:
risk_analyser.analyse_risk(*analysis_data)
self.update_gather_accounts_status(asset)
if not self.is_sync_account:
continue

View File

@ -41,7 +41,6 @@
password: "{{ account.secret | password_hash('des') }}"
update_password: always
ignore_errors: true
register: change_secret_result
when: account.secret_type == "password"
- name: "Get home directory for {{ account.username }}"
@ -84,7 +83,6 @@
user: "{{ account.username }}"
key: "{{ account.secret }}"
exclusive: "{{ ssh_params.exclusive }}"
register: change_secret_result
when: account.secret_type == "ssh_key"
- name: Refresh connection
@ -103,9 +101,7 @@
become_password: "{{ account.become.ansible_password | default('') }}"
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when:
- account.secret_type == "password"
- check_conn_after_change or change_secret_result.failed | default(false)
when: account.secret_type == "password" and check_conn_after_change
delegate_to: localhost
- name: "Verify {{ account.username }} SSH KEY (paramiko)"
@ -116,8 +112,6 @@
login_private_key_path: "{{ account.private_key_path }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when:
- account.secret_type == "ssh_key"
- check_conn_after_change or change_secret_result.failed | default(false)
when: account.secret_type == "ssh_key" and check_conn_after_change
delegate_to: localhost

View File

@ -41,7 +41,6 @@
password: "{{ account.secret | password_hash('sha512') }}"
update_password: always
ignore_errors: true
register: change_secret_result
when: account.secret_type == "password"
- name: "Get home directory for {{ account.username }}"
@ -84,7 +83,6 @@
user: "{{ account.username }}"
key: "{{ account.secret }}"
exclusive: "{{ ssh_params.exclusive }}"
register: change_secret_result
when: account.secret_type == "ssh_key"
- name: Refresh connection
@ -103,9 +101,7 @@
become_password: "{{ account.become.ansible_password | default('') }}"
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when:
- account.secret_type == "password"
- check_conn_after_change or change_secret_result.failed | default(false)
when: account.secret_type == "password" and check_conn_after_change
delegate_to: localhost
- name: "Verify {{ account.username }} SSH KEY (paramiko)"
@ -116,8 +112,6 @@
login_private_key_path: "{{ account.private_key_path }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
when:
- account.secret_type == "ssh_key"
- check_conn_after_change or change_secret_result.failed | default(false)
when: account.secret_type == "ssh_key" and check_conn_after_change
delegate_to: localhost

View File

@ -1,27 +0,0 @@
- hosts: demo
gather_facts: no
tasks:
- name: Test privileged account
ansible.windows.win_ping:
- name: Push user password
community.windows.win_domain_user:
name: "{{ account.username }}"
password: "{{ account.secret }}"
update_password: always
password_never_expires: yes
state: present
groups: "{{ params.groups }}"
groups_action: add
ignore_errors: true
when: account.secret_type == "password"
- name: Refresh connection
ansible.builtin.meta: reset_connection
- name: Verify password
ansible.windows.win_ping:
vars:
ansible_user: "{{ account.full_username }}"
ansible_password: "{{ account.secret }}"
when: account.secret_type == "password" and check_conn_after_change

View File

@ -1,25 +0,0 @@
id: push_account_ad_windows
name: "{{ 'Windows account push' | trans }}"
version: 1
method: push_account
category:
- ds
type:
- windows_ad
params:
- name: groups
type: str
label: '用户组'
default: 'Users,Remote Desktop Users'
help_text: "{{ 'Params groups help text' | trans }}"
i18n:
Windows account push:
zh: '使用 Ansible 模块 win_domain_user 执行 Windows 账号推送'
ja: 'Ansible win_domain_user モジュールを使用して Windows アカウントをプッシュする'
en: 'Using Ansible module win_domain_user to push account'
Params groups help text:
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

View File

@ -11,5 +11,4 @@
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
name: "{{ jms_asset.spec_info.db_name }}"
script: "DROP LOGIN {{ account.username }}; select @@version"
script: "DROP USER {{ account.username }}"

View File

@ -1,9 +0,0 @@
- hosts: windows
gather_facts: no
tasks:
- name: "Remove account"
ansible.windows.win_domain_user:
name: "{{ account.username }}"
state: absent

View File

@ -1,14 +0,0 @@
id: remove_account_ad_windows
name: "{{ 'Windows account remove' | trans }}"
version: 1
method: remove_account
category:
- ds
type:
- windows_ad
i18n:
Windows account remove:
zh: 使用 Ansible 模块 win_domain_user 删除账号
ja: Ansible モジュール win_domain_user を使用してアカウントを削除する
en: Use the Ansible module win_domain_user to delete an account

View File

@ -10,6 +10,6 @@
rdp_ping:
login_host: "{{ jms_asset.address }}"
login_port: "{{ jms_asset.port }}"
login_user: "{{ account.full_username }}"
login_user: "{{ account.username }}"
login_password: "{{ account.secret }}"
login_secret_type: "{{ account.secret_type }}"

View File

@ -2,10 +2,8 @@ id: verify_account_by_rdp
name: "{{ 'Windows rdp account verify' | trans }}"
category:
- host
- ds
type:
- windows
- windows_ad
method: verify_account
protocol: rdp
priority: 1

View File

@ -7,5 +7,5 @@
- name: Verify account
ansible.windows.win_ping:
vars:
ansible_user: "{{ account.full_username }}"
ansible_user: "{{ account.username }}"
ansible_password: "{{ account.secret }}"

View File

@ -2,12 +2,9 @@ id: verify_account_windows
name: "{{ 'Windows account verify' | trans }}"
version: 1
method: verify_account
category:
- host
- ds
category: host
type:
- windows
- windows_ad
i18n:
Windows account verify:

View File

@ -42,7 +42,7 @@ class VerifyAccountManager(AccountBasePlaybookManager):
if host.get('error'):
return host
accounts = asset.all_accounts.all()
accounts = asset.accounts.all()
accounts = self.get_accounts(account, accounts)
inventory_hosts = []
@ -64,7 +64,6 @@ class VerifyAccountManager(AccountBasePlaybookManager):
h['account'] = {
'name': account.name,
'username': account.username,
'full_username': account.full_username,
'secret_type': account.secret_type,
'secret': account.escape_jinja2_syntax(secret),
'private_key_path': private_key_path,
@ -85,7 +84,6 @@ class VerifyAccountManager(AccountBasePlaybookManager):
def on_host_error(self, host, error, result):
account = self.host_account_mapper.get(host)
try:
error_tp = account.get_err_connectivity(error)
account.set_connectivity(error_tp)
account.set_connectivity(Connectivity.ERR)
except Exception as e:
print(f'\033[31m Update account {account.name} connectivity failed: {e} \033[0m\n')

View File

@ -5,6 +5,7 @@ import uuid
import django_filters
from django.db.models import Q
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from django_filters import rest_framework as drf_filters
from rest_framework import filters
from rest_framework.compat import coreapi
@ -12,26 +13,11 @@ from rest_framework.compat import coreapi
from assets.models import Node
from assets.utils import get_node_from_request
from common.drf.filters import BaseFilterSet
from common.utils import get_logger
from common.utils.timezone import local_zero_hour, local_now
from .const.automation import ChangeSecretRecordStatusChoice
from .models import Account, GatheredAccount, ChangeSecretRecord, PushSecretRecord, IntegrationApplication, \
AutomationExecution
logger = get_logger(__file__)
class UUIDFilterMixin:
@staticmethod
def filter_uuid(queryset, name, value):
try:
uuid.UUID(value)
except ValueError:
logger.warning(f"Invalid UUID: {value}")
return queryset.none()
return queryset.filter(**{name: value})
class NodeFilterBackend(filters.BaseFilterBackend):
fields = ['node_id']
@ -57,15 +43,14 @@ class NodeFilterBackend(filters.BaseFilterBackend):
return queryset
class AccountFilterSet(UUIDFilterMixin, BaseFilterSet):
class AccountFilterSet(BaseFilterSet):
ip = drf_filters.CharFilter(field_name="address", lookup_expr="exact")
name = drf_filters.CharFilter(field_name="name", lookup_expr="exact")
hostname = drf_filters.CharFilter(field_name="name", lookup_expr="exact")
username = drf_filters.CharFilter(field_name="username", lookup_expr="exact")
address = drf_filters.CharFilter(field_name="asset__address", lookup_expr="exact")
asset_name = drf_filters.CharFilter(field_name="asset__name", lookup_expr="exact")
asset_id = drf_filters.CharFilter(field_name="asset", method="filter_uuid")
assets = drf_filters.CharFilter(field_name="asset_id", method="filter_uuid")
asset_id = drf_filters.CharFilter(field_name="asset", lookup_expr="exact")
asset = drf_filters.CharFilter(field_name="asset", lookup_expr="exact")
assets = drf_filters.CharFilter(field_name="asset_id", lookup_expr="exact")
has_secret = drf_filters.BooleanFilter(method="filter_has_secret")
platform = drf_filters.CharFilter(
field_name="asset__platform_id", lookup_expr="exact"
@ -150,9 +135,8 @@ class AccountFilterSet(UUIDFilterMixin, BaseFilterSet):
kwargs.update({"date_change_secret__gt": date})
if name == "latest_secret_change_failed":
queryset = (
queryset.filter(date_change_secret__gt=date)
.exclude(change_secret_status=ChangeSecretRecordStatusChoice.success)
queryset = queryset.filter(date_change_secret__gt=date).exclude(
change_secret_status=ChangeSecretRecordStatusChoice.success
)
if kwargs:
@ -162,8 +146,8 @@ class AccountFilterSet(UUIDFilterMixin, BaseFilterSet):
class Meta:
model = Account
fields = [
"id", "source_id", "secret_type", "category", "type",
"privileged", "secret_reset", "connectivity", "is_active"
"id", "asset", "source_id", "secret_type", "category",
"type", "privileged", "secret_reset", "connectivity", 'is_active'
]
@ -201,6 +185,16 @@ class SecretRecordMixin(drf_filters.FilterSet):
return queryset.filter(date_finished__gte=dt)
class UUIDExecutionFilterMixin:
@staticmethod
def filter_execution(queryset, name, value):
try:
uuid.UUID(value)
except ValueError:
raise ValueError(_('Enter a valid UUID.'))
return queryset.filter(**{name: value})
class DaysExecutionFilterMixin:
days = drf_filters.NumberFilter(method="filter_days")
field: str
@ -215,10 +209,10 @@ class DaysExecutionFilterMixin:
class ChangeSecretRecordFilterSet(
SecretRecordMixin, UUIDFilterMixin,
SecretRecordMixin, UUIDExecutionFilterMixin,
DaysExecutionFilterMixin, BaseFilterSet
):
execution_id = django_filters.CharFilter(method="filter_uuid")
execution_id = django_filters.CharFilter(method="filter_execution")
days = drf_filters.NumberFilter(method="filter_days")
field = 'date_finished'
@ -236,8 +230,8 @@ class AutomationExecutionFilterSet(DaysExecutionFilterMixin, BaseFilterSet):
fields = ["days", 'trigger', 'automation_id', 'automation__name']
class PushAccountRecordFilterSet(SecretRecordMixin, UUIDFilterMixin, BaseFilterSet):
execution_id = django_filters.CharFilter(method="filter_uuid")
class PushAccountRecordFilterSet(SecretRecordMixin, UUIDExecutionFilterMixin, BaseFilterSet):
execution_id = django_filters.CharFilter(method="filter_execution")
class Meta:
model = PushSecretRecord

View File

@ -1,15 +1,65 @@
from rest_framework.response import Response
from rest_framework import status
from django.db.models import Model
from django.utils import translation
from django.utils.translation import gettext_noop
from audits.const import ActionChoices
from audits.handler import create_or_update_operate_log
from common.views.mixins import RecordViewLogMixin
from common.utils import i18n_fmt
class AccountRecordViewLogMixin(object):
class AccountRecordViewLogMixin(RecordViewLogMixin):
get_object: callable
model: Model
get_queryset: callable
@staticmethod
def _filter_params(params):
new_params = {}
need_pop_params = ('format', 'order')
for key, value in params.items():
if key in need_pop_params:
continue
if isinstance(value, list):
value = list(filter(None, value))
if value:
new_params[key] = value
return new_params
def get_resource_display(self, request):
query_params = dict(request.query_params)
params = self._filter_params(query_params)
spm_filter = params.pop("spm", None)
if not params and not spm_filter:
display_message = gettext_noop("Export all")
elif spm_filter:
display_message = gettext_noop("Export only selected items")
else:
query = ",".join(
["%s=%s" % (key, value) for key, value in params.items()]
)
display_message = i18n_fmt(gettext_noop("Export filtered: %s"), query)
return display_message
@property
def detail_msg(self):
return i18n_fmt(
gettext_noop('User %s view/export secret'), self.request.user
)
def list(self, request, *args, **kwargs):
list_func = getattr(super(), 'list')
if not callable(list_func):
return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
response = list_func(request, *args, **kwargs)
with translation.override('en'):
resource_display = self.get_resource_display(request)
ids = [q.id for q in self.get_queryset()]
self.record_logs(
ids, ActionChoices.view, self.detail_msg, resource_display=resource_display
)
return response
def retrieve(self, request, *args, **kwargs):
retrieve_func = getattr(super(), 'retrieve')
@ -17,9 +67,9 @@ class AccountRecordViewLogMixin(object):
return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
response = retrieve_func(request, *args, **kwargs)
with translation.override('en'):
create_or_update_operate_log(
ActionChoices.view, self.model._meta.verbose_name,
force=True, resource=self.get_object(),
resource = self.get_object()
self.record_logs(
[resource.id], ActionChoices.view, self.detail_msg, resource=resource
)
return response

View File

@ -131,46 +131,9 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
@lazyproperty
def alias(self):
"""
别称因为有虚拟账号@INPUT @MANUAL @USER, 否则为 id
"""
if self.username.startswith('@'):
return self.username
return str(self.id)
def is_virtual(self):
"""
不要用 username 去判断因为可能是构造的 account 对象设置了同名账号的用户名,
"""
return self.alias.startswith('@')
def is_ds_account(self):
if self.is_virtual():
return ''
if not self.asset.is_directory_service:
return False
return True
@lazyproperty
def ds(self):
if not self.is_ds_account():
return None
return self.asset.ds
@lazyproperty
def ds_domain(self):
"""这个不能去掉perm_account 会动态设置这个值,以更改 full_username"""
if self.is_virtual():
return ''
if self.ds and self.ds.domain_name:
return self.ds.domain_name
return ''
@property
def full_username(self):
if self.ds_domain:
return '{}@{}'.format(self.username, self.ds_domain)
return self.username
return self.name
@lazyproperty
def has_secret(self):

View File

@ -92,9 +92,8 @@ class VirtualAccount(JMSOrgBaseModel):
from .account import Account
username = user.username
alias = AliasAccount.USER.value
with tmp_to_org(asset.org):
same_account = cls.objects.filter(alias=alias).first()
same_account = cls.objects.filter(alias='@USER').first()
secret = ''
if same_account and same_account.secret_from_login:
@ -102,6 +101,4 @@ class VirtualAccount(JMSOrgBaseModel):
if not secret and not from_permed:
secret = input_secret
account = Account(name=AliasAccount.USER.label, username=username, secret=secret)
account.alias = alias
return account
return Account(name=AliasAccount.USER.label, username=username, secret=secret)

View File

@ -233,7 +233,6 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
required=False, queryset=Account.objects, allow_null=True, allow_empty=True,
label=_('Su from'), attrs=('id', 'name', 'username')
)
ds = ObjectRelatedField(read_only=True, label=_('Directory service'), attrs=('id', 'name', 'domain_name'))
class Meta(BaseAccountSerializer.Meta):
model = Account
@ -242,7 +241,7 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
'date_change_secret', 'change_secret_status'
]
fields = BaseAccountSerializer.Meta.fields + [
'su_from', 'asset', 'version', 'ds',
'su_from', 'asset', 'version',
'source', 'source_id', 'secret_reset',
] + AccountCreateUpdateSerializerMixin.Meta.fields + automation_fields
read_only_fields = BaseAccountSerializer.Meta.read_only_fields + automation_fields
@ -259,7 +258,7 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
queryset = queryset.prefetch_related(
'asset', 'asset__platform',
'asset__platform__automation'
)
).prefetch_related('labels', 'labels__label')
return queryset

View File

@ -1,11 +1,9 @@
from django.templatetags.static import static
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
from accounts.models import IntegrationApplication
from acls.serializers.rules import ip_group_child_validator, ip_group_help_text
from common.serializers.fields import JSONManyToManyField
from common.utils import random_string
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
@ -29,18 +27,13 @@ class IntegrationApplicationSerializer(BulkOrgResourceModelSerializer):
'name': {'label': _('Name')},
'accounts_amount': {'label': _('Accounts amount')},
'is_active': {'default': True},
'logo': {'required': False},
}
def to_representation(self, instance):
data = super().to_representation(instance)
if not data.get('logo'):
data['logo'] = static('img/logo.png')
return data
def validate(self, attrs):
attrs['secret'] = random_string(36)
return attrs
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
request_method = self.context.get('request').method
if request_method == 'PUT':
self.fields['logo'].required = False
class IntegrationAccountSecretSerializer(serializers.Serializer):

View File

@ -129,7 +129,7 @@
</tbody>
</table>
{% else %}
<p class="no-data">{% trans 'No lost accounts found' %}</p>
<p class="no-data">{% trans 'No new accounts found' %}</p>
{% endif %}
</div>
</section>

View File

@ -8,6 +8,6 @@ class ActionChoices(models.TextChoices):
review = 'review', _('Review')
warning = 'warning', _('Warn')
notice = 'notice', _('Notify')
notify_and_warn = 'notify_and_warn', _('Prompt and warn')
notify_and_warn = 'notify_and_warn', _('Notify and warn')
face_verify = 'face_verify', _('Face Verify')
face_online = 'face_online', _('Face Online')

View File

@ -18,12 +18,7 @@ class LoginACLSerializer(BaseUserACLSerializer, BulkOrgResourceModelSerializer):
class Meta(BaseUserACLSerializer.Meta):
model = LoginACL
fields = BaseUserACLSerializer.Meta.fields + ['rules', ]
action_choices_exclude = [
ActionChoices.warning,
ActionChoices.notify_and_warn,
ActionChoices.face_online,
ActionChoices.face_verify
]
action_choices_exclude = [ActionChoices.face_online, ActionChoices.face_verify]
def get_rules_serializer(self):
return RuleSerializer()

View File

@ -1,10 +1,10 @@
from .asset import *
from .category import *
from .domain import *
from .favorite_asset import *
from .mixin import *
from .my_asset import *
from .node import *
from .platform import *
from .protocol import *
from .tree import *
from .zone import *
from .my_asset import *

View File

@ -3,7 +3,6 @@ from .cloud import *
from .custom import *
from .database import *
from .device import *
from .ds import *
from .gpt import *
from .host import *
from .permission import *

View File

@ -11,7 +11,6 @@ from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.status import HTTP_200_OK
from accounts.serializers import AccountSerializer
from accounts.tasks import push_accounts_to_assets_task, verify_accounts_connectivity_task
from assets import serializers
from assets.exceptions import NotSupportedTemporarilyError
@ -37,12 +36,12 @@ class AssetFilterSet(BaseFilterSet):
platform = drf_filters.CharFilter(method='filter_platform')
is_gateway = drf_filters.BooleanFilter(method='filter_is_gateway')
exclude_platform = drf_filters.CharFilter(field_name="platform__name", lookup_expr='exact', exclude=True)
zone = drf_filters.CharFilter(method='filter_zone')
domain = drf_filters.CharFilter(method='filter_domain')
type = drf_filters.CharFilter(field_name="platform__type", lookup_expr="exact")
category = drf_filters.CharFilter(field_name="platform__category", lookup_expr="exact")
protocols = drf_filters.CharFilter(method='filter_protocols')
gateway_enabled = drf_filters.BooleanFilter(
field_name="platform__gateway_enabled", lookup_expr="exact"
domain_enabled = drf_filters.BooleanFilter(
field_name="platform__domain_enabled", lookup_expr="exact"
)
ping_enabled = drf_filters.BooleanFilter(
field_name="platform__automation__ping_enabled", lookup_expr="exact"
@ -85,11 +84,11 @@ class AssetFilterSet(BaseFilterSet):
return queryset
@staticmethod
def filter_zone(queryset, name, value):
def filter_domain(queryset, name, value):
if is_uuid(value):
return queryset.filter(zone_id=value)
return queryset.filter(domain_id=value)
else:
return queryset.filter(zone__name__contains=value)
return queryset.filter(domain__name__contains=value)
@staticmethod
def filter_protocols(queryset, name, value):
@ -97,7 +96,7 @@ class AssetFilterSet(BaseFilterSet):
return queryset.filter(protocols__name__in=value).distinct()
class BaseAssetViewSet(OrgBulkModelViewSet):
class AssetViewSet(SuggestionMixin, OrgBulkModelViewSet):
"""
API endpoint that allows Asset to be viewed or edited.
"""
@ -110,19 +109,18 @@ class BaseAssetViewSet(OrgBulkModelViewSet):
("platform", serializers.PlatformSerializer),
("suggestion", serializers.MiniAssetSerializer),
("gateways", serializers.GatewaySerializer),
("accounts", AccountSerializer),
)
rbac_perms = (
("match", "assets.match_asset"),
("platform", "assets.view_platform"),
("gateways", "assets.view_gateway"),
("accounts", "assets.view_account"),
("spec_info", "assets.view_asset"),
("gathered_info", "assets.view_asset"),
("sync_platform_protocols", "assets.change_asset"),
)
extra_filter_backends = [
IpInFilterBackend, NodeFilterBackend, AttrRulesFilterBackend
IpInFilterBackend,
NodeFilterBackend, AttrRulesFilterBackend
]
def perform_destroy(self, instance):
@ -143,25 +141,6 @@ class BaseAssetViewSet(OrgBulkModelViewSet):
return retrieve_cls
return cls
def paginate_queryset(self, queryset):
page = super().paginate_queryset(queryset)
if page:
page = Asset.compute_all_accounts_amount(page)
return page
def create(self, request, *args, **kwargs):
if request.path.find('/api/v1/assets/assets/') > -1:
error = _('Cannot create asset directly, you should create a host or other')
return Response({'error': error}, status=400)
if not settings.XPACK_LICENSE_IS_VALID and self.model.objects.order_by().count() >= 5000:
error = _('The number of assets exceeds the limit of 5000')
return Response({'error': error}, status=400)
return super().create(request, *args, **kwargs)
class AssetViewSet(SuggestionMixin, BaseAssetViewSet):
@action(methods=["GET"], detail=True, url_path="platform")
def platform(self, *args, **kwargs):
asset = super().get_object()
@ -171,10 +150,10 @@ class AssetViewSet(SuggestionMixin, BaseAssetViewSet):
@action(methods=["GET"], detail=True, url_path="gateways")
def gateways(self, *args, **kwargs):
asset = self.get_object()
if not asset.zone:
if not asset.domain:
gateways = Gateway.objects.none()
else:
gateways = asset.zone.gateways
gateways = asset.domain.gateways
return self.get_paginated_response_from_queryset(gateways)
@action(methods=['post'], detail=False, url_path='sync-platform-protocols')
@ -210,6 +189,17 @@ class AssetViewSet(SuggestionMixin, BaseAssetViewSet):
Protocol.objects.bulk_create(objs)
return Response(status=status.HTTP_200_OK)
def create(self, request, *args, **kwargs):
if request.path.find('/api/v1/assets/assets/') > -1:
error = _('Cannot create asset directly, you should create a host or other')
return Response({'error': error}, status=400)
if not settings.XPACK_LICENSE_IS_VALID and self.model.objects.order_by().count() >= 5000:
error = _('The number of assets exceeds the limit of 5000')
return Response({'error': error}, status=400)
return super().create(request, *args, **kwargs)
def filter_bulk_update_data(self):
bulk_data = []
skip_assets = []

View File

@ -1,12 +1,12 @@
from assets.models import Cloud, Asset
from assets.serializers import CloudSerializer
from .asset import BaseAssetViewSet
from .asset import AssetViewSet
__all__ = ['CloudViewSet']
class CloudViewSet(BaseAssetViewSet):
class CloudViewSet(AssetViewSet):
model = Cloud
perm_model = Asset

View File

@ -1,12 +1,12 @@
from assets.models import Custom, Asset
from assets.serializers import CustomSerializer
from .asset import BaseAssetViewSet
from .asset import AssetViewSet
__all__ = ['CustomViewSet']
class CustomViewSet(BaseAssetViewSet):
class CustomViewSet(AssetViewSet):
model = Custom
perm_model = Asset

View File

@ -1,12 +1,12 @@
from assets.models import Database, Asset
from assets.serializers import DatabaseSerializer
from .asset import BaseAssetViewSet
from .asset import AssetViewSet
__all__ = ['DatabaseViewSet']
class DatabaseViewSet(BaseAssetViewSet):
class DatabaseViewSet(AssetViewSet):
model = Database
perm_model = Asset

View File

@ -1,11 +1,11 @@
from assets.models import Device, Asset
from assets.serializers import DeviceSerializer
from .asset import BaseAssetViewSet
from assets.models import Device, Asset
from .asset import AssetViewSet
__all__ = ['DeviceViewSet']
class DeviceViewSet(BaseAssetViewSet):
class DeviceViewSet(AssetViewSet):
model = Device
perm_model = Asset

View File

@ -1,16 +0,0 @@
from assets.models import DirectoryService, Asset
from assets.serializers import DSSerializer
from .asset import BaseAssetViewSet
__all__ = ['DSViewSet']
class DSViewSet(BaseAssetViewSet):
model = DirectoryService
perm_model = Asset
def get_serializer_classes(self):
serializer_classes = super().get_serializer_classes()
serializer_classes['default'] = DSSerializer
return serializer_classes

View File

@ -1,12 +1,12 @@
from assets.models import GPT, Asset
from assets.serializers import GPTSerializer
from .asset import BaseAssetViewSet
from .asset import AssetViewSet
__all__ = ['GPTViewSet']
class GPTViewSet(BaseAssetViewSet):
class GPTViewSet(AssetViewSet):
model = GPT
perm_model = Asset

View File

@ -1,11 +1,11 @@
from assets.models import Host, Asset
from assets.serializers import HostSerializer
from .asset import BaseAssetViewSet
from .asset import AssetViewSet
__all__ = ['HostViewSet']
class HostViewSet(BaseAssetViewSet):
class HostViewSet(AssetViewSet):
model = Host
perm_model = Asset

View File

@ -1,12 +1,12 @@
from assets.models import Web, Asset
from assets.serializers import WebSerializer
from .asset import BaseAssetViewSet
from .asset import AssetViewSet
__all__ = ['WebViewSet']
class WebViewSet(BaseAssetViewSet):
class WebViewSet(AssetViewSet):
model = Web
perm_model = Asset

View File

@ -9,24 +9,24 @@ from common.utils import get_logger
from orgs.mixins.api import OrgBulkModelViewSet
from .asset import HostViewSet
from .. import serializers
from ..models import Zone, Gateway
from ..models import Domain, Gateway
logger = get_logger(__file__)
__all__ = ['ZoneViewSet', 'GatewayViewSet', "GatewayTestConnectionApi"]
__all__ = ['DomainViewSet', 'GatewayViewSet', "GatewayTestConnectionApi"]
class ZoneViewSet(OrgBulkModelViewSet):
model = Zone
class DomainViewSet(OrgBulkModelViewSet):
model = Domain
filterset_fields = ("name",)
search_fields = filterset_fields
serializer_classes = {
'default': serializers.ZoneSerializer,
'list': serializers.ZoneListSerializer,
'default': serializers.DomainSerializer,
'list': serializers.DomainListSerializer,
}
def get_serializer_class(self):
if self.request.query_params.get('gateway'):
return serializers.ZoneWithGatewaySerializer
return serializers.DomainWithGatewaySerializer
return super().get_serializer_class()
def partial_update(self, request, *args, **kwargs):
@ -36,8 +36,8 @@ class ZoneViewSet(OrgBulkModelViewSet):
class GatewayViewSet(HostViewSet):
perm_model = Gateway
filterset_fields = ("zone__name", "name", "zone")
search_fields = ("zone__name",)
filterset_fields = ("domain__name", "name", "domain")
search_fields = ("domain__name",)
def get_serializer_classes(self):
serializer_classes = super().get_serializer_classes()
@ -45,7 +45,7 @@ class GatewayViewSet(HostViewSet):
return serializer_classes
def get_queryset(self):
queryset = Zone.get_gateway_queryset()
queryset = Domain.get_gateway_queryset()
return queryset
@ -55,7 +55,7 @@ class GatewayTestConnectionApi(SingleObjectMixin, APIView):
}
def get_queryset(self):
queryset = Zone.get_gateway_queryset()
queryset = Domain.get_gateway_queryset()
return queryset
def post(self, request, *args, **kwargs):

View File

@ -52,7 +52,7 @@ class AssetPlatformViewSet(JMSModelViewSet):
queryset = (
super().get_queryset()
.annotate(assets_amount=Coalesce(Subquery(asset_count_subquery), Value(0)))
.prefetch_related('protocols', 'automation')
.prefetch_related('protocols', 'automation', 'labels', 'labels__label')
)
queryset = queryset.filter(type__in=AllTypes.get_types_values())
return queryset

View File

@ -3,10 +3,10 @@ import json
import logging
import os
import shutil
import time
from collections import defaultdict
from socket import gethostname
import time
import yaml
from django.conf import settings
from django.template.loader import render_to_string
@ -334,8 +334,7 @@ class PlaybookPrepareMixin:
return sub_playbook_path
def check_automation_enabled(self, platform, assets):
automation = getattr(platform, 'automation', None)
if not (automation and getattr(automation, 'ansible_enabled', False)):
if not platform.automation or not platform.automation.ansible_enabled:
print(_(" - Platform {} ansible disabled").format(platform.name))
self.on_assets_not_ansible_enabled(assets)
return False

View File

@ -1,5 +1,3 @@
from collections import Counter
__all__ = ['FormatAssetInfo']
@ -9,37 +7,13 @@ class FormatAssetInfo:
self.tp = tp
@staticmethod
def get_cpu_model_count(cpus):
try:
models = [cpus[i + 1] + " " + cpus[i + 2] for i in range(0, len(cpus), 3)]
model_counts = Counter(models)
result = ', '.join([f"{model} x{count}" for model, count in model_counts.items()])
except Exception as e:
print(f"Error processing CPU model list: {e}")
result = ''
return result
@staticmethod
def get_gpu_model_count(gpus):
try:
model_counts = Counter(gpus)
result = ', '.join([f"{model} x{count}" for model, count in model_counts.items()])
except Exception as e:
print(f"Error processing GPU model list: {e}")
result = ''
return result
def posix_format(self, info):
cpus = self.get_cpu_model_count(info.get('cpu_model', []))
gpus = self.get_gpu_model_count(info.get('gpu_model', []))
info['gpu_model'] = gpus
info['cpu_model'] = cpus
def posix_format(info):
for cpu_model in info.get('cpu_model', []):
if cpu_model.endswith('GHz') or cpu_model.startswith("Intel"):
break
else:
cpu_model = ''
info['cpu_model'] = cpu_model[:48]
info['cpu_count'] = info.get('cpu_count', 0)
return info
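One side of the hunk above summarizes gathered CPU/GPU model strings with collections.Counter, joining each distinct model with its multiplicity. A runnable sketch of that grouping, assuming (as the list comprehension in the diff does) that CPU facts arrive as repeating triples whose second and third entries form the model string; the sample values are made up:

from collections import Counter

def summarize_models(models):
    """Group identical model strings and render them as 'model xN, ...'."""
    counts = Counter(models)
    return ', '.join(f'{model} x{count}' for model, count in counts.items())

cpu_facts = ['0', 'Intel(R) Xeon(R) Gold 6230', '2.10GHz',
             '1', 'Intel(R) Xeon(R) Gold 6230', '2.10GHz']
cpu_models = [cpu_facts[i + 1] + ' ' + cpu_facts[i + 2] for i in range(0, len(cpu_facts), 3)]
print(summarize_models(cpu_models))                      # Intel(R) Xeon(R) Gold 6230 2.10GHz x2
print(summarize_models(['NVIDIA A100', 'NVIDIA A100']))  # NVIDIA A100 x2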

View File

@ -23,16 +23,5 @@
arch: "{{ ansible_architecture }}"
kernel: "{{ ansible_kernel }}"
- name: Get GPU info with nvidia-smi
shell: |
nvidia-smi --query-gpu=name,memory.total,driver_version --format=csv,noheader,nounits
register: gpu_info
ignore_errors: yes
- name: Merge GPU info into final info
set_fact:
info: "{{ info | combine({'gpu_model': gpu_info.stdout_lines | default([])}) }}"
- debug:
var: info

View File

@ -2,12 +2,9 @@ id: gather_facts_windows
name: "{{ 'Gather facts windows' | trans }}"
version: 1
method: gather_facts
category:
- host
- ds
category: host
type:
- windows
- windows_ad
i18n:
Gather facts windows:
zh: '使用 Ansible 指令 gather_facts 从 Windows 获取设备信息'

View File

@ -3,10 +3,8 @@ name: "{{ 'Ping by pyfreerdp' | trans }}"
category:
- device
- host
- ds
type:
- windows
- windows_ad
method: ping
protocol: rdp
priority: 1

View File

@ -3,7 +3,6 @@ name: "{{ 'Ping by paramiko' | trans }}"
category:
- device
- host
- ds
type:
- all
method: ping

View File

@ -3,7 +3,6 @@ name: "{{ 'Ping by telnet' | trans }}"
category:
- device
- host
- ds
type:
- all
method: ping

View File

@ -2,12 +2,9 @@ id: win_ping
name: "{{ 'Windows ping' | trans }}"
version: 1
method: ping
category:
- host
- ds
category: host
type:
- windows
- windows_ad
i18n:
Windows ping:
zh: 使用 Ansible 模块 内置模块 win_ping 来测试可连接性

View File

@ -37,11 +37,10 @@ class PingManager(BasePlaybookManager):
def on_host_error(self, host, error, result):
asset, account = self.host_asset_and_account_mapper.get(host)
try:
error_tp = asset.get_err_connectivity(error)
asset.set_connectivity(error_tp)
asset.set_connectivity(Connectivity.ERR)
if not account:
return
account.set_connectivity(error_tp)
account.set_connectivity(Connectivity.ERR)
except Exception as e:
print(f'\033[31m Update account {account.name} or '
f'update asset {asset.name} connectivity failed: {e} \033[0m\n')

View File

@ -7,12 +7,6 @@ class Connectivity(TextChoices):
NA = 'na', _('N/A')
OK = 'ok', _('OK')
ERR = 'err', _('Error')
AUTH_ERR = 'auth_err', _('Authentication error')
SUDO_ERR = 'sudo_err', _('Sudo permission error')
PASSWORD_ERR = 'password_err', _('Invalid password error')
OPENSSH_KEY_ERR = 'openssh_key_err', _('OpenSSH key error')
NTLM_ERR = 'ntlm_err', _('NTLM credentials rejected error')
CREATE_DIR_ERR = 'create_dir_err', _('Create directory error')
class AutomationTypes(TextChoices):

View File

@ -37,7 +37,7 @@ class FillType(models.TextChoices):
class BaseType(TextChoices):
"""
约束应该考虑代是对平台对限制避免多余对选项: mysql 开启 ssh,
或者开启了也没有作用, 比如 k8s 开启了 gateway 目前还不支持
或者开启了也没有作用, 比如 k8s 开启了 domain目前还不支持
"""
@classmethod
@ -112,7 +112,8 @@ class BaseType(TextChoices):
@classmethod
def get_choices(cls):
choices = cls.choices
if not settings.XPACK_LICENSE_IS_VALID and hasattr(cls, 'get_community_types'):
if not settings.XPACK_LICENSE_IS_VALID:
choices = [(tp.value, tp.label) for tp in cls.get_community_types()]
else:
choices = cls.choices
return choices
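Both sides of this hunk gate the exposed platform types on the XPack license; one side additionally checks hasattr(cls, 'get_community_types') before filtering. A small self-contained sketch of the same gate, using a plain Enum in place of Django's TextChoices (the type names and the "enterprise-only" example are invented for illustration):

from enum import Enum

class HostType(str, Enum):
    LINUX = 'linux'
    WINDOWS = 'windows'
    UNIX = 'unix'
    AIX = 'aix'  # pretend this one is enterprise-only

    @classmethod
    def get_community_types(cls):
        return [cls.LINUX, cls.WINDOWS, cls.UNIX]

    @classmethod
    def get_choices(cls, license_is_valid: bool):
        if not license_is_valid and hasattr(cls, 'get_community_types'):
            return [(tp.value, tp.name.title()) for tp in cls.get_community_types()]
        return [(tp.value, tp.name.title()) for tp in cls]

print(HostType.get_choices(license_is_valid=False))  # linux, windows, unix only
print(HostType.get_choices(license_is_valid=True))   # all four types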

View File

@ -12,7 +12,6 @@ class Category(ChoicesMixin, models.TextChoices):
DATABASE = 'database', _("Database")
CLOUD = 'cloud', _("Cloud service")
WEB = 'web', _("Web")
DS = 'ds', _("Directory service")
CUSTOM = 'custom', _("Custom type")
@classmethod

View File

@ -13,11 +13,11 @@ class CloudTypes(BaseType):
return {
'*': {
'charset_enabled': False,
'gateway_enabled': False,
'domain_enabled': False,
'su_enabled': False,
},
cls.K8S: {
'gateway_enabled': True,
'domain_enabled': True,
}
}

View File

@ -20,7 +20,7 @@ class CustomTypes(BaseType):
return {
'*': {
'charset_enabled': False,
'gateway_enabled': False,
'domain_enabled': False,
'su_enabled': False,
},
}

View File

@ -20,7 +20,7 @@ class DatabaseTypes(BaseType):
return {
'*': {
'charset_enabled': False,
'gateway_enabled': True,
'domain_enabled': True,
'su_enabled': False,
}
}

View File

@ -19,8 +19,7 @@ class DeviceTypes(BaseType):
return {
'*': {
'charset_enabled': False,
'gateway_enabled': True,
'ds_enabled': True,
'domain_enabled': True,
'su_enabled': True,
'su_methods': ['enable', 'super', 'super_level']
}

View File

@ -1,70 +0,0 @@
from django.utils.translation import gettext_lazy as _
from .base import BaseType
class DirectoryTypes(BaseType):
GENERAL = 'general', _('General')
# LDAP = 'ldap', _('LDAP')
# AD = 'ad', _('Active Directory')
WINDOWS_AD = 'windows_ad', _('Windows Active Directory')
# AZURE_AD = 'azure_ad', _('Azure Active Directory')
@classmethod
def _get_base_constrains(cls) -> dict:
return {
'*': {
'charset_enabled': True,
'gateway_enabled': True,
'ds_enabled': False,
'su_enabled': True,
},
cls.WINDOWS_AD: {
'su_enabled': False,
}
}
@classmethod
def _get_automation_constrains(cls) -> dict:
constrains = {
'*': {
'ansible_enabled': False,
},
cls.WINDOWS_AD: {
'ansible_enabled': True,
'ping_enabled': True,
'gather_facts_enabled': True,
'verify_account_enabled': True,
'change_secret_enabled': True,
'push_account_enabled': True,
'gather_accounts_enabled': True,
'remove_account_enabled': True,
}
}
return constrains
@classmethod
def _get_protocol_constrains(cls) -> dict:
return {
cls.GENERAL: {
'choices': ['ssh']
},
cls.WINDOWS_AD: {
'choices': ['rdp', 'ssh', 'vnc', 'winrm']
},
}
@classmethod
def internal_platforms(cls):
return {
cls.WINDOWS_AD: [
{'name': 'Windows Active Directory'}
],
}
@classmethod
def get_community_types(cls):
return [
cls.GENERAL,
]

View File

@ -11,7 +11,7 @@ class GPTTypes(BaseType):
return {
'*': {
'charset_enabled': False,
'gateway_enabled': False,
'domain_enabled': False,
'su_enabled': False,
}
}

View File

@ -18,9 +18,8 @@ class HostTypes(BaseType):
'*': {
'charset_enabled': True,
'charset': 'utf-8', # default
'gateway_enabled': True,
'domain_enabled': True,
'su_enabled': True,
'ds_enabled': True,
'su_methods': ['sudo', 'su', 'only_sudo', 'only_su'],
},
cls.WINDOWS: {
@ -57,6 +56,7 @@ class HostTypes(BaseType):
'change_secret_enabled': True,
'push_account_enabled': True,
'remove_account_enabled': True,
},
cls.WINDOWS: {
'ansible_config': {
@ -69,6 +69,7 @@ class HostTypes(BaseType):
'ping_enabled': False,
'gather_facts_enabled': False,
'gather_accounts_enabled': False,
'verify_account_enabled': False,
'change_secret_enabled': False,
'push_account_enabled': False
},
@ -81,7 +82,7 @@ class HostTypes(BaseType):
{'name': 'Linux'},
{
'name': GATEWAY_NAME,
'gateway_enabled': True,
'domain_enabled': True,
}
],
cls.UNIX: [
@ -125,5 +126,5 @@ class HostTypes(BaseType):
@classmethod
def get_community_types(cls) -> list:
return [
cls.LINUX, cls.WINDOWS, cls.UNIX, cls.OTHER_HOST
cls.LINUX, cls.UNIX, cls.WINDOWS, cls.OTHER_HOST
]

View File

@ -13,7 +13,6 @@ from .cloud import CloudTypes
from .custom import CustomTypes
from .database import DatabaseTypes
from .device import DeviceTypes
from .ds import DirectoryTypes
from .gpt import GPTTypes
from .host import HostTypes
from .web import WebTypes
@ -23,8 +22,7 @@ class AllTypes(ChoicesMixin):
choices: list
includes = [
HostTypes, DeviceTypes, DatabaseTypes,
CloudTypes, WebTypes, CustomTypes,
DirectoryTypes, GPTTypes
CloudTypes, WebTypes, CustomTypes, GPTTypes
]
_category_constrains = {}
_automation_methods = None
@ -175,7 +173,6 @@ class AllTypes(ChoicesMixin):
(Category.DATABASE, DatabaseTypes),
(Category.WEB, WebTypes),
(Category.CLOUD, CloudTypes),
(Category.DS, DirectoryTypes),
(Category.CUSTOM, CustomTypes)
]
return types
@ -312,7 +309,7 @@ class AllTypes(ChoicesMixin):
'category': category,
'type': tp, 'internal': True,
'charset': constraints.get('charset', 'utf-8'),
'gateway_enabled': constraints.get('gateway_enabled', False),
'domain_enabled': constraints.get('domain_enabled', False),
'su_enabled': constraints.get('su_enabled', False),
}
if data['su_enabled'] and data.get('su_methods'):

View File

@ -11,7 +11,7 @@ class WebTypes(BaseType):
return {
'*': {
'charset_enabled': False,
'gateway_enabled': False,
'domain_enabled': False,
'su_enabled': False,
}
}

View File

@ -1,11 +1,11 @@
# Generated by Django 4.1.13 on 2024-05-09 03:16
import django.db.models.deletion
import json
import assets.models.asset.common
from django.db.models import F, Q
from django.conf import settings
from django.db import migrations, models
from django.db.models import F
import assets.models.asset.common
import django.db.models.deletion
class Migration(migrations.Migration):
@ -39,26 +39,22 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='automationexecution',
name='automation',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='executions',
to='assets.baseautomation', verbose_name='Automation task'),
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='executions', to='assets.baseautomation', verbose_name='Automation task'),
),
migrations.AddField(
model_name='asset',
name='domain',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL,
related_name='assets', to='assets.domain', verbose_name='Zone'),
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='assets', to='assets.domain', verbose_name='Zone'),
),
migrations.AddField(
model_name='asset',
name='nodes',
field=models.ManyToManyField(default=assets.models.asset.common.default_node, related_name='assets',
to='assets.node', verbose_name='Nodes'),
field=models.ManyToManyField(default=assets.models.asset.common.default_node, related_name='assets', to='assets.node', verbose_name='Nodes'),
),
migrations.AddField(
model_name='asset',
name='platform',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='assets',
to='assets.platform', verbose_name='Platform'),
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='assets', to='assets.platform', verbose_name='Platform'),
),
migrations.CreateModel(
name='AssetBaseAutomation',
@ -75,9 +71,7 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='GatherFactsAutomation',
fields=[
('baseautomation_ptr',
models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True,
primary_key=True, serialize=False, to='assets.baseautomation')),
('baseautomation_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='assets.baseautomation')),
],
options={
'verbose_name': 'Gather asset facts',
@ -87,9 +81,7 @@ class Migration(migrations.Migration):
migrations.CreateModel(
name='PingAutomation',
fields=[
('baseautomation_ptr',
models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True,
primary_key=True, serialize=False, to='assets.baseautomation')),
('baseautomation_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='assets.baseautomation')),
],
options={
'verbose_name': 'Ping asset',

View File

@ -18,7 +18,7 @@ platforms_data_json = '''[
"type": "linux",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": true,
"su_method": null,
"custom_fields": [],
@ -119,7 +119,7 @@ platforms_data_json = '''[
"type": "unix",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": true,
"su_method": null,
"custom_fields": [],
@ -209,7 +209,7 @@ platforms_data_json = '''[
"type": "unix",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": true,
"su_method": null,
"custom_fields": [],
@ -299,7 +299,7 @@ platforms_data_json = '''[
"type": "unix",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": true,
"su_method": null,
"custom_fields": [],
@ -389,7 +389,7 @@ platforms_data_json = '''[
"type": "windows",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -481,7 +481,7 @@ platforms_data_json = '''[
"security": "any"
},
"internal": false,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -582,7 +582,7 @@ platforms_data_json = '''[
"type": "other",
"meta": {},
"internal": false,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -684,7 +684,7 @@ platforms_data_json = '''[
"security": "rdp"
},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -776,7 +776,7 @@ platforms_data_json = '''[
"security": "tls"
},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -866,7 +866,7 @@ platforms_data_json = '''[
"type": "unix",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": true,
"su_method": null,
"custom_fields": [],
@ -956,7 +956,7 @@ platforms_data_json = '''[
"type": "linux",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": true,
"su_method": null,
"custom_fields": [],
@ -1057,7 +1057,7 @@ platforms_data_json = '''[
"type": "windows",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -1136,7 +1136,7 @@ platforms_data_json = '''[
"type": "general",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -1201,7 +1201,7 @@ platforms_data_json = '''[
"type": "general",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": true,
"su_method": "enable",
"custom_fields": [],
@ -1266,7 +1266,7 @@ platforms_data_json = '''[
"type": "general",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": true,
"su_method": "super",
"custom_fields": [],
@ -1332,7 +1332,7 @@ platforms_data_json = '''[
"type": "general",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": true,
"su_method": "super_level",
"custom_fields": [],
@ -1397,7 +1397,7 @@ platforms_data_json = '''[
"type": "mysql",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -1449,7 +1449,7 @@ platforms_data_json = '''[
"type": "mariadb",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -1501,7 +1501,7 @@ platforms_data_json = '''[
"type": "postgresql",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -1553,7 +1553,7 @@ platforms_data_json = '''[
"type": "oracle",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -1605,7 +1605,7 @@ platforms_data_json = '''[
"type": "sqlserver",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -1657,7 +1657,7 @@ platforms_data_json = '''[
"type": "clickhouse",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -1709,7 +1709,7 @@ platforms_data_json = '''[
"type": "mongodb",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -1761,7 +1761,7 @@ platforms_data_json = '''[
"type": "redis",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -1815,7 +1815,7 @@ platforms_data_json = '''[
"type": "redis",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -1869,7 +1869,7 @@ platforms_data_json = '''[
"type": "website",
"meta": {},
"internal": true,
"gateway_enabled": false,
"domain_enabled": false,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -1924,7 +1924,7 @@ platforms_data_json = '''[
"type": "private",
"meta": {},
"internal": true,
"gateway_enabled": false,
"domain_enabled": false,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -1979,7 +1979,7 @@ platforms_data_json = '''[
"type": "k8s",
"meta": {},
"internal": true,
"gateway_enabled": false,
"domain_enabled": false,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -2029,7 +2029,7 @@ platforms_data_json = '''[
"type": "chatgpt",
"meta": {},
"internal": true,
"gateway_enabled": false,
"domain_enabled": false,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -2081,7 +2081,7 @@ platforms_data_json = '''[
"type": "db2",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
@ -2131,7 +2131,7 @@ platforms_data_json = '''[
"type": "dameng",
"meta": {},
"internal": true,
"gateway_enabled": true,
"domain_enabled": true,
"su_enabled": false,
"su_method": null,
"custom_fields": [],

View File

@ -1,57 +0,0 @@
# Generated by Django 4.1.13 on 2025-04-03 09:51
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("assets", "0015_automationexecution_type"),
]
operations = [
migrations.CreateModel(
name="DirectoryService",
fields=[
(
"asset_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="assets.asset",
),
),
(
"domain_name",
models.CharField(
blank=True,
default="",
max_length=128,
verbose_name="Domain name",
),
),
],
options={
"verbose_name": "Directory service",
"default_related_name": "ds"
},
bases=("assets.asset",),
),
migrations.AddField(
model_name="platform",
name="ds_enabled",
field=models.BooleanField(default=False, verbose_name="DS enabled"),
),
migrations.AddField(
model_name="asset",
name="directory_services",
field=models.ManyToManyField(
related_name="assets",
to="assets.directoryservice",
verbose_name="Directory services",
)
),
]

View File

@ -1,165 +0,0 @@
# Generated by Django 4.1.13 on 2025-04-07 03:24
import json
from django.db import migrations
from assets.const import AllTypes
def add_ds_platforms(apps, schema_editor):
data = """
[
{
"created_by": "system",
"updated_by": "system",
"comment": "",
"name": "WindowsActiveDirectory",
"category": "ds",
"type": "windows_ad",
"meta": {},
"internal": true,
"gateway_enabled": true,
"su_enabled": false,
"su_method": null,
"custom_fields": [],
"automation": {
"ansible_enabled": true,
"ansible_config": {
"ansible_shell_type": "cmd",
"ansible_connection": "ssh"
},
"ping_enabled": true,
"ping_method": "ping_by_rdp",
"ping_params": {},
"gather_facts_enabled": true,
"gather_facts_method": "gather_facts_windows",
"gather_facts_params": {},
"change_secret_enabled": true,
"change_secret_method": "change_secret_ad_windows",
"change_secret_params": {
},
"push_account_enabled": true,
"push_account_method": "push_account_ad_windows",
"push_account_params": {},
"verify_account_enabled": true,
"verify_account_method": "verify_account_by_rdp",
"verify_account_params": {
},
"gather_accounts_enabled": true,
"gather_accounts_method": "gather_accounts_windows_ad",
"gather_accounts_params": {
},
"remove_account_enabled": true,
"remove_account_method": "remove_account_ad_windows",
"remove_account_params": {
}
},
"protocols": [
{
"name": "rdp",
"port": 3389,
"primary": true,
"required": false,
"default": false,
"public": true,
"setting": {
"console": false,
"security": "any"
}
},
{
"name": "ssh",
"port": 22,
"primary": false,
"required": false,
"default": false,
"public": true,
"setting": {
"sftp_enabled": true,
"sftp_home": "/tmp"
}
},
{
"name": "vnc",
"port": 5900,
"primary": false,
"required": false,
"default": false,
"public": true,
"setting": {
}
},
{
"name": "winrm",
"port": 5985,
"primary": false,
"required": false,
"default": false,
"public": false,
"setting": {
"use_ssl": false
}
}
]
},
{
"created_by": "system",
"updated_by": "system",
"comment": "",
"name": "General",
"category": "ds",
"type": "general",
"meta": {
},
"internal": true,
"gateway_enabled": false,
"su_enabled": false,
"su_method": null,
"custom_fields": [
],
"automation": {
"ansible_enabled": false,
"ansible_config": {
}
},
"protocols": [
{
"name": "ssh",
"port": 22,
"primary": true,
"required": false,
"default": false,
"public": true,
"setting": {
"sftp_enabled": true,
"sftp_home": "/tmp"
}
}
]
}
]
"""
platform_model = apps.get_model('assets', 'Platform')
automation_cls = apps.get_model('assets', 'PlatformAutomation')
platform_datas = json.loads(data)
for platform_data in platform_datas:
AllTypes.create_or_update_by_platform_data(
platform_data, platform_cls=platform_model,
automation_cls=automation_cls
)
class Migration(migrations.Migration):
dependencies = [
("assets", "0016_directory_service"),
]
operations = [
migrations.RunPython(add_ds_platforms)
]

View File

@ -1,26 +0,0 @@
# Generated by Django 4.1.13 on 2025-04-18 08:05
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("assets", "0017_auto_20250407_1124"),
]
operations = [
migrations.RenameField(
model_name="platform",
old_name="domain_enabled",
new_name="gateway_enabled",
),
migrations.RenameModel(
old_name="Domain",
new_name="Zone",
),
migrations.RenameField(
model_name="asset",
old_name="domain",
new_name="zone",
),
]

View File

@ -1,10 +1,9 @@
# noqa
from .base import *
from .platform import *
from .asset import *
from .label import Label
from .gateway import *
from .zone import * # noqa
from .domain import *
from .node import *
from .favorite_asset import *
from .automations import *

View File

@ -3,7 +3,6 @@ from .common import *
from .custom import *
from .database import *
from .device import *
from .ds import *
from .gpt import *
from .host import *
from .web import *

View File

@ -6,7 +6,7 @@ import logging
from collections import defaultdict
from django.db import models
from django.db.models import Q, Count
from django.db.models import Q
from django.forms import model_to_dict
from django.utils.translation import gettext_lazy as _
@ -168,17 +168,13 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
platform = models.ForeignKey(
Platform, on_delete=models.PROTECT, verbose_name=_("Platform"), related_name='assets'
)
zone = models.ForeignKey(
"assets.Zone", null=True, blank=True, related_name='assets',
domain = models.ForeignKey(
"assets.Domain", null=True, blank=True, related_name='assets',
verbose_name=_("Zone"), on_delete=models.SET_NULL
)
nodes = models.ManyToManyField(
'assets.Node', default=default_node, related_name='assets', verbose_name=_("Nodes")
)
directory_services = models.ManyToManyField(
'assets.DirectoryService', related_name='assets',
verbose_name=_("Directory services")
)
is_active = models.BooleanField(default=True, verbose_name=_('Active'))
gathered_info = models.JSONField(verbose_name=_('Gathered info'), default=dict, blank=True) # 资产的一些信息,如 硬件信息
custom_info = models.JSONField(verbose_name=_('Custom info'), default=dict)
@ -205,10 +201,6 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
info[i.name] = v
return info
@lazyproperty
def is_directory_service(self):
return self.category == const.Category.DS and hasattr(self, 'ds')
@lazyproperty
def spec_info(self):
instance = getattr(self, self.category, None)
@ -244,7 +236,7 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
platform = self.platform
auto_config = {
'su_enabled': platform.su_enabled,
'gateway_enabled': platform.gateway_enabled,
'domain_enabled': platform.domain_enabled,
'ansible_enabled': False
}
automation = getattr(self.platform, 'automation', None)
@ -253,28 +245,9 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
auto_config.update(model_to_dict(automation))
return auto_config
@property
def all_accounts(self):
if not self.joined_dir_svcs:
queryset = self.accounts.all()
else:
queryset = self.accounts.model.objects.filter(asset__in=[self.id, *self.joined_dir_svcs])
return queryset
@property
def dc_accounts(self):
queryset = self.accounts.model.objects.filter(asset__in=[*self.joined_dir_svcs])
return queryset
@lazyproperty
def all_valid_accounts(self):
queryset = (self.all_accounts.filter(is_active=True)
.prefetch_related('asset', 'asset__platform'))
return queryset
@lazyproperty
def accounts_amount(self):
return self.all_accounts.count()
return self.accounts.count()
def get_target_ip(self):
return self.address
@ -286,41 +259,6 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
protocol = self.protocols.all().filter(name=protocol).first()
return protocol.port if protocol else 0
def is_dir_svc(self):
return self.category == const.Category.DS
@property
def joined_dir_svcs(self):
return self.directory_services.all()
@classmethod
def compute_all_accounts_amount(cls, assets):
from .ds import DirectoryService
asset_ids = [asset.id for asset in assets]
asset_id_dc_ids_mapper = defaultdict(list)
dc_ids = set()
asset_dc_relations = (
Asset.directory_services.through.objects
.filter(asset_id__in=asset_ids)
.values_list('asset_id', 'directoryservice_id')
)
for asset_id, ds_id in asset_dc_relations:
dc_ids.add(ds_id)
asset_id_dc_ids_mapper[asset_id].append(ds_id)
directory_services = (
DirectoryService.objects.filter(id__in=dc_ids)
.annotate(accounts_amount=Count('accounts'))
)
ds_accounts_amount_mapper = {ds.id: ds.accounts_amount for ds in directory_services}
for asset in assets:
asset_dc_ids = asset_id_dc_ids_mapper.get(asset.id, [])
for dc_id in asset_dc_ids:
ds_accounts = ds_accounts_amount_mapper.get(dc_id, 0)
asset.accounts_amount += ds_accounts
return assets
@property
def is_valid(self):
warning = ''
@ -362,11 +300,11 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
@lazyproperty
def gateway(self):
if not self.zone_id:
if not self.domain_id:
return
if not self.platform.gateway_enabled:
if not self.platform.domain_enabled:
return
return self.zone.select_gateway()
return self.domain.select_gateway()
def as_node(self):
from assets.models import Node
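The gateway lazyproperty in the hunk above is the same on both sides apart from the field name (zone vs domain): an asset only resolves a gateway when it is bound to a zone/domain and its platform has the gateway switch enabled. A standalone sketch of that resolution with plain dataclasses standing in for the Django models (select_gateway() here just picks a random gateway; the real selection strategy is not shown in this diff):

import random
from dataclasses import dataclass, field
from typing import Optional

@dataclass
class Gateway:
    name: str

@dataclass
class Zone:
    name: str
    gateways: list = field(default_factory=list)

    def select_gateway(self) -> Optional[Gateway]:
        # Placeholder strategy: pick any available gateway.
        return random.choice(self.gateways) if self.gateways else None

@dataclass
class Asset:
    name: str
    zone: Optional[Zone] = None
    gateway_enabled: bool = True  # stands in for platform.gateway_enabled / domain_enabled

    @property
    def gateway(self) -> Optional[Gateway]:
        if not self.zone or not self.gateway_enabled:
            return None
        return self.zone.select_gateway()

dmz = Zone('DMZ', gateways=[Gateway('gw-1'), Gateway('gw-2')])
print(Asset('db-1', zone=dmz).gateway)                          # one of the two gateways
print(Asset('web-1', zone=dmz, gateway_enabled=False).gateway)  # None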

View File

@ -1,14 +0,0 @@
from django.db import models
from django.utils.translation import gettext_lazy as _
from .common import Asset
__all__ = ['DirectoryService']
class DirectoryService(Asset):
domain_name = models.CharField(max_length=128, blank=True, default='', verbose_name=_("Domain name"))
class Meta:
default_related_name = "ds"
verbose_name = _("Directory service")

View File

@ -23,28 +23,6 @@ class AbsConnectivity(models.Model):
self.date_verified = timezone.now()
self.save(update_fields=['connectivity', 'date_verified'])
@staticmethod
def get_err_connectivity(msg=None):
msg = (msg or '').strip().lower()
error_map = {
'permission denied': Connectivity.AUTH_ERR,
'authentication failed': Connectivity.AUTH_ERR,
'authentication failure': Connectivity.AUTH_ERR,
'is not in the sudoers file': Connectivity.SUDO_ERR,
'expected openssh key': Connectivity.OPENSSH_KEY_ERR,
'invalid/incorrect password': Connectivity.PASSWORD_ERR,
'failed to create directory': Connectivity.CREATE_DIR_ERR,
'ntlm: the specified credentials were rejected by the server': Connectivity.NTLM_ERR,
}
for key, value in error_map.items():
if key in msg:
return value
return Connectivity.ERR
@property
def is_connective(self):
if self.connectivity == Connectivity.OK:
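One side of the hunk above carries get_err_connectivity(), which classifies a raw automation error message into one of the fine-grained Connectivity states seen earlier in this diff by substring matching. A self-contained sketch of that classification (plain strings stand in for the Connectivity choices; the sample messages are illustrative):

ERROR_MAP = {
    'permission denied': 'auth_err',
    'authentication failed': 'auth_err',
    'authentication failure': 'auth_err',
    'is not in the sudoers file': 'sudo_err',
    'expected openssh key': 'openssh_key_err',
    'invalid/incorrect password': 'password_err',
    'failed to create directory': 'create_dir_err',
    'ntlm: the specified credentials were rejected by the server': 'ntlm_err',
}

def classify_error(msg):
    """Map an error message to a coarse connectivity state, falling back to 'err'."""
    msg = (msg or '').strip().lower()
    for needle, state in ERROR_MAP.items():
        if needle in msg:
            return state
    return 'err'

print(classify_error('Authentication failed.'))               # auth_err
print(classify_error('user foo is not in the sudoers file'))  # sudo_err
print(classify_error('connection timed out'))                 # err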

View File

@ -12,10 +12,10 @@ from .gateway import Gateway
logger = get_logger(__file__)
__all__ = ['Zone']
__all__ = ['Domain']
class Zone(LabeledMixin, JMSOrgBaseModel):
class Domain(LabeledMixin, JMSOrgBaseModel):
name = models.CharField(max_length=128, verbose_name=_('Name'))
class Meta:
@ -49,7 +49,7 @@ class Zone(LabeledMixin, JMSOrgBaseModel):
@property
def gateways(self):
queryset = self.get_gateway_queryset().filter(zone=self)
queryset = self.get_gateway_queryset().filter(domain=self)
return queryset
@classmethod

View File

@ -101,8 +101,7 @@ class Platform(LabeledMixin, JMSBaseModel):
default=CharsetChoices.utf8, choices=CharsetChoices.choices,
max_length=8, verbose_name=_("Charset")
)
gateway_enabled = models.BooleanField(default=True, verbose_name=_("Gateway enabled"))
ds_enabled = models.BooleanField(default=False, verbose_name=_("DS enabled"))
domain_enabled = models.BooleanField(default=True, verbose_name=_("Gateway enabled"))
# 账号有关的
su_enabled = models.BooleanField(default=False, verbose_name=_("Su enabled"))
su_method = models.CharField(max_length=32, blank=True, null=True, verbose_name=_("Su method"))
@ -116,11 +115,6 @@ class Platform(LabeledMixin, JMSBaseModel):
def assets_amount(self):
return self.assets.count()
def save(self, *args, **kwargs):
if not self.ds_enabled:
self.ds = None
super().save(*args, **kwargs)
@classmethod
def default(cls):
linux, created = cls.objects.get_or_create(

View File

@ -4,7 +4,6 @@ from .common import *
from .custom import *
from .database import *
from .device import *
from .ds import *
from .gpt import *
from .host import *
from .web import *

View File

@ -147,20 +147,18 @@ class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, Writa
protocols = AssetProtocolsSerializer(many=True, required=False, label=_('Protocols'), default=())
accounts = AssetAccountSerializer(many=True, required=False, allow_null=True, write_only=True, label=_('Accounts'))
nodes_display = NodeDisplaySerializer(read_only=False, required=False, label=_("Node path"))
platform = ObjectRelatedField(queryset=Platform.objects, required=True, label=_('Platform'),
attrs=('id', 'name', 'type'))
platform = ObjectRelatedField(queryset=Platform.objects, required=True, label=_('Platform'), attrs=('id', 'name', 'type'))
accounts_amount = serializers.IntegerField(read_only=True, label=_('Accounts amount'))
_accounts = None
class Meta:
model = Asset
fields_fk = ['zone', 'platform']
fields_fk = ['domain', 'platform']
fields_mini = ['id', 'name', 'address'] + fields_fk
fields_small = fields_mini + ['is_active', 'comment']
fields_m2m = [
'nodes', 'labels', 'protocols',
'nodes_display', 'accounts',
'directory_services',
]
read_only_fields = [
'accounts_amount', 'category', 'type', 'connectivity', 'auto_config',
@ -174,11 +172,6 @@ class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, Writa
'address': {'label': _('Address')},
'nodes_display': {'label': _('Node path')},
'nodes': {'allow_empty': True, 'label': _("Nodes")},
'directory_services': {
'required': False,
'allow_empty': True,
'default': list, 'label': _("Directory service")
},
}
def __init__(self, *args, **kwargs):
@ -233,11 +226,15 @@ class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, Writa
@classmethod
def setup_eager_loading(cls, queryset):
""" Perform necessary eager loading of data. """
queryset = queryset.prefetch_related('zone', 'nodes', 'protocols', 'directory_services') \
queryset = queryset.prefetch_related('domain', 'nodes', 'protocols', ) \
.prefetch_related('platform', 'platform__automation') \
.annotate(category=F("platform__category")) \
.annotate(type=F("platform__type")) \
.annotate(accounts_amount=Count('accounts'))
if queryset.model is Asset:
queryset = queryset.prefetch_related('labels__label', 'labels')
else:
queryset = queryset.prefetch_related('asset_ptr__labels__label', 'asset_ptr__labels')
return queryset
@staticmethod
@ -271,9 +268,9 @@ class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, Writa
raise serializers.ValidationError({'platform': _("Platform not exist")})
return platform
def validate_zone(self, value):
def validate_domain(self, value):
platform = self._asset_platform
if platform.gateway_enabled:
if platform.domain_enabled:
return value
else:
return None
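validate_zone/validate_domain above is a DRF per-field validator: when the selected platform has its gateway (domain) support disabled, the submitted value is silently dropped rather than rejected. A trivial standalone sketch of that behaviour (dicts stand in for the Platform model; names are illustrative):

def clean_zone(value, platform: dict):
    """Keep the zone/domain only if the platform allows a gateway."""
    return value if platform.get('gateway_enabled') else None

linux = {'name': 'Linux', 'gateway_enabled': True}
website = {'name': 'Website', 'gateway_enabled': False}
print(clean_zone('prod-zone', linux))    # 'prod-zone'
print(clean_zone('prod-zone', website))  # None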

View File

@ -1,22 +0,0 @@
from django.utils.translation import gettext_lazy as _
from assets.models import DirectoryService
from .common import AssetSerializer
__all__ = ['DSSerializer']
class DSSerializer(AssetSerializer):
class Meta(AssetSerializer.Meta):
model = DirectoryService
fields = AssetSerializer.Meta.fields + [
'domain_name',
]
extra_kwargs = {
**AssetSerializer.Meta.extra_kwargs,
'domain_name': {
'help_text': _('The domain part used by the directory service (e.g., AD) and appended to '
'the username during login, such as example.com in user@example.com.'),
'label': _('Domain name')
}
}

View File

@ -6,7 +6,7 @@ class HostGatheredInfoSerializer(serializers.Serializer):
vendor = serializers.CharField(max_length=64, required=False, allow_blank=True, label=_('Vendor'))
model = serializers.CharField(max_length=54, required=False, allow_blank=True, label=_('Model'))
sn = serializers.CharField(max_length=128, required=False, allow_blank=True, label=_('Serial number'))
cpu_model = serializers.CharField(allow_blank=True, required=False, label=_('CPU model'))
cpu_model = serializers.CharField(max_length=64, allow_blank=True, required=False, label=_('CPU model'))
cpu_count = serializers.CharField(max_length=64, required=False, allow_blank=True, label=_('CPU count'))
cpu_cores = serializers.CharField(max_length=64, required=False, allow_blank=True, label=_('CPU cores'))
cpu_vcpus = serializers.CharField(max_length=64, required=False, allow_blank=True, label=_('CPU vcpus'))
@ -17,10 +17,7 @@ class HostGatheredInfoSerializer(serializers.Serializer):
distribution_version = serializers.CharField(max_length=16, allow_blank=True, required=False, label=_('OS version'))
arch = serializers.CharField(max_length=16, allow_blank=True, required=False, label=_('OS arch'))
gpu_model = serializers.CharField(allow_blank=True, required=False, label=_('GPU model'))
category_gathered_serializer_map = {
'host': HostGatheredInfoSerializer,
'ds': HostGatheredInfoSerializer,
}

Some files were not shown because too many files have changed in this diff.