Mirror of https://github.com/jumpserver/jumpserver.git, synced 2025-04-27 19:17:01 +00:00

Compare commits (139 commits)

Commit SHA1 list (author and date columns were not captured):
a9d455e867, d06d26ac54, e992c44e11, 24fe058fd9, a3fef9cc54, 471053e62a, dc6308b030,
f016ae6161, 14a8d877e0, ddf20570a1, 1ad9616b7f, d7bc6bb201, f855043468, 3159a4e794,
57fcebfdd3, c500bb4e4c, fd062b0da6, bcb112d5c6, 533dbf316c, 9cce94b709, 8b815d812b,
a168fc8a62, faae1a09d1, 26e819e120, 79579654a1, 6bc1c5bd50, 36f312b943, 11811c453b,
12fadeec58, b49fd21e08, 9b982eb592, 31652ef5b1, 8fef18b991, c804c053d2, bef2282604,
cabc069045, 99c9a021b7, 6cb3cc1f29, 67422ef4ba, 3d6d2af268, ee97e45cc3, 0131eaa6db,
eaa390fd6f, e2b8fd0d40, 2aace05099, 1ee70af93d, fa70fb2921, 01a6019022, 5c61a11d82,
67f3341310, cb49e26387, 314da330c0, f1c98fda34, 1fdd1036d3, e286997090, ce3daf5496,
631570b819, 9b1bff0847, ee8a2afe16, 1a01c0537c, 64393fe695, 11ef4fab4e, 9f8256f885,
5390fbacec, 8b9fe3c72b, 20070e0647, 47b72cb35e, 2ca0e9a5a2, 3b2ac101c8, 6795f036dd,
aaa1f48258, 53c5bab203, 1254d28463, d6b1a577fc, 5ab85d3561, 467f4c5d4f, f2404319af,
bbeadf7dbe, 941bd9b3f4, 37a307a9d0, 528f9045d0, a317549a01, 0f5681de7d, a7c514f8d8,
75ea0079a2, 4cc1687bf8, 76e57b9a3e, ba3bce1e2e, 45f0343cfa, acaa4cf2d5, 3f452daee8,
5e25361ee8, 7b7604e14d, f9037878c3, 29ddfcac17, 519ec65ad4, 1f60e328b6, e8e0ea920b,
4fd8efd043, 623c800d31, d2c6e3c7a6, dc5883576d, 0a9c9fb227, 15a1a58eca, 782401ef86,
8abcd201bc, cdbc10ac72, ceeef890e6, dc8a172884, 62115e43bb, 5eced85e69, ec99b17b76,
84569720c3, 65984d38f1, f6913ac63c, 514b2cdfc5, b55000663e, 9ed822bb3e, ea599d7695,
01c5d68b35, 2e2c331941, 266ea9b858, 5f2e838342, 544ad5532b, d22d715ee7, dd2366532c,
9667a3d340, c8e6e5d38c, 9d1047fae2, 28f97d746d, be72344c63, d3176b68a8, 5411f65546,
e3ba468004, a03a11efa4, d344495417, 9412bd0331, 8d73ddb1cd, 7fe56a5e1a
@@ -8,4 +8,6 @@ celerybeat.pid
.vagrant/
apps/xpack/.git
.history/
.idea
.idea
.venv/
.env
.gitattributes (vendored, 4 changed lines)
@@ -1,4 +0,0 @@
*.mmdb filter=lfs diff=lfs merge=lfs -text
*.mo filter=lfs diff=lfs merge=lfs -text
*.ipdb filter=lfs diff=lfs merge=lfs -text
leak_passwords.db filter=lfs diff=lfs merge=lfs -text
.github/dependabot.yml (vendored, new file, 10 lines)
@@ -0,0 +1,10 @@
version: 2
updates:
  - package-ecosystem: "uv"
    directory: "/"
    schedule:
      interval: "weekly"
      day: "monday"
      time: "09:30"
      timezone: "Asia/Shanghai"
    target-branch: dev
.github/workflows/translate-readme.yml (vendored, 7 changed lines)
@@ -2,10 +2,14 @@ name: Translate README
on:
  workflow_dispatch:
    inputs:
      source_readme:
        description: "Source README"
        required: false
        default: "./readmes/README.en.md"
      target_langs:
        description: "Target Languages"
        required: false
        default: "zh-hans,zh-hant,ja,pt-br"
        default: "zh-hans,zh-hant,ja,pt-br,es,ru"
      gen_dir_path:
        description: "Generate Dir Name"
        required: false
@@ -34,6 +38,7 @@ jobs:
      GITHUB_TOKEN: ${{ secrets.PRIVATE_TOKEN }}
      OPENAI_API_KEY: ${{ secrets.GPT_API_TOKEN }}
      GPT_MODE: ${{ github.event.inputs.gpt_mode }}
      SOURCE_README: ${{ github.event.inputs.source_readme }}
      TARGET_LANGUAGES: ${{ github.event.inputs.target_langs }}
      PUSH_BRANCH: ${{ github.event.inputs.push_branch }}
      GEN_DIR_PATH: ${{ github.event.inputs.gen_dir_path }}
.gitignore (vendored, 3 changed lines)
@@ -46,3 +46,6 @@ test.py
.test/
*.mo
apps.iml
*.db
*.mmdb
*.ipdb
@@ -1,4 +1,4 @@
FROM jumpserver/core-base:20250224_065619 AS stage-build
FROM jumpserver/core-base:20250427_062456 AS stage-build

ARG VERSION
@@ -1,6 +1,6 @@
FROM python:3.11-slim-bullseye
ARG TARGETARCH

COPY --from=ghcr.io/astral-sh/uv:0.6.14 /uv /uvx /usr/local/bin/
# Install APT dependencies
ARG DEPENDENCIES=" \
    ca-certificates \
@@ -43,18 +43,19 @@ WORKDIR /opt/jumpserver
ARG PIP_MIRROR=https://pypi.org/simple
ENV POETRY_PYPI_MIRROR_URL=${PIP_MIRROR}
ENV ANSIBLE_COLLECTIONS_PATHS=/opt/py3/lib/python3.11/site-packages/ansible_collections
ENV LANG=en_US.UTF-8 \
    PATH=/opt/py3/bin:$PATH

ENV UV_LINK_MODE=copy

RUN --mount=type=cache,target=/root/.cache \
    --mount=type=bind,source=poetry.lock,target=poetry.lock \
    --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
    --mount=type=bind,source=utils/clean_site_packages.sh,target=clean_site_packages.sh \
    --mount=type=bind,source=requirements/clean_site_packages.sh,target=clean_site_packages.sh \
    --mount=type=bind,source=requirements/collections.yml,target=collections.yml \
    --mount=type=bind,source=requirements/static_files.sh,target=utils/static_files.sh \
    set -ex \
    && python3 -m venv /opt/py3 \
    && pip install poetry poetry-plugin-pypi-mirror -i ${PIP_MIRROR} \
    && . /opt/py3/bin/activate \
    && poetry config virtualenvs.create false \
    && poetry install --no-cache --only main \
    && ansible-galaxy collection install -r collections.yml --force --ignore-certs \
    && bash clean_site_packages.sh \
    && poetry cache clear pypi --all
    && uv venv \
    && uv pip install -i${PIP_MIRROR} -r pyproject.toml \
    && ln -sf $(pwd)/.venv /opt/py3 \
    && bash utils/static_files.sh \
    && bash clean_site_packages.sh
@@ -24,11 +24,7 @@ RUN set -ex \
WORKDIR /opt/jumpserver

ARG PIP_MIRROR=https://pypi.org/simple
ENV POETRY_PYPI_MIRROR_URL=${PIP_MIRROR}
COPY poetry.lock pyproject.toml ./
RUN set -ex \
    && . /opt/py3/bin/activate \
    && pip install poetry poetry-plugin-pypi-mirror -i ${PIP_MIRROR} \
    && poetry install --only xpack \
    && poetry cache clear pypi --all

RUN set -ex \
    && uv pip install -i${PIP_MIRROR} --group xpack
README.md (33 changed lines)
@@ -1,6 +1,6 @@
<div align="center">
  <a name="readme-top"></a>
  <a href="https://jumpserver.org/index-en.html"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a>
  <a href="https://jumpserver.com" target="_blank"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a>

## An open-source PAM tool (Bastion Host)

@@ -10,7 +10,7 @@
[![][github-release-shield]][github-release-link]
[![][github-stars-shield]][github-stars-link]

[English](/README.md) · [中文(简体)](/readmes/README.zh-hans.md) · [中文(繁體)](/readmes/README.zh-hant.md) · [日本語](/readmes/README.ja.md) · [Português (Brasil)](/readmes/README.pt-br.md)
[English](/README.md) · [中文(简体)](/readmes/README.zh-hans.md) · [中文(繁體)](/readmes/README.zh-hant.md) · [日本語](/readmes/README.ja.md) · [Português (Brasil)](/readmes/README.pt-br.md) · [Español](/readmes/README.es.md) · [Русский](/readmes/README.ru.md)

</div>
<br/>
@@ -19,7 +19,13 @@

JumpServer is an open-source Privileged Access Management (PAM) tool that provides DevOps and IT teams with on-demand and secure access to SSH, RDP, Kubernetes, Database and RemoteApp endpoints through a web browser.

<picture>
  <source media="(prefers-color-scheme: light)" srcset="https://github.com/user-attachments/assets/dd612f3d-c958-4f84-b164-f31b75454d7f">
  <source media="(prefers-color-scheme: dark)" srcset="https://github.com/user-attachments/assets/28676212-2bc4-4a9f-ae10-3be9320647e3">
  <img src="https://github.com/user-attachments/assets/dd612f3d-c958-4f84-b164-f31b75454d7f" alt="Theme-based Image">
</picture>

## Quickstart

@@ -36,18 +42,19 @@ Access JumpServer in your browser at `http://your-jumpserver-ip/`
[](https://www.youtube.com/watch?v=UlGYRbKrpgY "JumpServer Quickstart")

## Screenshots

<table style="border-collapse: collapse; border: 1px solid black;">
  <tr>
    <td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/99fabe5b-0475-4a53-9116-4c370a1426c4" alt="JumpServer Console" /></td>
    <td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/a424d731-1c70-4108-a7d8-5bbf387dda9a" alt="JumpServer Audits" /></td>
    <td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/user-attachments/assets/7c1f81af-37e8-4f07-8ac9-182895e1062e" alt="JumpServer PAM" /></td>
  </tr>

  <tr>
    <td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/a424d731-1c70-4108-a7d8-5bbf387dda9a" alt="JumpServer Audits" /></td>
    <td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/393d2c27-a2d0-4dea-882d-00ed509e00c9" alt="JumpServer Workbench" /></td>
  </tr>
  <tr>
    <td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/user-attachments/assets/eaa41f66-8cc8-4f01-a001-0d258501f1c9" alt="JumpServer RBAC" /></td>
    <td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/3a2611cd-8902-49b8-b82b-2a6dac851f3e" alt="JumpServer Settings" /></td>
  </tr>

  <tr>
    <td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/1e236093-31f7-4563-8eb1-e36d865f1568" alt="JumpServer SSH" /></td>
    <td style="padding: 5px;background-color:#fff;"><img src= "https://github.com/jumpserver/jumpserver/assets/32935519/69373a82-f7ab-41e8-b763-bbad2ba52167" alt="JumpServer RDP" /></td>
@@ -69,9 +76,9 @@ JumpServer consists of multiple key components, which collectively form the func
| [KoKo](https://github.com/jumpserver/koko) | <a href="https://github.com/jumpserver/koko/releases"><img alt="Koko release" src="https://img.shields.io/github/release/jumpserver/koko.svg" /></a> | JumpServer Character Protocol Connector |
| [Lion](https://github.com/jumpserver/lion) | <a href="https://github.com/jumpserver/lion/releases"><img alt="Lion release" src="https://img.shields.io/github/release/jumpserver/lion.svg" /></a> | JumpServer Graphical Protocol Connector |
| [Chen](https://github.com/jumpserver/chen) | <a href="https://github.com/jumpserver/chen/releases"><img alt="Chen release" src="https://img.shields.io/github/release/jumpserver/chen.svg" /> | JumpServer Web DB |
| [Razor](https://github.com/jumpserver/razor) | <img alt="Chen" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE RDP Proxy Connector |
| [Tinker](https://github.com/jumpserver/tinker) | <img alt="Tinker" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Remote Application Connector (Windows) |
| [Tinker](https://github.com/jumpserver/tinker) | <img alt="Tinker" src="https://img.shields.io/badge/release-private-red" /> | JumpServer Remote Application Connector (Windows) |
| [Panda](https://github.com/jumpserver/Panda) | <img alt="Panda" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Remote Application Connector (Linux) |
| [Razor](https://github.com/jumpserver/razor) | <img alt="Chen" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE RDP Proxy Connector |
| [Magnus](https://github.com/jumpserver/magnus) | <img alt="Magnus" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Database Proxy Connector |
| [Nec](https://github.com/jumpserver/nec) | <img alt="Nec" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE VNC Proxy Connector |
| [Facelive](https://github.com/jumpserver/facelive) | <img alt="Facelive" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Facial Recognition |
@@ -81,12 +88,6 @@ JumpServer consists of multiple key components, which collectively form the func

Welcome to submit PR to contribute. Please refer to [CONTRIBUTING.md][contributing-link] for guidelines.

## Security

JumpServer is a mission critical product. Please refer to the Basic Security Recommendations for installation and deployment. If you encounter any security-related issues, please contact us directly:

- Email: support@fit2cloud.com

## License

Copyright (c) 2014-2025 FIT2CLOUD, All rights reserved.
@@ -115,5 +116,3 @@ Unless required by applicable law or agreed to in writing, software distributed
[docker-shield]: https://img.shields.io/docker/pulls/jumpserver/jms_all.svg
[license-shield]: https://img.shields.io/github/license/jumpserver/jumpserver
[discord-shield]: https://img.shields.io/discord/1194233267294052363?style=flat&logo=discord&logoColor=%23f5f5f5&labelColor=%235462eb&color=%235462eb

<!-- Image link -->
@@ -5,8 +5,7 @@ JumpServer 是一款正在成长的安全产品, 请参考 [基本安全建议
如果你发现安全问题,请直接联系我们,我们携手让世界更好:

- ibuler@fit2cloud.com
- support@fit2cloud.com
- 400-052-0755
- support@lxware.hk


# Security Policy

@@ -16,6 +15,5 @@ JumpServer is a security product, The installation and development should follow
All security bugs should be reported to the contact as below:

- ibuler@fit2cloud.com
- support@fit2cloud.com
- 400-052-0755
- support@lxware.hk
@@ -46,6 +46,16 @@ class AccountViewSet(OrgBulkModelViewSet):
    }
    export_as_zip = True

    def get_queryset(self):
        queryset = super().get_queryset()
        asset_id = self.request.query_params.get('asset') or self.request.query_params.get('asset_id')
        if not asset_id:
            return queryset

        asset = get_object_or_404(Asset, pk=asset_id)
        queryset = asset.all_accounts.all()
        return queryset

    @action(methods=['get'], detail=False, url_path='su-from-accounts')
    def su_from_accounts(self, request, *args, **kwargs):
        account_id = request.query_params.get('account')
@@ -117,7 +127,7 @@ class AccountViewSet(OrgBulkModelViewSet):
                self.model.objects.create(**account_data)
                success_count += 1
            except Exception as e:
                logger.debug(f'{ "Move" if move else "Copy" } to assets error: {e}')
                logger.debug(f'{"Move" if move else "Copy"} to assets error: {e}')
                creation_results[asset] = {'error': _('Account already exists'), 'state': 'error'}

        results = [{'asset': str(asset), **res} for asset, res in creation_results.items()]
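For context, a minimal sketch (not part of the changeset) of how the new `asset` query parameter in `get_queryset` above would be exercised from a client. The base URL, token, and the `/api/v1/accounts/accounts/` path are assumptions; when `?asset=<uuid>` is supplied, the view switches to `asset.all_accounts.all()`, which in this changeset appears to widen the result to accounts reachable through the asset's directory service.

```python
# Minimal sketch (not project code): list accounts filtered by asset.
import requests

BASE = "https://jms.example.com"                      # hypothetical deployment
HEADERS = {"Authorization": "Token <private-token>"}  # placeholder credential

resp = requests.get(
    f"{BASE}/api/v1/accounts/accounts/",   # assumed endpoint path
    params={"asset": "<asset-uuid>"},      # triggers the new asset branch in get_queryset()
    headers=HEADERS,
    timeout=10,
)
print(resp.status_code)
```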
@@ -62,8 +62,7 @@ class IntegrationApplicationViewSet(OrgBulkModelViewSet):
    )
    def get_once_secret(self, request, *args, **kwargs):
        instance = self.get_object()
        secret = instance.get_secret()
        return Response(data={'id': instance.id, 'secret': secret})
        return Response(data={'id': instance.id, 'secret': instance.secret})

    @action(['GET'], detail=False, url_path='account-secret',
            permission_classes=[RBACPermission])
@@ -17,7 +17,7 @@ from orgs.mixins import generics
__all__ = [
    'AutomationAssetsListApi', 'AutomationRemoveAssetApi',
    'AutomationAddAssetApi', 'AutomationNodeAddRemoveApi',
    'AutomationExecutionViewSet', 'RecordListMixin'
    'AutomationExecutionViewSet'
]


@@ -39,9 +39,10 @@ class AutomationAssetsListApi(generics.ListAPIView):
        return assets


class AutomationRemoveAssetApi(generics.RetrieveUpdateAPIView):
class AutomationRemoveAssetApi(generics.UpdateAPIView):
    model = BaseAutomation
    serializer_class = serializers.UpdateAssetSerializer
    http_method_names = ['patch']

    def update(self, request, *args, **kwargs):
        instance = self.get_object()
@@ -56,9 +57,10 @@ class AutomationRemoveAssetApi(generics.RetrieveUpdateAPIView):
        return Response({'msg': 'ok'})


class AutomationAddAssetApi(generics.RetrieveUpdateAPIView):
class AutomationAddAssetApi(generics.UpdateAPIView):
    model = BaseAutomation
    serializer_class = serializers.UpdateAssetSerializer
    http_method_names = ['patch']

    def update(self, request, *args, **kwargs):
        instance = self.get_object()
@@ -72,9 +74,10 @@ class AutomationAddAssetApi(generics.RetrieveUpdateAPIView):
        return Response({"error": serializer.errors})


class AutomationNodeAddRemoveApi(generics.RetrieveUpdateAPIView):
class AutomationNodeAddRemoveApi(generics.UpdateAPIView):
    model = BaseAutomation
    serializer_class = serializers.UpdateNodeSerializer
    http_method_names = ['patch']

    def update(self, request, *args, **kwargs):
        action_params = ['add', 'remove']
@@ -124,12 +127,3 @@ class AutomationExecutionViewSet(
        execution = self.get_object()
        report = execution.manager.gen_report()
        return HttpResponse(report)


class RecordListMixin:
    def list(self, request, *args, **kwargs):
        try:
            response = super().list(request, *args, **kwargs)
        except Exception as e:
            response = Response({'detail': str(e)}, status=status.HTTP_400_BAD_REQUEST)
        return response
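As a usage illustration (not part of the changeset): these asset/node endpoints are now plain `UpdateAPIView`s restricted to PATCH via `http_method_names = ['patch']`, so retrieving the automation through them is no longer possible and a GET returns 405. The URL pattern and payload fields below are assumptions for illustration only.

```python
# Minimal sketch (not project code): PATCH the add-assets endpoint of an automation.
import requests

url = "https://jms.example.com/api/v1/accounts/change-secret-automations/<id>/assets/"  # hypothetical route
payload = {"action": "add", "assets": ["<asset-uuid>"]}                                 # hypothetical serializer fields
resp = requests.patch(url, json=payload,
                      headers={"Authorization": "Token <private-token>"}, timeout=10)
print(resp.status_code)   # a GET against the same URL would now yield 405 Method Not Allowed
```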
@@ -16,7 +16,7 @@ from orgs.mixins.api import OrgBulkModelViewSet, OrgGenericViewSet
from rbac.permissions import RBACPermission
from .base import (
    AutomationAssetsListApi, AutomationRemoveAssetApi, AutomationAddAssetApi,
    AutomationNodeAddRemoveApi, AutomationExecutionViewSet, RecordListMixin
    AutomationNodeAddRemoveApi, AutomationExecutionViewSet
)

__all__ = [
@@ -35,7 +35,7 @@ class ChangeSecretAutomationViewSet(OrgBulkModelViewSet):
    serializer_class = serializers.ChangeSecretAutomationSerializer


class ChangeSecretRecordViewSet(RecordListMixin, mixins.ListModelMixin, OrgGenericViewSet):
class ChangeSecretRecordViewSet(mixins.ListModelMixin, OrgGenericViewSet):
    filterset_class = ChangeSecretRecordFilterSet
    permission_classes = [RBACPermission, IsValidLicense]
    search_fields = ('asset__address', 'account__username')
@@ -147,6 +147,7 @@ class CheckAccountEngineViewSet(JMSModelViewSet):
    serializer_class = serializers.CheckAccountEngineSerializer
    permission_classes = [RBACPermission, IsValidLicense]
    perm_model = CheckAccountEngine
    http_method_names = ['get', 'options']

    def get_queryset(self):
        return CheckAccountEngine.get_default_engines()
@@ -9,7 +9,7 @@ from accounts.models import PushAccountAutomation, PushSecretRecord
from orgs.mixins.api import OrgBulkModelViewSet, OrgGenericViewSet
from .base import (
    AutomationAssetsListApi, AutomationRemoveAssetApi, AutomationAddAssetApi,
    AutomationNodeAddRemoveApi, AutomationExecutionViewSet, RecordListMixin
    AutomationNodeAddRemoveApi, AutomationExecutionViewSet
)

__all__ = [
@@ -42,7 +42,7 @@ class PushAccountExecutionViewSet(AutomationExecutionViewSet):
        return queryset


class PushAccountRecordViewSet(RecordListMixin, mixins.ListModelMixin, OrgGenericViewSet):
class PushAccountRecordViewSet(mixins.ListModelMixin, OrgGenericViewSet):
    filterset_class = PushAccountRecordFilterSet
    search_fields = ('asset__address', 'account__username')
    ordering_fields = ('date_finished',)
@@ -69,7 +69,7 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
            return

        asset = privilege_account.asset
        accounts = asset.accounts.all()
        accounts = asset.all_accounts.all()
        accounts = accounts.filter(id__in=self.account_ids, secret_reset=True)

        if self.secret_type:
@@ -94,6 +94,7 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
        h['account'] = {
            'name': account.name,
            'username': account.username,
            'full_username': account.full_username,
            'secret_type': secret_type,
            'secret': account.escape_jinja2_syntax(new_secret),
            'private_key_path': private_key_path,
@@ -41,6 +41,7 @@
    password: "{{ account.secret | password_hash('des') }}"
    update_password: always
  ignore_errors: true
  register: change_secret_result
  when: account.secret_type == "password"

- name: "Get home directory for {{ account.username }}"
@@ -83,6 +84,7 @@
    user: "{{ account.username }}"
    key: "{{ account.secret }}"
    exclusive: "{{ ssh_params.exclusive }}"
  register: change_secret_result
  when: account.secret_type == "ssh_key"

- name: Refresh connection
@@ -101,7 +103,9 @@
    become_password: "{{ account.become.ansible_password | default('') }}"
    become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
    old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
  when: account.secret_type == "password" and check_conn_after_change
  when:
    - account.secret_type == "password"
    - check_conn_after_change or change_secret_result.failed | default(false)
  delegate_to: localhost

- name: "Verify {{ account.username }} SSH KEY (paramiko)"
@@ -112,5 +116,7 @@
    login_private_key_path: "{{ account.private_key_path }}"
    gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
    old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
  when: account.secret_type == "ssh_key" and check_conn_after_change
  when:
    - account.secret_type == "ssh_key"
    - check_conn_after_change or change_secret_result.failed | default(false)
  delegate_to: localhost
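The reworked `when:` blocks above change the verification trigger: previously the connection check ran only when `check_conn_after_change` was enabled; now it also runs when the change task itself registered a failure. A minimal Python sketch of that boolean (not project code):

```python
# Minimal sketch (not project code) mirroring the new `when:` condition.
def should_verify(secret_type: str, expected_type: str,
                  check_conn_after_change: bool, change_result: dict) -> bool:
    failed = change_result.get("failed", False)   # mirrors `| default(false)`
    return secret_type == expected_type and (check_conn_after_change or failed)

# Verification now fires even with check_conn_after_change disabled
# when the change task reported a failure.
assert should_verify("password", "password", False, {"failed": True})
assert not should_verify("password", "password", False, {})
```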
@@ -41,6 +41,7 @@
    password: "{{ account.secret | password_hash('sha512') }}"
    update_password: always
  ignore_errors: true
  register: change_secret_result
  when: account.secret_type == "password"

- name: "Get home directory for {{ account.username }}"
@@ -83,6 +84,7 @@
    user: "{{ account.username }}"
    key: "{{ account.secret }}"
    exclusive: "{{ ssh_params.exclusive }}"
  register: change_secret_result
  when: account.secret_type == "ssh_key"

- name: Refresh connection
@@ -101,7 +103,9 @@
    become_password: "{{ account.become.ansible_password | default('') }}"
    become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
    old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
  when: account.secret_type == "password" and check_conn_after_change
  when:
    - account.secret_type == "password"
    - check_conn_after_change or change_secret_result.failed | default(false)
  delegate_to: localhost

- name: "Verify {{ account.username }} SSH KEY (paramiko)"
@@ -112,5 +116,7 @@
    login_private_key_path: "{{ account.private_key_path }}"
    gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
    old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
  when: account.secret_type == "ssh_key" and check_conn_after_change
  when:
    - account.secret_type == "ssh_key"
    - check_conn_after_change or change_secret_result.failed | default(false)
  delegate_to: localhost
@@ -0,0 +1,27 @@
- hosts: demo
  gather_facts: no
  tasks:
    - name: Test privileged account
      ansible.windows.win_ping:

    - name: Change password
      community.windows.win_domain_user:
        name: "{{ account.username }}"
        password: "{{ account.secret }}"
        update_password: always
        password_never_expires: yes
        state: present
        groups: "{{ params.groups }}"
        groups_action: add
      ignore_errors: true
      when: account.secret_type == "password"

    - name: Refresh connection
      ansible.builtin.meta: reset_connection

    - name: Verify password
      ansible.windows.win_ping:
      vars:
        ansible_user: "{{ account.full_username }}"
        ansible_password: "{{ account.secret }}"
      when: account.secret_type == "password" and check_conn_after_change
@@ -0,0 +1,27 @@
id: change_secret_ad_windows
name: "{{ 'Windows account change secret' | trans }}"
version: 1
method: change_secret
category:
  - ds
type:
  - windows_ad
params:
  - name: groups
    type: str
    label: '用户组'
    default: 'Users,Remote Desktop Users'
    help_text: "{{ 'Params groups help text' | trans }}"


i18n:
  Windows account change secret:
    zh: '使用 Ansible 模块 win_domain_user 执行 Windows 账号改密'
    ja: 'Ansible win_domain_user モジュールを使用して Windows アカウントのパスワード変更'
    en: 'Using Ansible module win_domain_user to change Windows account secret'

  Params groups help text:
    zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
    ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
    en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
@@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a2805a0264fc07ae597704841ab060edef8bf74654f525bc778cb9195d8cad0e
size 2547712
@@ -13,6 +13,7 @@ def parse_date(date_str, default=None):
    formats = [
        '%Y/%m/%d %H:%M:%S',
        '%Y-%m-%dT%H:%M:%S',
        '%Y-%m-%d %H:%M:%S',
        '%d-%m-%Y %H:%M:%S',
        '%Y/%m/%d',
        '%d-%m-%Y',
@@ -26,7 +27,6 @@
    return default


# TODO 后期会挪到 playbook 中
class GatherAccountsFilter:
    def __init__(self, tp):
        self.tp = tp
@@ -208,14 +208,35 @@ class GatherAccountsFilter:
                key, value = parts
                user_info[key.strip()] = value.strip()
            detail = {'groups': user_info.get('Global Group memberships', ''), }
            user = {
                'username': user_info.get('User name', ''),
                'date_password_change': parse_date(user_info.get('Password last set', '')),
                'date_password_expired': parse_date(user_info.get('Password expires', '')),
                'date_last_login': parse_date(user_info.get('Last logon', '')),

            username = user_info.get('User name')
            if not username:
                continue

            result[username] = {
                'username': username,
                'date_password_change': parse_date(user_info.get('Password last set')),
                'date_password_expired': parse_date(user_info.get('Password expires')),
                'date_last_login': parse_date(user_info.get('Last logon')),
                'groups': detail,
            }
        return result

    @staticmethod
    def windows_ad_filter(info):
        result = {}
        for user_info in info['user_details']:
            detail = {'groups': user_info.get('GlobalGroupMemberships', ''), }
            username = user_info.get('SamAccountName')
            if not username:
                continue
            result[username] = {
                'username': username,
                'date_password_change': parse_date(user_info.get('PasswordLastSet')),
                'date_password_expired': parse_date(user_info.get('PasswordExpires')),
                'date_last_login': parse_date(user_info.get('LastLogonDate')),
                'groups': detail,
            }
            result[user['username']] = user
        return result

    @staticmethod
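A minimal sketch (not project code) of what the new `windows_ad_filter` produces for one gathered AD user. `parse_date` is stubbed here with a few of the formats from the first hunk, and the sample `user_details` entry mirrors the fields emitted by the `Get-ADUser` playbook elsewhere in this changeset:

```python
# Minimal sketch (not project code): shape of the windows_ad_filter result.
from datetime import datetime

def parse_date(value, default=None):
    if not value:
        return default
    for fmt in ('%Y-%m-%d %H:%M:%S', '%Y/%m/%d %H:%M:%S', '%Y-%m-%dT%H:%M:%S'):
        try:
            return datetime.strptime(value, fmt)
        except ValueError:
            continue
    return default

user_details = [{
    'SamAccountName': 'alice',
    'PasswordLastSet': '2025-04-01 09:30:00',
    'PasswordExpires': '2025-07-01 09:30:00',
    'LastLogonDate': '2025-04-20 08:00:00',
    'GlobalGroupMemberships': ['Domain Users'],
}]

result = {}
for info in user_details:
    username = info.get('SamAccountName')
    if not username:
        continue
    result[username] = {
        'username': username,
        'date_password_change': parse_date(info.get('PasswordLastSet')),
        'date_password_expired': parse_date(info.get('PasswordExpires')),
        'date_last_login': parse_date(info.get('LastLogonDate')),
        'groups': {'groups': info.get('GlobalGroupMemberships', '')},
    }

print(result['alice']['date_password_change'])  # 2025-04-01 09:30:00
```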
@@ -4,6 +4,7 @@
- name: Run net user command to get all users
  win_shell: net user
  register: user_list_output
  failed_when: false

- name: Parse all users from net user command
  set_fact:
@@ -2,10 +2,13 @@ id: gather_accounts_windows
name: "{{ 'Windows account gather' | trans }}"
version: 1
method: gather_accounts
category: host
category:
  - host

type:
  - windows


i18n:
  Windows account gather:
    zh: 使用命令 net user 收集 Windows 账号
@@ -0,0 +1,74 @@
- hosts: demo
  gather_facts: no
  tasks:
    - name: Import ActiveDirectory module
      win_shell: Import-Module ActiveDirectory
      args:
        warn: false

    - name: Get the SamAccountName list of all AD users
      win_shell: |
        Import-Module ActiveDirectory
        Get-ADUser -Filter * | Select-Object -ExpandProperty SamAccountName
      register: ad_user_list

    - name: Set the all_users variable
      set_fact:
        all_users: "{{ ad_user_list.stdout_lines }}"

    - name: Get detailed information for each user
      win_shell: |
        Import-Module ActiveDirectory

        $user = Get-ADUser -Identity {{ item }} -Properties Name, SamAccountName, Enabled, LastLogonDate, PasswordLastSet, msDS-UserPasswordExpiryTimeComputed, MemberOf

        $globalGroups = @()
        if ($user.MemberOf) {
          $globalGroups = $user.MemberOf | ForEach-Object {
            try {
              $group = Get-ADGroup $_ -ErrorAction Stop
              if ($group.GroupScope -eq 'Global') { $group.Name }
            } catch {
            }
          }
        }

        $passwordExpiry = $null
        $expiryRaw = $user.'msDS-UserPasswordExpiryTimeComputed'
        if ($expiryRaw) {
          try {
            $passwordExpiry = [datetime]::FromFileTime($expiryRaw)
          } catch {
            $passwordExpiry = $null
          }
        }

        $output = [PSCustomObject]@{
          Name = $user.Name
          SamAccountName = $user.SamAccountName
          Enabled = $user.Enabled
          LastLogonDate = if ($user.LastLogonDate) { $user.LastLogonDate.ToString("yyyy-MM-dd HH:mm:ss") } else { $null }
          PasswordLastSet = if ($user.PasswordLastSet) { $user.PasswordLastSet.ToString("yyyy-MM-dd HH:mm:ss") } else { $null }
          PasswordExpires = if ($passwordExpiry) { $passwordExpiry.ToString("yyyy-MM-dd HH:mm:ss") } else { $null }
          GlobalGroupMemberships = $globalGroups
        }

        $output | ConvertTo-Json -Depth 3
      loop: "{{ all_users }}"
      register: ad_user_details
      ignore_errors: yes


    - set_fact:
        info:
          user_details: >-
            {{
              ad_user_details.results
              | selectattr('rc', 'equalto', 0)
              | map(attribute='stdout')
              | select('truthy')
              | map('from_json')
            }}

    - debug:
        var: info
@@ -0,0 +1,15 @@
id: gather_accounts_windows_ad
name: "{{ 'Windows account gather' | trans }}"
version: 1
method: gather_accounts
category:
  - ds

type:
  - windows_ad

i18n:
  Windows account gather:
    zh: 使用命令 Get-ADUser 收集 Windows 账号
    ja: コマンド Get-ADUser を使用して Windows アカウントを収集する
    en: Using command Get-ADUser to gather accounts
@@ -1,6 +1,6 @@
import time
from collections import defaultdict

import time
from django.utils import timezone

from accounts.const import AutomationTypes
@@ -222,6 +222,7 @@ class GatherAccountsManager(AccountBasePlaybookManager):
    def _collect_asset_account_info(self, asset, info):
        result = self._filter_success_result(asset.type, info)
        accounts = []

        for username, info in result.items():
            self.asset_usernames_mapper[str(asset.id)].add(username)

@@ -373,6 +374,7 @@ class GatherAccountsManager(AccountBasePlaybookManager):

        for asset, accounts_data in self.asset_account_info.items():
            ori_users = self.ori_asset_usernames[str(asset.id)]
            need_analyser_gather_account = []
            with tmp_to_org(asset.org_id):
                for d in accounts_data:
                    username = d["username"]
@@ -385,10 +387,11 @@ class GatherAccountsManager(AccountBasePlaybookManager):
                        ga = ori_account
                        self.update_gathered_account(ori_account, d)
                    ori_found = username in ori_users
                    risk_analyser.analyse_risk(asset, ga, d, ori_found)

                    need_analyser_gather_account.append((asset, ga, d, ori_found))
                self.create_gathered_account.finish()
                self.update_gathered_account.finish()
                for analysis_data in need_analyser_gather_account:
                    risk_analyser.analyse_risk(*analysis_data)
                self.update_gather_accounts_status(asset)
                if not self.is_sync_account:
                    continue
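The gather manager now queues `(asset, gathered_account, data, ori_found)` tuples and runs `analyse_risk` only after the bulk create/update helpers have finished, rather than analysing each account inline. A minimal sketch of that collect-then-analyse ordering (illustrative stand-in names, not project code):

```python
# Minimal sketch (not project code): defer analysis until bulk writes have flushed.
def flush(label, buffer):
    print(f"{label}: flushed {len(buffer)} gathered accounts")
    buffer.clear()

def analyse_risk(asset, gathered_account, data, ori_found):
    print(f"analyse {gathered_account}@{asset} (previously known: {ori_found})")

to_create, to_update, pending_analysis = [], [], []

for asset, account, data, ori_found in [("web-1", "root", {}, True), ("web-1", "deploy", {}, False)]:
    (to_update if ori_found else to_create).append(account)
    pending_analysis.append((asset, account, data, ori_found))   # queued, not analysed here

flush("create", to_create)
flush("update", to_update)
for item in pending_analysis:
    analyse_risk(*item)
```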
@@ -41,6 +41,7 @@
    password: "{{ account.secret | password_hash('des') }}"
    update_password: always
  ignore_errors: true
  register: change_secret_result
  when: account.secret_type == "password"

- name: "Get home directory for {{ account.username }}"
@@ -83,6 +84,7 @@
    user: "{{ account.username }}"
    key: "{{ account.secret }}"
    exclusive: "{{ ssh_params.exclusive }}"
  register: change_secret_result
  when: account.secret_type == "ssh_key"

- name: Refresh connection
@@ -101,7 +103,9 @@
    become_password: "{{ account.become.ansible_password | default('') }}"
    become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
    old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
  when: account.secret_type == "password" and check_conn_after_change
  when:
    - account.secret_type == "password"
    - check_conn_after_change or change_secret_result.failed | default(false)
  delegate_to: localhost

- name: "Verify {{ account.username }} SSH KEY (paramiko)"
@@ -112,6 +116,8 @@
    login_private_key_path: "{{ account.private_key_path }}"
    gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
    old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
  when: account.secret_type == "ssh_key" and check_conn_after_change
  when:
    - account.secret_type == "ssh_key"
    - check_conn_after_change or change_secret_result.failed | default(false)
  delegate_to: localhost
@@ -41,6 +41,7 @@
    password: "{{ account.secret | password_hash('sha512') }}"
    update_password: always
  ignore_errors: true
  register: change_secret_result
  when: account.secret_type == "password"

- name: "Get home directory for {{ account.username }}"
@@ -83,6 +84,7 @@
    user: "{{ account.username }}"
    key: "{{ account.secret }}"
    exclusive: "{{ ssh_params.exclusive }}"
  register: change_secret_result
  when: account.secret_type == "ssh_key"

- name: Refresh connection
@@ -101,7 +103,9 @@
    become_password: "{{ account.become.ansible_password | default('') }}"
    become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
    old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
  when: account.secret_type == "password" and check_conn_after_change
  when:
    - account.secret_type == "password"
    - check_conn_after_change or change_secret_result.failed | default(false)
  delegate_to: localhost

- name: "Verify {{ account.username }} SSH KEY (paramiko)"
@@ -112,6 +116,8 @@
    login_private_key_path: "{{ account.private_key_path }}"
    gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
    old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
  when: account.secret_type == "ssh_key" and check_conn_after_change
  when:
    - account.secret_type == "ssh_key"
    - check_conn_after_change or change_secret_result.failed | default(false)
  delegate_to: localhost
@@ -0,0 +1,27 @@
- hosts: demo
  gather_facts: no
  tasks:
    - name: Test privileged account
      ansible.windows.win_ping:

    - name: Push user password
      community.windows.win_domain_user:
        name: "{{ account.username }}"
        password: "{{ account.secret }}"
        update_password: always
        password_never_expires: yes
        state: present
        groups: "{{ params.groups }}"
        groups_action: add
      ignore_errors: true
      when: account.secret_type == "password"

    - name: Refresh connection
      ansible.builtin.meta: reset_connection

    - name: Verify password
      ansible.windows.win_ping:
      vars:
        ansible_user: "{{ account.full_username }}"
        ansible_password: "{{ account.secret }}"
      when: account.secret_type == "password" and check_conn_after_change
@@ -0,0 +1,25 @@
id: push_account_ad_windows
name: "{{ 'Windows account push' | trans }}"
version: 1
method: push_account
category:
  - ds
type:
  - windows_ad
params:
  - name: groups
    type: str
    label: '用户组'
    default: 'Users,Remote Desktop Users'
    help_text: "{{ 'Params groups help text' | trans }}"

i18n:
  Windows account push:
    zh: '使用 Ansible 模块 win_domain_user 执行 Windows 账号推送'
    ja: 'Ansible win_domain_user モジュールを使用して Windows アカウントをプッシュする'
    en: 'Using Ansible module win_domain_user to push account'

  Params groups help text:
    zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
    ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
    en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
@@ -11,4 +11,5 @@
    login_host: "{{ jms_asset.address }}"
    login_port: "{{ jms_asset.port }}"
    name: "{{ jms_asset.spec_info.db_name }}"
    script: "DROP USER {{ account.username }}"
    script: "DROP LOGIN {{ account.username }}; select @@version"
@@ -0,0 +1,9 @@
- hosts: windows
  gather_facts: no
  tasks:
    - name: "Remove account"
      ansible.windows.win_domain_user:
        name: "{{ account.username }}"
        state: absent
@@ -0,0 +1,14 @@
id: remove_account_ad_windows
name: "{{ 'Windows account remove' | trans }}"
version: 1
method: remove_account
category:
  - ds
type:
  - windows_ad

i18n:
  Windows account remove:
    zh: 使用 Ansible 模块 win_domain_user 删除账号
    ja: Ansible モジュール win_domain_user を使用してアカウントを削除する
    en: Use the Ansible module win_domain_user to delete an account
@@ -10,6 +10,6 @@
  rdp_ping:
    login_host: "{{ jms_asset.address }}"
    login_port: "{{ jms_asset.port }}"
    login_user: "{{ account.username }}"
    login_user: "{{ account.full_username }}"
    login_password: "{{ account.secret }}"
    login_secret_type: "{{ account.secret_type }}"
@@ -2,8 +2,10 @@ id: verify_account_by_rdp
name: "{{ 'Windows rdp account verify' | trans }}"
category:
  - host
  - ds
type:
  - windows
  - windows_ad
method: verify_account
protocol: rdp
priority: 1
@@ -7,5 +7,5 @@
- name: Verify account
  ansible.windows.win_ping:
  vars:
    ansible_user: "{{ account.username }}"
    ansible_user: "{{ account.full_username }}"
    ansible_password: "{{ account.secret }}"
@@ -2,9 +2,12 @@ id: verify_account_windows
name: "{{ 'Windows account verify' | trans }}"
version: 1
method: verify_account
category: host
category:
  - host
  - ds
type:
  - windows
  - windows_ad

i18n:
  Windows account verify:
@@ -42,7 +42,7 @@ class VerifyAccountManager(AccountBasePlaybookManager):
        if host.get('error'):
            return host

        accounts = asset.accounts.all()
        accounts = asset.all_accounts.all()
        accounts = self.get_accounts(account, accounts)
        inventory_hosts = []

@@ -64,6 +64,7 @@ class VerifyAccountManager(AccountBasePlaybookManager):
        h['account'] = {
            'name': account.name,
            'username': account.username,
            'full_username': account.full_username,
            'secret_type': account.secret_type,
            'secret': account.escape_jinja2_syntax(secret),
            'private_key_path': private_key_path,
@@ -84,6 +85,7 @@ class VerifyAccountManager(AccountBasePlaybookManager):
    def on_host_error(self, host, error, result):
        account = self.host_account_mapper.get(host)
        try:
            account.set_connectivity(Connectivity.ERR)
            error_tp = account.get_err_connectivity(error)
            account.set_connectivity(error_tp)
        except Exception as e:
            print(f'\033[31m Update account {account.name} connectivity failed: {e} \033[0m\n')
@@ -5,7 +5,6 @@ import uuid
import django_filters
from django.db.models import Q
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from django_filters import rest_framework as drf_filters
from rest_framework import filters
from rest_framework.compat import coreapi
@@ -13,11 +12,26 @@ from rest_framework.compat import coreapi
from assets.models import Node
from assets.utils import get_node_from_request
from common.drf.filters import BaseFilterSet
from common.utils import get_logger
from common.utils.timezone import local_zero_hour, local_now
from .const.automation import ChangeSecretRecordStatusChoice
from .models import Account, GatheredAccount, ChangeSecretRecord, PushSecretRecord, IntegrationApplication, \
    AutomationExecution

logger = get_logger(__file__)


class UUIDFilterMixin:
    @staticmethod
    def filter_uuid(queryset, name, value):
        try:
            uuid.UUID(value)
        except ValueError:
            logger.warning(f"Invalid UUID: {value}")
            return queryset.none()

        return queryset.filter(**{name: value})


class NodeFilterBackend(filters.BaseFilterBackend):
    fields = ['node_id']
@@ -43,14 +57,15 @@ class NodeFilterBackend(filters.BaseFilterBackend):
        return queryset


class AccountFilterSet(BaseFilterSet):
class AccountFilterSet(UUIDFilterMixin, BaseFilterSet):
    ip = drf_filters.CharFilter(field_name="address", lookup_expr="exact")
    name = drf_filters.CharFilter(field_name="name", lookup_expr="exact")
    hostname = drf_filters.CharFilter(field_name="name", lookup_expr="exact")
    username = drf_filters.CharFilter(field_name="username", lookup_expr="exact")
    address = drf_filters.CharFilter(field_name="asset__address", lookup_expr="exact")
    asset_id = drf_filters.CharFilter(field_name="asset", lookup_expr="exact")
    asset = drf_filters.CharFilter(field_name="asset", lookup_expr="exact")
    assets = drf_filters.CharFilter(field_name="asset_id", lookup_expr="exact")
    asset_name = drf_filters.CharFilter(field_name="asset__name", lookup_expr="exact")
    asset_id = drf_filters.CharFilter(field_name="asset", method="filter_uuid")
    assets = drf_filters.CharFilter(field_name="asset_id", method="filter_uuid")
    has_secret = drf_filters.BooleanFilter(method="filter_has_secret")
    platform = drf_filters.CharFilter(
        field_name="asset__platform_id", lookup_expr="exact"
@@ -135,8 +150,9 @@ class AccountFilterSet(BaseFilterSet):
            kwargs.update({"date_change_secret__gt": date})

        if name == "latest_secret_change_failed":
            queryset = queryset.filter(date_change_secret__gt=date).exclude(
                change_secret_status=ChangeSecretRecordStatusChoice.success
            queryset = (
                queryset.filter(date_change_secret__gt=date)
                .exclude(change_secret_status=ChangeSecretRecordStatusChoice.success)
            )

        if kwargs:
@@ -146,8 +162,8 @@ class AccountFilterSet(BaseFilterSet):
    class Meta:
        model = Account
        fields = [
            "id", "asset", "source_id", "secret_type", "category",
            "type", "privileged", "secret_reset", "connectivity", 'is_active'
            "id", "source_id", "secret_type", "category", "type",
            "privileged", "secret_reset", "connectivity", "is_active"
        ]


@@ -185,16 +201,6 @@ class SecretRecordMixin(drf_filters.FilterSet):
        return queryset.filter(date_finished__gte=dt)


class UUIDExecutionFilterMixin:
    @staticmethod
    def filter_execution(queryset, name, value):
        try:
            uuid.UUID(value)
        except ValueError:
            raise ValueError(_('Enter a valid UUID.'))
        return queryset.filter(**{name: value})


class DaysExecutionFilterMixin:
    days = drf_filters.NumberFilter(method="filter_days")
    field: str
@@ -209,10 +215,10 @@ class DaysExecutionFilterMixin:


class ChangeSecretRecordFilterSet(
    SecretRecordMixin, UUIDExecutionFilterMixin,
    SecretRecordMixin, UUIDFilterMixin,
    DaysExecutionFilterMixin, BaseFilterSet
):
    execution_id = django_filters.CharFilter(method="filter_execution")
    execution_id = django_filters.CharFilter(method="filter_uuid")
    days = drf_filters.NumberFilter(method="filter_days")

    field = 'date_finished'
@@ -230,8 +236,8 @@ class AutomationExecutionFilterSet(DaysExecutionFilterMixin, BaseFilterSet):
        fields = ["days", 'trigger', 'automation_id', 'automation__name']


class PushAccountRecordFilterSet(SecretRecordMixin, UUIDExecutionFilterMixin, BaseFilterSet):
    execution_id = django_filters.CharFilter(method="filter_execution")
class PushAccountRecordFilterSet(SecretRecordMixin, UUIDFilterMixin, BaseFilterSet):
    execution_id = django_filters.CharFilter(method="filter_uuid")

    class Meta:
        model = PushSecretRecord
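The new `UUIDFilterMixin` replaces `UUIDExecutionFilterMixin` and changes the failure mode: a malformed UUID in the query string now logs a warning and yields an empty queryset instead of raising. A minimal sketch of the idea (not project code; plain lists stand in for querysets):

```python
# Minimal sketch (not project code): validate a query param as a UUID before filtering.
import uuid

def filter_uuid(rows, field, value):
    """rows: list of dicts standing in for a Django queryset."""
    try:
        uuid.UUID(value)
    except ValueError:
        return []                      # the queryset.none() equivalent
    return [r for r in rows if r.get(field) == value]

rows = [{"asset": "a6f1c0de-0000-0000-0000-000000000001", "name": "root"}]
print(filter_uuid(rows, "asset", "not-a-uuid"))                            # []
print(filter_uuid(rows, "asset", "a6f1c0de-0000-0000-0000-000000000001"))  # one match
```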
@@ -1,65 +1,15 @@
from rest_framework.response import Response
from rest_framework import status
from django.db.models import Model
from django.utils import translation
from django.utils.translation import gettext_noop

from audits.const import ActionChoices
from common.views.mixins import RecordViewLogMixin
from common.utils import i18n_fmt
from audits.handler import create_or_update_operate_log


class AccountRecordViewLogMixin(RecordViewLogMixin):
class AccountRecordViewLogMixin(object):
    get_object: callable
    get_queryset: callable

    @staticmethod
    def _filter_params(params):
        new_params = {}
        need_pop_params = ('format', 'order')
        for key, value in params.items():
            if key in need_pop_params:
                continue
            if isinstance(value, list):
                value = list(filter(None, value))
            if value:
                new_params[key] = value
        return new_params

    def get_resource_display(self, request):
        query_params = dict(request.query_params)
        params = self._filter_params(query_params)

        spm_filter = params.pop("spm", None)

        if not params and not spm_filter:
            display_message = gettext_noop("Export all")
        elif spm_filter:
            display_message = gettext_noop("Export only selected items")
        else:
            query = ",".join(
                ["%s=%s" % (key, value) for key, value in params.items()]
            )
            display_message = i18n_fmt(gettext_noop("Export filtered: %s"), query)
        return display_message

    @property
    def detail_msg(self):
        return i18n_fmt(
            gettext_noop('User %s view/export secret'), self.request.user
        )

    def list(self, request, *args, **kwargs):
        list_func = getattr(super(), 'list')
        if not callable(list_func):
            return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
        response = list_func(request, *args, **kwargs)
        with translation.override('en'):
            resource_display = self.get_resource_display(request)
            ids = [q.id for q in self.get_queryset()]
            self.record_logs(
                ids, ActionChoices.view, self.detail_msg, resource_display=resource_display
            )
        return response
    model: Model

    def retrieve(self, request, *args, **kwargs):
        retrieve_func = getattr(super(), 'retrieve')
@@ -67,9 +17,9 @@ class AccountRecordViewLogMixin(RecordViewLogMixin):
            return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
        response = retrieve_func(request, *args, **kwargs)
        with translation.override('en'):
            resource = self.get_object()
            self.record_logs(
                [resource.id], ActionChoices.view, self.detail_msg, resource=resource
            create_or_update_operate_log(
                ActionChoices.view, self.model._meta.verbose_name,
                force=True, resource=self.get_object(),
            )
        return response
@@ -131,9 +131,46 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):

    @lazyproperty
    def alias(self):
        """
        别称,因为有虚拟账号,@INPUT @MANUAL @USER, 否则为 id
        """
        if self.username.startswith('@'):
            return self.username
        return self.name
        return str(self.id)

    def is_virtual(self):
        """
        不要用 username 去判断,因为可能是构造的 account 对象,设置了同名账号的用户名,
        """
        return self.alias.startswith('@')

    def is_ds_account(self):
        if self.is_virtual():
            return ''
        if not self.asset.is_directory_service:
            return False
        return True

    @lazyproperty
    def ds(self):
        if not self.is_ds_account():
            return None
        return self.asset.ds

    @lazyproperty
    def ds_domain(self):
        """这个不能去掉,perm_account 会动态设置这个值,以更改 full_username"""
        if self.is_virtual():
            return ''
        if self.ds and self.ds.domain_name:
            return self.ds.domain_name
        return ''

    @property
    def full_username(self):
        if self.ds_domain:
            return '{}@{}'.format(self.username, self.ds_domain)
        return self.username

    @lazyproperty
    def has_secret(self):
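A minimal sketch (not project code) of how the new `full_username` property composes the login name that the verify/change/push playbooks in this changeset now consume: directory-service accounts get `username@<domain_name>`, everything else keeps the bare username.

```python
# Minimal sketch (not project code): the full_username composition rule.
class AccountSketch:
    def __init__(self, username, ds_domain=""):
        self.username = username
        self.ds_domain = ds_domain      # '' for non-DS or virtual accounts

    @property
    def full_username(self):
        if self.ds_domain:
            return "{}@{}".format(self.username, self.ds_domain)
        return self.username

print(AccountSketch("administrator").full_username)            # administrator
print(AccountSketch("alice", "ad.example.com").full_username)   # alice@ad.example.com
```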
@@ -92,8 +92,9 @@ class VirtualAccount(JMSOrgBaseModel):
        from .account import Account
        username = user.username

        alias = AliasAccount.USER.value
        with tmp_to_org(asset.org):
            same_account = cls.objects.filter(alias='@USER').first()
            same_account = cls.objects.filter(alias=alias).first()

        secret = ''
        if same_account and same_account.secret_from_login:
@@ -101,4 +102,6 @@ class VirtualAccount(JMSOrgBaseModel):

        if not secret and not from_permed:
            secret = input_secret
        return Account(name=AliasAccount.USER.label, username=username, secret=secret)
        account = Account(name=AliasAccount.USER.label, username=username, secret=secret)
        account.alias = alias
        return account
@@ -233,6 +233,7 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
        required=False, queryset=Account.objects, allow_null=True, allow_empty=True,
        label=_('Su from'), attrs=('id', 'name', 'username')
    )
    ds = ObjectRelatedField(read_only=True, label=_('Directory service'), attrs=('id', 'name', 'domain_name'))

    class Meta(BaseAccountSerializer.Meta):
        model = Account
@@ -241,7 +242,7 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
            'date_change_secret', 'change_secret_status'
        ]
        fields = BaseAccountSerializer.Meta.fields + [
            'su_from', 'asset', 'version',
            'su_from', 'asset', 'version', 'ds',
            'source', 'source_id', 'secret_reset',
        ] + AccountCreateUpdateSerializerMixin.Meta.fields + automation_fields
        read_only_fields = BaseAccountSerializer.Meta.read_only_fields + automation_fields
@@ -258,7 +259,7 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
        queryset = queryset.prefetch_related(
            'asset', 'asset__platform',
            'asset__platform__automation'
        ).prefetch_related('labels', 'labels__label')
        )
        return queryset
@@ -1,9 +1,11 @@
from django.templatetags.static import static
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers

from accounts.models import IntegrationApplication
from acls.serializers.rules import ip_group_child_validator, ip_group_help_text
from common.serializers.fields import JSONManyToManyField
from common.utils import random_string
from orgs.mixins.serializers import BulkOrgResourceModelSerializer


@@ -27,13 +29,18 @@ class IntegrationApplicationSerializer(BulkOrgResourceModelSerializer):
            'name': {'label': _('Name')},
            'accounts_amount': {'label': _('Accounts amount')},
            'is_active': {'default': True},
            'logo': {'required': False},
        }

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        request_method = self.context.get('request').method
        if request_method == 'PUT':
            self.fields['logo'].required = False
    def to_representation(self, instance):
        data = super().to_representation(instance)
        if not data.get('logo'):
            data['logo'] = static('img/logo.png')
        return data

    def validate(self, attrs):
        attrs['secret'] = random_string(36)
        return attrs


class IntegrationAccountSecretSerializer(serializers.Serializer):
@@ -129,7 +129,7 @@
        </tbody>
    </table>
    {% else %}
    <p class="no-data">{% trans 'No new accounts found' %}</p>
    <p class="no-data">{% trans 'No lost accounts found' %}</p>
    {% endif %}
</div>
</section>
@@ -8,6 +8,6 @@ class ActionChoices(models.TextChoices):
    review = 'review', _('Review')
    warning = 'warning', _('Warn')
    notice = 'notice', _('Notify')
    notify_and_warn = 'notify_and_warn', _('Notify and warn')
    notify_and_warn = 'notify_and_warn', _('Prompt and warn')
    face_verify = 'face_verify', _('Face Verify')
    face_online = 'face_online', _('Face Online')
@@ -18,7 +18,12 @@ class LoginACLSerializer(BaseUserACLSerializer, BulkOrgResourceModelSerializer):
    class Meta(BaseUserACLSerializer.Meta):
        model = LoginACL
        fields = BaseUserACLSerializer.Meta.fields + ['rules', ]
        action_choices_exclude = [ActionChoices.face_online, ActionChoices.face_verify]
        action_choices_exclude = [
            ActionChoices.warning,
            ActionChoices.notify_and_warn,
            ActionChoices.face_online,
            ActionChoices.face_verify
        ]

    def get_rules_serializer(self):
        return RuleSerializer()
@@ -1,10 +1,10 @@
from .asset import *
from .category import *
from .domain import *
from .favorite_asset import *
from .mixin import *
from .my_asset import *
from .node import *
from .platform import *
from .protocol import *
from .tree import *
from .my_asset import *
from .zone import *
@ -3,6 +3,7 @@ from .cloud import *
|
||||
from .custom import *
|
||||
from .database import *
|
||||
from .device import *
|
||||
from .ds import *
|
||||
from .gpt import *
|
||||
from .host import *
|
||||
from .permission import *
|
||||
|
@ -11,6 +11,7 @@ from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.status import HTTP_200_OK
|
||||
|
||||
from accounts.serializers import AccountSerializer
|
||||
from accounts.tasks import push_accounts_to_assets_task, verify_accounts_connectivity_task
|
||||
from assets import serializers
|
||||
from assets.exceptions import NotSupportedTemporarilyError
|
||||
@ -36,12 +37,12 @@ class AssetFilterSet(BaseFilterSet):
|
||||
platform = drf_filters.CharFilter(method='filter_platform')
|
||||
is_gateway = drf_filters.BooleanFilter(method='filter_is_gateway')
|
||||
exclude_platform = drf_filters.CharFilter(field_name="platform__name", lookup_expr='exact', exclude=True)
|
||||
domain = drf_filters.CharFilter(method='filter_domain')
|
||||
zone = drf_filters.CharFilter(method='filter_zone')
|
||||
type = drf_filters.CharFilter(field_name="platform__type", lookup_expr="exact")
|
||||
category = drf_filters.CharFilter(field_name="platform__category", lookup_expr="exact")
|
||||
protocols = drf_filters.CharFilter(method='filter_protocols')
|
||||
domain_enabled = drf_filters.BooleanFilter(
|
||||
field_name="platform__domain_enabled", lookup_expr="exact"
|
||||
gateway_enabled = drf_filters.BooleanFilter(
|
||||
field_name="platform__gateway_enabled", lookup_expr="exact"
|
||||
)
|
||||
ping_enabled = drf_filters.BooleanFilter(
|
||||
field_name="platform__automation__ping_enabled", lookup_expr="exact"
|
||||
@ -84,11 +85,11 @@ class AssetFilterSet(BaseFilterSet):
|
||||
return queryset
|
||||
|
||||
@staticmethod
|
||||
def filter_domain(queryset, name, value):
|
||||
def filter_zone(queryset, name, value):
|
||||
if is_uuid(value):
|
||||
return queryset.filter(domain_id=value)
|
||||
return queryset.filter(zone_id=value)
|
||||
else:
|
||||
return queryset.filter(domain__name__contains=value)
|
||||
return queryset.filter(zone__name__contains=value)
|
||||
|
||||
@staticmethod
|
||||
def filter_protocols(queryset, name, value):
|
||||
@ -96,10 +97,10 @@ class AssetFilterSet(BaseFilterSet):
|
||||
return queryset.filter(protocols__name__in=value).distinct()
|
||||
|
||||
|
||||
class AssetViewSet(SuggestionMixin, OrgBulkModelViewSet):
|
||||
"""
|
||||
API endpoint that allows Asset to be viewed or edited.
|
||||
class BaseAssetViewSet(OrgBulkModelViewSet):
|
||||
"""
|
||||
API endpoint that allows Asset to be viewed or edited.
|
||||
"""
|
||||
model = Asset
|
||||
filterset_class = AssetFilterSet
|
||||
search_fields = ("name", "address", "comment")
|
||||
@ -109,18 +110,19 @@ class AssetViewSet(SuggestionMixin, OrgBulkModelViewSet):
|
||||
("platform", serializers.PlatformSerializer),
|
||||
("suggestion", serializers.MiniAssetSerializer),
|
||||
("gateways", serializers.GatewaySerializer),
|
||||
("accounts", AccountSerializer),
|
||||
)
|
||||
rbac_perms = (
|
||||
("match", "assets.match_asset"),
|
||||
("platform", "assets.view_platform"),
|
||||
("gateways", "assets.view_gateway"),
|
||||
("accounts", "assets.view_account"),
|
||||
("spec_info", "assets.view_asset"),
|
||||
("gathered_info", "assets.view_asset"),
|
||||
("sync_platform_protocols", "assets.change_asset"),
|
||||
)
|
||||
extra_filter_backends = [
|
||||
IpInFilterBackend,
|
||||
NodeFilterBackend, AttrRulesFilterBackend
|
||||
IpInFilterBackend, NodeFilterBackend, AttrRulesFilterBackend
|
||||
]
|
||||
|
||||
def perform_destroy(self, instance):
|
||||
@ -141,6 +143,25 @@ class AssetViewSet(SuggestionMixin, OrgBulkModelViewSet):
|
||||
return retrieve_cls
|
||||
return cls
|
||||
|
||||
def paginate_queryset(self, queryset):
|
||||
page = super().paginate_queryset(queryset)
|
||||
if page:
|
||||
page = Asset.compute_all_accounts_amount(page)
|
||||
return page
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
if request.path.find('/api/v1/assets/assets/') > -1:
|
||||
error = _('Cannot create asset directly, you should create a host or other')
|
||||
return Response({'error': error}, status=400)
|
||||
|
||||
if not settings.XPACK_LICENSE_IS_VALID and self.model.objects.order_by().count() >= 5000:
|
||||
error = _('The number of assets exceeds the limit of 5000')
|
||||
return Response({'error': error}, status=400)
|
||||
|
||||
return super().create(request, *args, **kwargs)
|
||||
|
||||
|
||||
class AssetViewSet(SuggestionMixin, BaseAssetViewSet):
|
||||
@action(methods=["GET"], detail=True, url_path="platform")
|
||||
def platform(self, *args, **kwargs):
|
||||
asset = super().get_object()
|
||||
@ -150,10 +171,10 @@ class AssetViewSet(SuggestionMixin, OrgBulkModelViewSet):
|
||||
@action(methods=["GET"], detail=True, url_path="gateways")
|
||||
def gateways(self, *args, **kwargs):
|
||||
asset = self.get_object()
|
||||
if not asset.domain:
|
||||
if not asset.zone:
|
||||
gateways = Gateway.objects.none()
|
||||
else:
|
||||
gateways = asset.domain.gateways
|
||||
gateways = asset.zone.gateways
|
||||
return self.get_paginated_response_from_queryset(gateways)
|
||||
|
||||
@action(methods=['post'], detail=False, url_path='sync-platform-protocols')
|
||||
@ -189,17 +210,6 @@ class AssetViewSet(SuggestionMixin, OrgBulkModelViewSet):
|
||||
Protocol.objects.bulk_create(objs)
|
||||
return Response(status=status.HTTP_200_OK)
|
||||
|
||||
def create(self, request, *args, **kwargs):
|
||||
if request.path.find('/api/v1/assets/assets/') > -1:
|
||||
error = _('Cannot create asset directly, you should create a host or other')
|
||||
return Response({'error': error}, status=400)
|
||||
|
||||
if not settings.XPACK_LICENSE_IS_VALID and self.model.objects.order_by().count() >= 5000:
|
||||
error = _('The number of assets exceeds the limit of 5000')
|
||||
return Response({'error': error}, status=400)
|
||||
|
||||
return super().create(request, *args, **kwargs)
|
||||
|
||||
def filter_bulk_update_data(self):
|
||||
bulk_data = []
|
||||
skip_assets = []
|
||||
|
@ -1,12 +1,12 @@
|
||||
from assets.models import Cloud, Asset
|
||||
from assets.serializers import CloudSerializer
|
||||
|
||||
from .asset import AssetViewSet
|
||||
from .asset import BaseAssetViewSet
|
||||
|
||||
__all__ = ['CloudViewSet']
|
||||
|
||||
|
||||
class CloudViewSet(AssetViewSet):
|
||||
class CloudViewSet(BaseAssetViewSet):
|
||||
model = Cloud
|
||||
perm_model = Asset
|
||||
|
||||
|
@ -1,12 +1,12 @@
|
||||
from assets.models import Custom, Asset
|
||||
from assets.serializers import CustomSerializer
|
||||
|
||||
from .asset import AssetViewSet
|
||||
from .asset import BaseAssetViewSet
|
||||
|
||||
__all__ = ['CustomViewSet']
|
||||
|
||||
|
||||
class CustomViewSet(AssetViewSet):
|
||||
class CustomViewSet(BaseAssetViewSet):
|
||||
model = Custom
|
||||
perm_model = Asset
|
||||
|
||||
|
@ -1,12 +1,12 @@
|
||||
from assets.models import Database, Asset
|
||||
from assets.serializers import DatabaseSerializer
|
||||
|
||||
from .asset import AssetViewSet
|
||||
from .asset import BaseAssetViewSet
|
||||
|
||||
__all__ = ['DatabaseViewSet']
|
||||
|
||||
|
||||
class DatabaseViewSet(AssetViewSet):
|
||||
class DatabaseViewSet(BaseAssetViewSet):
|
||||
model = Database
|
||||
perm_model = Asset
|
||||
|
||||
|
@ -1,11 +1,11 @@
|
||||
from assets.serializers import DeviceSerializer
|
||||
from assets.models import Device, Asset
|
||||
from .asset import AssetViewSet
|
||||
from assets.serializers import DeviceSerializer
|
||||
from .asset import BaseAssetViewSet
|
||||
|
||||
__all__ = ['DeviceViewSet']
|
||||
|
||||
|
||||
class DeviceViewSet(AssetViewSet):
|
||||
class DeviceViewSet(BaseAssetViewSet):
|
||||
model = Device
|
||||
perm_model = Asset
|
||||
|
||||
|
16
apps/assets/api/asset/ds.py
Normal file
@ -0,0 +1,16 @@
|
||||
from assets.models import DirectoryService, Asset
|
||||
from assets.serializers import DSSerializer
|
||||
|
||||
from .asset import BaseAssetViewSet
|
||||
|
||||
__all__ = ['DSViewSet']
|
||||
|
||||
|
||||
class DSViewSet(BaseAssetViewSet):
|
||||
model = DirectoryService
|
||||
perm_model = Asset
|
||||
|
||||
def get_serializer_classes(self):
|
||||
serializer_classes = super().get_serializer_classes()
|
||||
serializer_classes['default'] = DSSerializer
|
||||
return serializer_classes
|
@ -1,12 +1,12 @@
|
||||
from assets.models import GPT, Asset
|
||||
from assets.serializers import GPTSerializer
|
||||
|
||||
from .asset import AssetViewSet
|
||||
from .asset import BaseAssetViewSet
|
||||
|
||||
__all__ = ['GPTViewSet']
|
||||
|
||||
|
||||
class GPTViewSet(AssetViewSet):
|
||||
class GPTViewSet(BaseAssetViewSet):
|
||||
model = GPT
|
||||
perm_model = Asset
|
||||
|
||||
|
@ -1,11 +1,11 @@
|
||||
from assets.models import Host, Asset
|
||||
from assets.serializers import HostSerializer
|
||||
from .asset import AssetViewSet
|
||||
from .asset import BaseAssetViewSet
|
||||
|
||||
__all__ = ['HostViewSet']
|
||||
|
||||
|
||||
class HostViewSet(AssetViewSet):
|
||||
class HostViewSet(BaseAssetViewSet):
|
||||
model = Host
|
||||
perm_model = Asset
|
||||
|
||||
|
@ -1,12 +1,12 @@
|
||||
from assets.models import Web, Asset
|
||||
from assets.serializers import WebSerializer
|
||||
|
||||
from .asset import AssetViewSet
|
||||
from .asset import BaseAssetViewSet
|
||||
|
||||
__all__ = ['WebViewSet']
|
||||
|
||||
|
||||
class WebViewSet(AssetViewSet):
|
||||
class WebViewSet(BaseAssetViewSet):
|
||||
model = Web
|
||||
perm_model = Asset
|
||||
|
||||
|
@ -52,7 +52,7 @@ class AssetPlatformViewSet(JMSModelViewSet):
|
||||
queryset = (
|
||||
super().get_queryset()
|
||||
.annotate(assets_amount=Coalesce(Subquery(asset_count_subquery), Value(0)))
|
||||
.prefetch_related('protocols', 'automation', 'labels', 'labels__label')
|
||||
.prefetch_related('protocols', 'automation')
|
||||
)
|
||||
queryset = queryset.filter(type__in=AllTypes.get_types_values())
|
||||
return queryset
|
||||
|
@ -9,24 +9,24 @@ from common.utils import get_logger
|
||||
from orgs.mixins.api import OrgBulkModelViewSet
|
||||
from .asset import HostViewSet
|
||||
from .. import serializers
|
||||
from ..models import Domain, Gateway
|
||||
from ..models import Zone, Gateway
|
||||
|
||||
logger = get_logger(__file__)
|
||||
__all__ = ['DomainViewSet', 'GatewayViewSet', "GatewayTestConnectionApi"]
|
||||
__all__ = ['ZoneViewSet', 'GatewayViewSet', "GatewayTestConnectionApi"]
|
||||
|
||||
|
||||
class DomainViewSet(OrgBulkModelViewSet):
|
||||
model = Domain
|
||||
class ZoneViewSet(OrgBulkModelViewSet):
|
||||
model = Zone
|
||||
filterset_fields = ("name",)
|
||||
search_fields = filterset_fields
|
||||
serializer_classes = {
|
||||
'default': serializers.DomainSerializer,
|
||||
'list': serializers.DomainListSerializer,
|
||||
'default': serializers.ZoneSerializer,
|
||||
'list': serializers.ZoneListSerializer,
|
||||
}
|
||||
|
||||
def get_serializer_class(self):
|
||||
if self.request.query_params.get('gateway'):
|
||||
return serializers.DomainWithGatewaySerializer
|
||||
return serializers.ZoneWithGatewaySerializer
|
||||
return super().get_serializer_class()
|
||||
|
||||
def partial_update(self, request, *args, **kwargs):
|
||||
@ -36,8 +36,8 @@ class DomainViewSet(OrgBulkModelViewSet):
|
||||
|
||||
class GatewayViewSet(HostViewSet):
|
||||
perm_model = Gateway
|
||||
filterset_fields = ("domain__name", "name", "domain")
|
||||
search_fields = ("domain__name",)
|
||||
filterset_fields = ("zone__name", "name", "zone")
|
||||
search_fields = ("zone__name",)
|
||||
|
||||
def get_serializer_classes(self):
|
||||
serializer_classes = super().get_serializer_classes()
|
||||
@ -45,7 +45,7 @@ class GatewayViewSet(HostViewSet):
|
||||
return serializer_classes
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = Domain.get_gateway_queryset()
|
||||
queryset = Zone.get_gateway_queryset()
|
||||
return queryset
|
||||
|
||||
|
||||
@ -55,7 +55,7 @@ class GatewayTestConnectionApi(SingleObjectMixin, APIView):
|
||||
}
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = Domain.get_gateway_queryset()
|
||||
queryset = Zone.get_gateway_queryset()
|
||||
return queryset
|
||||
|
||||
def post(self, request, *args, **kwargs):
|
@ -3,10 +3,10 @@ import json
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import time
|
||||
from collections import defaultdict
|
||||
from socket import gethostname
|
||||
|
||||
import time
|
||||
import yaml
|
||||
from django.conf import settings
|
||||
from django.template.loader import render_to_string
|
||||
@ -334,7 +334,8 @@ class PlaybookPrepareMixin:
|
||||
return sub_playbook_path
|
||||
|
||||
def check_automation_enabled(self, platform, assets):
|
||||
if not platform.automation or not platform.automation.ansible_enabled:
|
||||
automation = getattr(platform, 'automation', None)
|
||||
if not (automation and getattr(automation, 'ansible_enabled', False)):
|
||||
print(_(" - Platform {} ansible disabled").format(platform.name))
|
||||
self.on_assets_not_ansible_enabled(assets)
|
||||
return False
|
||||
|
@ -1,3 +1,5 @@
|
||||
from collections import Counter
|
||||
|
||||
__all__ = ['FormatAssetInfo']
|
||||
|
||||
|
||||
@ -7,13 +9,37 @@ class FormatAssetInfo:
|
||||
self.tp = tp
|
||||
|
||||
@staticmethod
|
||||
def posix_format(info):
|
||||
for cpu_model in info.get('cpu_model', []):
|
||||
if cpu_model.endswith('GHz') or cpu_model.startswith("Intel"):
|
||||
break
|
||||
else:
|
||||
cpu_model = ''
|
||||
info['cpu_model'] = cpu_model[:48]
|
||||
def get_cpu_model_count(cpus):
|
||||
try:
|
||||
models = [cpus[i + 1] + " " + cpus[i + 2] for i in range(0, len(cpus), 3)]
|
||||
|
||||
model_counts = Counter(models)
|
||||
|
||||
result = ', '.join([f"{model} x{count}" for model, count in model_counts.items()])
|
||||
except Exception as e:
|
||||
print(f"Error processing CPU model list: {e}")
|
||||
result = ''
|
||||
|
||||
return result
|
||||
|
||||
@staticmethod
|
||||
def get_gpu_model_count(gpus):
|
||||
try:
|
||||
model_counts = Counter(gpus)
|
||||
|
||||
result = ', '.join([f"{model} x{count}" for model, count in model_counts.items()])
|
||||
except Exception as e:
|
||||
print(f"Error processing GPU model list: {e}")
|
||||
result = ''
|
||||
|
||||
return result
|
||||
|
||||
def posix_format(self, info):
|
||||
cpus = self.get_cpu_model_count(info.get('cpu_model', []))
|
||||
gpus = self.get_gpu_model_count(info.get('gpu_model', []))
|
||||
|
||||
info['gpu_model'] = gpus
|
||||
info['cpu_model'] = cpus
|
||||
info['cpu_count'] = info.get('cpu_count', 0)
|
||||
return info
|
||||
|
||||
|
@ -23,5 +23,16 @@
|
||||
arch: "{{ ansible_architecture }}"
|
||||
kernel: "{{ ansible_kernel }}"
|
||||
|
||||
|
||||
- name: Get GPU info with nvidia-smi
|
||||
shell: |
|
||||
nvidia-smi --query-gpu=name,memory.total,driver_version --format=csv,noheader,nounits
|
||||
register: gpu_info
|
||||
ignore_errors: yes
|
||||
|
||||
- name: Merge GPU info into final info
|
||||
set_fact:
|
||||
info: "{{ info | combine({'gpu_model': gpu_info.stdout_lines | default([])}) }}"
|
||||
|
||||
- debug:
|
||||
var: info
|
||||
|
@ -2,9 +2,12 @@ id: gather_facts_windows
|
||||
name: "{{ 'Gather facts windows' | trans }}"
|
||||
version: 1
|
||||
method: gather_facts
|
||||
category: host
|
||||
category:
|
||||
- host
|
||||
- ds
|
||||
type:
|
||||
- windows
|
||||
- windows_ad
|
||||
i18n:
|
||||
Gather facts windows:
|
||||
zh: '使用 Ansible 指令 gather_facts 从 Windows 获取设备信息'
|
||||
|
@ -3,8 +3,10 @@ name: "{{ 'Ping by pyfreerdp' | trans }}"
|
||||
category:
|
||||
- device
|
||||
- host
|
||||
- ds
|
||||
type:
|
||||
- windows
|
||||
- windows_ad
|
||||
method: ping
|
||||
protocol: rdp
|
||||
priority: 1
|
||||
|
@ -3,6 +3,7 @@ name: "{{ 'Ping by paramiko' | trans }}"
|
||||
category:
|
||||
- device
|
||||
- host
|
||||
- ds
|
||||
type:
|
||||
- all
|
||||
method: ping
|
||||
|
@ -3,6 +3,7 @@ name: "{{ 'Ping by telnet' | trans }}"
|
||||
category:
|
||||
- device
|
||||
- host
|
||||
- ds
|
||||
type:
|
||||
- all
|
||||
method: ping
|
||||
|
@ -2,9 +2,12 @@ id: win_ping
|
||||
name: "{{ 'Windows ping' | trans }}"
|
||||
version: 1
|
||||
method: ping
|
||||
category: host
|
||||
category:
|
||||
- host
|
||||
- ds
|
||||
type:
|
||||
- windows
|
||||
- windows_ad
|
||||
i18n:
|
||||
Windows ping:
|
||||
zh: 使用 Ansible 模块 内置模块 win_ping 来测试可连接性
|
||||
|
@ -37,10 +37,11 @@ class PingManager(BasePlaybookManager):
|
||||
def on_host_error(self, host, error, result):
|
||||
asset, account = self.host_asset_and_account_mapper.get(host)
|
||||
try:
|
||||
asset.set_connectivity(Connectivity.ERR)
|
||||
error_tp = asset.get_err_connectivity(error)
|
||||
asset.set_connectivity(error_tp)
|
||||
if not account:
|
||||
return
|
||||
account.set_connectivity(Connectivity.ERR)
|
||||
account.set_connectivity(error_tp)
|
||||
except Exception as e:
|
||||
print(f'\033[31m Update account {account.name} or '
|
||||
f'update asset {asset.name} connectivity failed: {e} \033[0m\n')
|
||||
|
@ -7,6 +7,12 @@ class Connectivity(TextChoices):
NA = 'na', _('N/A')
OK = 'ok', _('OK')
ERR = 'err', _('Error')
AUTH_ERR = 'auth_err', _('Authentication error')
SUDO_ERR = 'sudo_err', _('Sudo permission error')
PASSWORD_ERR = 'password_err', _('Invalid password error')
OPENSSH_KEY_ERR = 'openssh_key_err', _('OpenSSH key error')
NTLM_ERR = 'ntlm_err', _('NTLM credentials rejected error')
CREATE_DIR_ERR = 'create_dir_err', _('Create directory error')


class AutomationTypes(TextChoices):

@ -37,7 +37,7 @@ class FillType(models.TextChoices):
class BaseType(TextChoices):
"""
Constraints should describe platform-level restrictions and avoid redundant options, e.g. enabling ssh for mysql,
or options that take no effect even when enabled, e.g. enabling domain for k8s, which is not supported yet
or options that take no effect even when enabled, e.g. enabling gateway for k8s, which is not supported yet
"""

@classmethod
@ -112,8 +112,7 @@ class BaseType(TextChoices):

@classmethod
def get_choices(cls):
if not settings.XPACK_LICENSE_IS_VALID:
choices = cls.choices
if not settings.XPACK_LICENSE_IS_VALID and hasattr(cls, 'get_community_types'):
choices = [(tp.value, tp.label) for tp in cls.get_community_types()]
else:
choices = cls.choices
return choices

@ -12,6 +12,7 @@ class Category(ChoicesMixin, models.TextChoices):
|
||||
DATABASE = 'database', _("Database")
|
||||
CLOUD = 'cloud', _("Cloud service")
|
||||
WEB = 'web', _("Web")
|
||||
DS = 'ds', _("Directory service")
|
||||
CUSTOM = 'custom', _("Custom type")
|
||||
|
||||
@classmethod
|
||||
|
@ -13,11 +13,11 @@ class CloudTypes(BaseType):
|
||||
return {
|
||||
'*': {
|
||||
'charset_enabled': False,
|
||||
'domain_enabled': False,
|
||||
'gateway_enabled': False,
|
||||
'su_enabled': False,
|
||||
},
|
||||
cls.K8S: {
|
||||
'domain_enabled': True,
|
||||
'gateway_enabled': True,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -20,7 +20,7 @@ class CustomTypes(BaseType):
|
||||
return {
|
||||
'*': {
|
||||
'charset_enabled': False,
|
||||
'domain_enabled': False,
|
||||
'gateway_enabled': False,
|
||||
'su_enabled': False,
|
||||
},
|
||||
}
|
||||
|
@ -20,7 +20,7 @@ class DatabaseTypes(BaseType):
|
||||
return {
|
||||
'*': {
|
||||
'charset_enabled': False,
|
||||
'domain_enabled': True,
|
||||
'gateway_enabled': True,
|
||||
'su_enabled': False,
|
||||
}
|
||||
}
|
||||
|
@ -19,7 +19,8 @@ class DeviceTypes(BaseType):
|
||||
return {
|
||||
'*': {
|
||||
'charset_enabled': False,
|
||||
'domain_enabled': True,
|
||||
'gateway_enabled': True,
|
||||
'ds_enabled': True,
|
||||
'su_enabled': True,
|
||||
'su_methods': ['enable', 'super', 'super_level']
|
||||
}
|
||||
|
70
apps/assets/const/ds.py
Normal file
@ -0,0 +1,70 @@
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from .base import BaseType
|
||||
|
||||
|
||||
class DirectoryTypes(BaseType):
|
||||
GENERAL = 'general', _('General')
|
||||
# LDAP = 'ldap', _('LDAP')
|
||||
# AD = 'ad', _('Active Directory')
|
||||
WINDOWS_AD = 'windows_ad', _('Windows Active Directory')
|
||||
|
||||
# AZURE_AD = 'azure_ad', _('Azure Active Directory')
|
||||
|
||||
@classmethod
|
||||
def _get_base_constrains(cls) -> dict:
|
||||
return {
|
||||
'*': {
|
||||
'charset_enabled': True,
|
||||
'gateway_enabled': True,
|
||||
'ds_enabled': False,
|
||||
'su_enabled': True,
|
||||
},
|
||||
cls.WINDOWS_AD: {
|
||||
'su_enabled': False,
|
||||
}
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def _get_automation_constrains(cls) -> dict:
|
||||
constrains = {
|
||||
'*': {
|
||||
'ansible_enabled': False,
|
||||
},
|
||||
cls.WINDOWS_AD: {
|
||||
'ansible_enabled': True,
|
||||
'ping_enabled': True,
|
||||
'gather_facts_enabled': True,
|
||||
'verify_account_enabled': True,
|
||||
'change_secret_enabled': True,
|
||||
'push_account_enabled': True,
|
||||
'gather_accounts_enabled': True,
|
||||
'remove_account_enabled': True,
|
||||
}
|
||||
}
|
||||
return constrains
|
||||
|
||||
@classmethod
|
||||
def _get_protocol_constrains(cls) -> dict:
|
||||
return {
|
||||
cls.GENERAL: {
|
||||
'choices': ['ssh']
|
||||
},
|
||||
cls.WINDOWS_AD: {
|
||||
'choices': ['rdp', 'ssh', 'vnc', 'winrm']
|
||||
},
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def internal_platforms(cls):
|
||||
return {
|
||||
cls.WINDOWS_AD: [
|
||||
{'name': 'Windows Active Directory'}
|
||||
],
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def get_community_types(cls):
|
||||
return [
|
||||
cls.GENERAL,
|
||||
]
|
@ -11,7 +11,7 @@ class GPTTypes(BaseType):
|
||||
return {
|
||||
'*': {
|
||||
'charset_enabled': False,
|
||||
'domain_enabled': False,
|
||||
'gateway_enabled': False,
|
||||
'su_enabled': False,
|
||||
}
|
||||
}
|
||||
|
@ -18,8 +18,9 @@ class HostTypes(BaseType):
|
||||
'*': {
|
||||
'charset_enabled': True,
|
||||
'charset': 'utf-8', # default
|
||||
'domain_enabled': True,
|
||||
'gateway_enabled': True,
|
||||
'su_enabled': True,
|
||||
'ds_enabled': True,
|
||||
'su_methods': ['sudo', 'su', 'only_sudo', 'only_su'],
|
||||
},
|
||||
cls.WINDOWS: {
|
||||
@ -56,7 +57,6 @@ class HostTypes(BaseType):
|
||||
'change_secret_enabled': True,
|
||||
'push_account_enabled': True,
|
||||
'remove_account_enabled': True,
|
||||
|
||||
},
|
||||
cls.WINDOWS: {
|
||||
'ansible_config': {
|
||||
@ -69,7 +69,6 @@ class HostTypes(BaseType):
|
||||
'ping_enabled': False,
|
||||
'gather_facts_enabled': False,
|
||||
'gather_accounts_enabled': False,
|
||||
'verify_account_enabled': False,
|
||||
'change_secret_enabled': False,
|
||||
'push_account_enabled': False
|
||||
},
|
||||
@ -82,7 +81,7 @@ class HostTypes(BaseType):
|
||||
{'name': 'Linux'},
|
||||
{
|
||||
'name': GATEWAY_NAME,
|
||||
'domain_enabled': True,
|
||||
'gateway_enabled': True,
|
||||
}
|
||||
],
|
||||
cls.UNIX: [
|
||||
@ -126,5 +125,5 @@ class HostTypes(BaseType):
|
||||
@classmethod
|
||||
def get_community_types(cls) -> list:
|
||||
return [
|
||||
cls.LINUX, cls.UNIX, cls.WINDOWS, cls.OTHER_HOST
|
||||
cls.LINUX, cls.WINDOWS, cls.UNIX, cls.OTHER_HOST
|
||||
]
|
||||
|
@ -13,6 +13,7 @@ from .cloud import CloudTypes
|
||||
from .custom import CustomTypes
|
||||
from .database import DatabaseTypes
|
||||
from .device import DeviceTypes
|
||||
from .ds import DirectoryTypes
|
||||
from .gpt import GPTTypes
|
||||
from .host import HostTypes
|
||||
from .web import WebTypes
|
||||
@ -22,7 +23,8 @@ class AllTypes(ChoicesMixin):
|
||||
choices: list
|
||||
includes = [
|
||||
HostTypes, DeviceTypes, DatabaseTypes,
|
||||
CloudTypes, WebTypes, CustomTypes, GPTTypes
|
||||
CloudTypes, WebTypes, CustomTypes,
|
||||
DirectoryTypes, GPTTypes
|
||||
]
|
||||
_category_constrains = {}
|
||||
_automation_methods = None
|
||||
@ -173,6 +175,7 @@ class AllTypes(ChoicesMixin):
|
||||
(Category.DATABASE, DatabaseTypes),
|
||||
(Category.WEB, WebTypes),
|
||||
(Category.CLOUD, CloudTypes),
|
||||
(Category.DS, DirectoryTypes),
|
||||
(Category.CUSTOM, CustomTypes)
|
||||
]
|
||||
return types
|
||||
@ -309,7 +312,7 @@ class AllTypes(ChoicesMixin):
|
||||
'category': category,
|
||||
'type': tp, 'internal': True,
|
||||
'charset': constraints.get('charset', 'utf-8'),
|
||||
'domain_enabled': constraints.get('domain_enabled', False),
|
||||
'gateway_enabled': constraints.get('gateway_enabled', False),
|
||||
'su_enabled': constraints.get('su_enabled', False),
|
||||
}
|
||||
if data['su_enabled'] and data.get('su_methods'):
|
||||
|
@ -11,7 +11,7 @@ class WebTypes(BaseType):
|
||||
return {
|
||||
'*': {
|
||||
'charset_enabled': False,
|
||||
'domain_enabled': False,
|
||||
'gateway_enabled': False,
|
||||
'su_enabled': False,
|
||||
}
|
||||
}
|
||||
|
@ -1,11 +1,11 @@
|
||||
# Generated by Django 4.1.13 on 2024-05-09 03:16
|
||||
|
||||
import json
|
||||
import assets.models.asset.common
|
||||
from django.db.models import F, Q
|
||||
import django.db.models.deletion
|
||||
from django.conf import settings
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
from django.db.models import F
|
||||
|
||||
import assets.models.asset.common
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
@ -39,22 +39,26 @@ class Migration(migrations.Migration):
|
||||
migrations.AddField(
|
||||
model_name='automationexecution',
|
||||
name='automation',
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='executions', to='assets.baseautomation', verbose_name='Automation task'),
|
||||
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='executions',
|
||||
to='assets.baseautomation', verbose_name='Automation task'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='asset',
|
||||
name='domain',
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='assets', to='assets.domain', verbose_name='Zone'),
|
||||
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name='assets', to='assets.domain', verbose_name='Zone'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='asset',
|
||||
name='nodes',
|
||||
field=models.ManyToManyField(default=assets.models.asset.common.default_node, related_name='assets', to='assets.node', verbose_name='Nodes'),
|
||||
field=models.ManyToManyField(default=assets.models.asset.common.default_node, related_name='assets',
|
||||
to='assets.node', verbose_name='Nodes'),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='asset',
|
||||
name='platform',
|
||||
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='assets', to='assets.platform', verbose_name='Platform'),
|
||||
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='assets',
|
||||
to='assets.platform', verbose_name='Platform'),
|
||||
),
|
||||
migrations.CreateModel(
|
||||
name='AssetBaseAutomation',
|
||||
@ -71,7 +75,9 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name='GatherFactsAutomation',
|
||||
fields=[
|
||||
('baseautomation_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='assets.baseautomation')),
|
||||
('baseautomation_ptr',
|
||||
models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True,
|
||||
primary_key=True, serialize=False, to='assets.baseautomation')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Gather asset facts',
|
||||
@ -81,7 +87,9 @@ class Migration(migrations.Migration):
|
||||
migrations.CreateModel(
|
||||
name='PingAutomation',
|
||||
fields=[
|
||||
('baseautomation_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='assets.baseautomation')),
|
||||
('baseautomation_ptr',
|
||||
models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True,
|
||||
primary_key=True, serialize=False, to='assets.baseautomation')),
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Ping asset',
|
||||
|
@ -18,7 +18,7 @@ platforms_data_json = '''[
|
||||
"type": "linux",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": true,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -119,7 +119,7 @@ platforms_data_json = '''[
|
||||
"type": "unix",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": true,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -209,7 +209,7 @@ platforms_data_json = '''[
|
||||
"type": "unix",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": true,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -299,7 +299,7 @@ platforms_data_json = '''[
|
||||
"type": "unix",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": true,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -389,7 +389,7 @@ platforms_data_json = '''[
|
||||
"type": "windows",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -481,7 +481,7 @@ platforms_data_json = '''[
|
||||
"security": "any"
|
||||
},
|
||||
"internal": false,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -582,7 +582,7 @@ platforms_data_json = '''[
|
||||
"type": "other",
|
||||
"meta": {},
|
||||
"internal": false,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -684,7 +684,7 @@ platforms_data_json = '''[
|
||||
"security": "rdp"
|
||||
},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -776,7 +776,7 @@ platforms_data_json = '''[
|
||||
"security": "tls"
|
||||
},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -866,7 +866,7 @@ platforms_data_json = '''[
|
||||
"type": "unix",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": true,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -956,7 +956,7 @@ platforms_data_json = '''[
|
||||
"type": "linux",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": true,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -1057,7 +1057,7 @@ platforms_data_json = '''[
|
||||
"type": "windows",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -1136,7 +1136,7 @@ platforms_data_json = '''[
|
||||
"type": "general",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -1201,7 +1201,7 @@ platforms_data_json = '''[
|
||||
"type": "general",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": true,
|
||||
"su_method": "enable",
|
||||
"custom_fields": [],
|
||||
@ -1266,7 +1266,7 @@ platforms_data_json = '''[
|
||||
"type": "general",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": true,
|
||||
"su_method": "super",
|
||||
"custom_fields": [],
|
||||
@ -1332,7 +1332,7 @@ platforms_data_json = '''[
|
||||
"type": "general",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": true,
|
||||
"su_method": "super_level",
|
||||
"custom_fields": [],
|
||||
@ -1397,7 +1397,7 @@ platforms_data_json = '''[
|
||||
"type": "mysql",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -1449,7 +1449,7 @@ platforms_data_json = '''[
|
||||
"type": "mariadb",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -1501,7 +1501,7 @@ platforms_data_json = '''[
|
||||
"type": "postgresql",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -1553,7 +1553,7 @@ platforms_data_json = '''[
|
||||
"type": "oracle",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -1605,7 +1605,7 @@ platforms_data_json = '''[
|
||||
"type": "sqlserver",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -1657,7 +1657,7 @@ platforms_data_json = '''[
|
||||
"type": "clickhouse",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -1709,7 +1709,7 @@ platforms_data_json = '''[
|
||||
"type": "mongodb",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -1761,7 +1761,7 @@ platforms_data_json = '''[
|
||||
"type": "redis",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -1815,7 +1815,7 @@ platforms_data_json = '''[
|
||||
"type": "redis",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -1869,7 +1869,7 @@ platforms_data_json = '''[
|
||||
"type": "website",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": false,
|
||||
"gateway_enabled": false,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -1924,7 +1924,7 @@ platforms_data_json = '''[
|
||||
"type": "private",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": false,
|
||||
"gateway_enabled": false,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -1979,7 +1979,7 @@ platforms_data_json = '''[
|
||||
"type": "k8s",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": false,
|
||||
"gateway_enabled": false,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -2029,7 +2029,7 @@ platforms_data_json = '''[
|
||||
"type": "chatgpt",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": false,
|
||||
"gateway_enabled": false,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -2081,7 +2081,7 @@ platforms_data_json = '''[
|
||||
"type": "db2",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
@ -2131,7 +2131,7 @@ platforms_data_json = '''[
|
||||
"type": "dameng",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"domain_enabled": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
|
57
apps/assets/migrations/0016_directory_service.py
Normal file
@ -0,0 +1,57 @@
|
||||
# Generated by Django 4.1.13 on 2025-04-03 09:51
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("assets", "0015_automationexecution_type"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name="DirectoryService",
|
||||
fields=[
|
||||
(
|
||||
"asset_ptr",
|
||||
models.OneToOneField(
|
||||
auto_created=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
parent_link=True,
|
||||
primary_key=True,
|
||||
serialize=False,
|
||||
to="assets.asset",
|
||||
),
|
||||
),
|
||||
(
|
||||
"domain_name",
|
||||
models.CharField(
|
||||
blank=True,
|
||||
default="",
|
||||
max_length=128,
|
||||
verbose_name="Domain name",
|
||||
),
|
||||
),
|
||||
],
|
||||
options={
|
||||
"verbose_name": "Directory service",
|
||||
"default_related_name": "ds"
|
||||
},
|
||||
bases=("assets.asset",),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="platform",
|
||||
name="ds_enabled",
|
||||
field=models.BooleanField(default=False, verbose_name="DS enabled"),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="asset",
|
||||
name="directory_services",
|
||||
field=models.ManyToManyField(
|
||||
related_name="assets",
|
||||
to="assets.directoryservice",
|
||||
verbose_name="Directory services",
|
||||
)
|
||||
),
|
||||
]
|
165
apps/assets/migrations/0017_auto_20250407_1124.py
Normal file
@ -0,0 +1,165 @@
|
||||
# Generated by Django 4.1.13 on 2025-04-07 03:24
|
||||
|
||||
import json
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
from assets.const import AllTypes
|
||||
|
||||
|
||||
def add_ds_platforms(apps, schema_editor):
|
||||
data = """
|
||||
[
|
||||
{
|
||||
"created_by": "system",
|
||||
"updated_by": "system",
|
||||
"comment": "",
|
||||
"name": "WindowsActiveDirectory",
|
||||
"category": "ds",
|
||||
"type": "windows_ad",
|
||||
"meta": {},
|
||||
"internal": true,
|
||||
"gateway_enabled": true,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [],
|
||||
"automation": {
|
||||
"ansible_enabled": true,
|
||||
"ansible_config": {
|
||||
"ansible_shell_type": "cmd",
|
||||
"ansible_connection": "ssh"
|
||||
},
|
||||
"ping_enabled": true,
|
||||
"ping_method": "ping_by_rdp",
|
||||
"ping_params": {},
|
||||
"gather_facts_enabled": true,
|
||||
"gather_facts_method": "gather_facts_windows",
|
||||
"gather_facts_params": {},
|
||||
"change_secret_enabled": true,
|
||||
"change_secret_method": "change_secret_ad_windows",
|
||||
"change_secret_params": {
|
||||
},
|
||||
"push_account_enabled": true,
|
||||
"push_account_method": "push_account_ad_windows",
|
||||
"push_account_params": {},
|
||||
"verify_account_enabled": true,
|
||||
"verify_account_method": "verify_account_by_rdp",
|
||||
"verify_account_params": {
|
||||
|
||||
},
|
||||
"gather_accounts_enabled": true,
|
||||
"gather_accounts_method": "gather_accounts_windows_ad",
|
||||
"gather_accounts_params": {
|
||||
|
||||
},
|
||||
"remove_account_enabled": true,
|
||||
"remove_account_method": "remove_account_ad_windows",
|
||||
"remove_account_params": {
|
||||
|
||||
}
|
||||
},
|
||||
"protocols": [
|
||||
{
|
||||
"name": "rdp",
|
||||
"port": 3389,
|
||||
"primary": true,
|
||||
"required": false,
|
||||
"default": false,
|
||||
"public": true,
|
||||
"setting": {
|
||||
"console": false,
|
||||
"security": "any"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "ssh",
|
||||
"port": 22,
|
||||
"primary": false,
|
||||
"required": false,
|
||||
"default": false,
|
||||
"public": true,
|
||||
"setting": {
|
||||
"sftp_enabled": true,
|
||||
"sftp_home": "/tmp"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "vnc",
|
||||
"port": 5900,
|
||||
"primary": false,
|
||||
"required": false,
|
||||
"default": false,
|
||||
"public": true,
|
||||
"setting": {
|
||||
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "winrm",
|
||||
"port": 5985,
|
||||
"primary": false,
|
||||
"required": false,
|
||||
"default": false,
|
||||
"public": false,
|
||||
"setting": {
|
||||
"use_ssl": false
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"created_by": "system",
|
||||
"updated_by": "system",
|
||||
"comment": "",
|
||||
"name": "General",
|
||||
"category": "ds",
|
||||
"type": "general",
|
||||
"meta": {
|
||||
},
|
||||
"internal": true,
|
||||
"gateway_enabled": false,
|
||||
"su_enabled": false,
|
||||
"su_method": null,
|
||||
"custom_fields": [
|
||||
],
|
||||
"automation": {
|
||||
"ansible_enabled": false,
|
||||
"ansible_config": {
|
||||
}
|
||||
},
|
||||
"protocols": [
|
||||
{
|
||||
"name": "ssh",
|
||||
"port": 22,
|
||||
"primary": true,
|
||||
"required": false,
|
||||
"default": false,
|
||||
"public": true,
|
||||
"setting": {
|
||||
"sftp_enabled": true,
|
||||
"sftp_home": "/tmp"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
"""
|
||||
platform_model = apps.get_model('assets', 'Platform')
|
||||
automation_cls = apps.get_model('assets', 'PlatformAutomation')
|
||||
platform_datas = json.loads(data)
|
||||
|
||||
for platform_data in platform_datas:
|
||||
AllTypes.create_or_update_by_platform_data(
|
||||
platform_data, platform_cls=platform_model,
|
||||
automation_cls=automation_cls
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("assets", "0016_directory_service"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(add_ds_platforms)
|
||||
]
|
26
apps/assets/migrations/0018_rename_domain_zone.py
Normal file
@ -0,0 +1,26 @@
|
||||
# Generated by Django 4.1.13 on 2025-04-18 08:05
|
||||
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("assets", "0017_auto_20250407_1124"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RenameField(
|
||||
model_name="platform",
|
||||
old_name="domain_enabled",
|
||||
new_name="gateway_enabled",
|
||||
),
|
||||
migrations.RenameModel(
|
||||
old_name="Domain",
|
||||
new_name="Zone",
|
||||
),
|
||||
migrations.RenameField(
|
||||
model_name="asset",
|
||||
old_name="domain",
|
||||
new_name="zone",
|
||||
),
|
||||
]
|
@ -1,9 +1,10 @@
# noqa
from .base import *
from .platform import *
from .asset import *
from .label import Label
from .gateway import *
from .domain import *
from .zone import * # noqa
from .node import *
from .favorite_asset import *
from .automations import *

@ -3,6 +3,7 @@ from .common import *
from .custom import *
from .database import *
from .device import *
from .ds import *
from .gpt import *
from .host import *
from .web import *

@ -6,7 +6,7 @@ import logging
|
||||
from collections import defaultdict
|
||||
|
||||
from django.db import models
|
||||
from django.db.models import Q
|
||||
from django.db.models import Q, Count
|
||||
from django.forms import model_to_dict
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
@ -168,13 +168,17 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
|
||||
platform = models.ForeignKey(
|
||||
Platform, on_delete=models.PROTECT, verbose_name=_("Platform"), related_name='assets'
|
||||
)
|
||||
domain = models.ForeignKey(
|
||||
"assets.Domain", null=True, blank=True, related_name='assets',
|
||||
zone = models.ForeignKey(
|
||||
"assets.Zone", null=True, blank=True, related_name='assets',
|
||||
verbose_name=_("Zone"), on_delete=models.SET_NULL
|
||||
)
|
||||
nodes = models.ManyToManyField(
|
||||
'assets.Node', default=default_node, related_name='assets', verbose_name=_("Nodes")
|
||||
)
|
||||
directory_services = models.ManyToManyField(
|
||||
'assets.DirectoryService', related_name='assets',
|
||||
verbose_name=_("Directory services")
|
||||
)
|
||||
is_active = models.BooleanField(default=True, verbose_name=_('Active'))
|
||||
gathered_info = models.JSONField(verbose_name=_('Gathered info'), default=dict, blank=True)  # Some asset info, e.g. hardware info
|
||||
custom_info = models.JSONField(verbose_name=_('Custom info'), default=dict)
|
||||
@ -201,6 +205,10 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
|
||||
info[i.name] = v
|
||||
return info
|
||||
|
||||
@lazyproperty
|
||||
def is_directory_service(self):
|
||||
return self.category == const.Category.DS and hasattr(self, 'ds')
|
||||
|
||||
@lazyproperty
|
||||
def spec_info(self):
|
||||
instance = getattr(self, self.category, None)
|
||||
@ -236,7 +244,7 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
|
||||
platform = self.platform
|
||||
auto_config = {
|
||||
'su_enabled': platform.su_enabled,
|
||||
'domain_enabled': platform.domain_enabled,
|
||||
'gateway_enabled': platform.gateway_enabled,
|
||||
'ansible_enabled': False
|
||||
}
|
||||
automation = getattr(self.platform, 'automation', None)
|
||||
@ -245,9 +253,28 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
|
||||
auto_config.update(model_to_dict(automation))
|
||||
return auto_config
|
||||
|
||||
@property
|
||||
def all_accounts(self):
|
||||
if not self.joined_dir_svcs:
|
||||
queryset = self.accounts.all()
|
||||
else:
|
||||
queryset = self.accounts.model.objects.filter(asset__in=[self.id, *self.joined_dir_svcs])
|
||||
return queryset
|
||||
|
||||
@property
|
||||
def dc_accounts(self):
|
||||
queryset = self.accounts.model.objects.filter(asset__in=[*self.joined_dir_svcs])
|
||||
return queryset
|
||||
|
||||
@lazyproperty
|
||||
def all_valid_accounts(self):
|
||||
queryset = (self.all_accounts.filter(is_active=True)
|
||||
.prefetch_related('asset', 'asset__platform'))
|
||||
return queryset
|
||||
|
||||
@lazyproperty
|
||||
def accounts_amount(self):
|
||||
return self.accounts.count()
|
||||
return self.all_accounts.count()
|
||||
|
||||
def get_target_ip(self):
|
||||
return self.address
|
||||
@ -259,6 +286,41 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
|
||||
protocol = self.protocols.all().filter(name=protocol).first()
|
||||
return protocol.port if protocol else 0
|
||||
|
||||
def is_dir_svc(self):
|
||||
return self.category == const.Category.DS
|
||||
|
||||
@property
|
||||
def joined_dir_svcs(self):
|
||||
return self.directory_services.all()
|
||||
|
||||
@classmethod
|
||||
def compute_all_accounts_amount(cls, assets):
|
||||
from .ds import DirectoryService
|
||||
asset_ids = [asset.id for asset in assets]
|
||||
asset_id_dc_ids_mapper = defaultdict(list)
|
||||
dc_ids = set()
|
||||
|
||||
asset_dc_relations = (
|
||||
Asset.directory_services.through.objects
|
||||
.filter(asset_id__in=asset_ids)
|
||||
.values_list('asset_id', 'directoryservice_id')
|
||||
)
|
||||
for asset_id, ds_id in asset_dc_relations:
|
||||
dc_ids.add(ds_id)
|
||||
asset_id_dc_ids_mapper[asset_id].append(ds_id)
|
||||
|
||||
directory_services = (
|
||||
DirectoryService.objects.filter(id__in=dc_ids)
|
||||
.annotate(accounts_amount=Count('accounts'))
|
||||
)
|
||||
ds_accounts_amount_mapper = {ds.id: ds.accounts_amount for ds in directory_services}
|
||||
for asset in assets:
|
||||
asset_dc_ids = asset_id_dc_ids_mapper.get(asset.id, [])
|
||||
for dc_id in asset_dc_ids:
|
||||
ds_accounts = ds_accounts_amount_mapper.get(dc_id, 0)
|
||||
asset.accounts_amount += ds_accounts
|
||||
return assets
|
||||
|
||||
@property
|
||||
def is_valid(self):
|
||||
warning = ''
|
||||
@ -300,11 +362,11 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
|
||||
|
||||
@lazyproperty
|
||||
def gateway(self):
|
||||
if not self.domain_id:
|
||||
if not self.zone_id:
|
||||
return
|
||||
if not self.platform.domain_enabled:
|
||||
if not self.platform.gateway_enabled:
|
||||
return
|
||||
return self.domain.select_gateway()
|
||||
return self.zone.select_gateway()
|
||||
|
||||
def as_node(self):
|
||||
from assets.models import Node
|
||||
|
14
apps/assets/models/asset/ds.py
Normal file
@ -0,0 +1,14 @@
from django.db import models
from django.utils.translation import gettext_lazy as _

from .common import Asset

__all__ = ['DirectoryService']


class DirectoryService(Asset):
domain_name = models.CharField(max_length=128, blank=True, default='', verbose_name=_("Domain name"))

class Meta:
default_related_name = "ds"
verbose_name = _("Directory service")

@ -23,6 +23,28 @@ class AbsConnectivity(models.Model):
self.date_verified = timezone.now()
self.save(update_fields=['connectivity', 'date_verified'])

@staticmethod
def get_err_connectivity(msg=None):
msg = (msg or '').strip().lower()

error_map = {
'permission denied': Connectivity.AUTH_ERR,
'authentication failed': Connectivity.AUTH_ERR,
'authentication failure': Connectivity.AUTH_ERR,
'is not in the sudoers file': Connectivity.SUDO_ERR,
'expected openssh key': Connectivity.OPENSSH_KEY_ERR,
'invalid/incorrect password': Connectivity.PASSWORD_ERR,
'failed to create directory': Connectivity.CREATE_DIR_ERR,
'ntlm: the specified credentials were rejected by the server': Connectivity.NTLM_ERR,
}

for key, value in error_map.items():
if key in msg:
return value

return Connectivity.ERR

@property
def is_connective(self):
if self.connectivity == Connectivity.OK:

@ -101,7 +101,8 @@ class Platform(LabeledMixin, JMSBaseModel):
|
||||
default=CharsetChoices.utf8, choices=CharsetChoices.choices,
|
||||
max_length=8, verbose_name=_("Charset")
|
||||
)
|
||||
domain_enabled = models.BooleanField(default=True, verbose_name=_("Gateway enabled"))
|
||||
gateway_enabled = models.BooleanField(default=True, verbose_name=_("Gateway enabled"))
|
||||
ds_enabled = models.BooleanField(default=False, verbose_name=_("DS enabled"))
|
||||
# Account-related
|
||||
su_enabled = models.BooleanField(default=False, verbose_name=_("Su enabled"))
|
||||
su_method = models.CharField(max_length=32, blank=True, null=True, verbose_name=_("Su method"))
|
||||
@ -115,6 +116,11 @@ class Platform(LabeledMixin, JMSBaseModel):
|
||||
def assets_amount(self):
|
||||
return self.assets.count()
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
if not self.ds_enabled:
|
||||
self.ds = None
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def default(cls):
|
||||
linux, created = cls.objects.get_or_create(
|
||||
|
@ -12,10 +12,10 @@ from .gateway import Gateway
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
__all__ = ['Domain']
|
||||
__all__ = ['Zone']
|
||||
|
||||
|
||||
class Domain(LabeledMixin, JMSOrgBaseModel):
|
||||
class Zone(LabeledMixin, JMSOrgBaseModel):
|
||||
name = models.CharField(max_length=128, verbose_name=_('Name'))
|
||||
|
||||
class Meta:
|
||||
@ -49,7 +49,7 @@ class Domain(LabeledMixin, JMSOrgBaseModel):
|
||||
|
||||
@property
|
||||
def gateways(self):
|
||||
queryset = self.get_gateway_queryset().filter(domain=self)
|
||||
queryset = self.get_gateway_queryset().filter(zone=self)
|
||||
return queryset
|
||||
|
||||
@classmethod
|
@ -4,6 +4,7 @@ from .common import *
|
||||
from .custom import *
|
||||
from .database import *
|
||||
from .device import *
|
||||
from .ds import *
|
||||
from .gpt import *
|
||||
from .host import *
|
||||
from .web import *
|
||||
|
@ -147,18 +147,20 @@ class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, Writa
|
||||
protocols = AssetProtocolsSerializer(many=True, required=False, label=_('Protocols'), default=())
|
||||
accounts = AssetAccountSerializer(many=True, required=False, allow_null=True, write_only=True, label=_('Accounts'))
|
||||
nodes_display = NodeDisplaySerializer(read_only=False, required=False, label=_("Node path"))
|
||||
platform = ObjectRelatedField(queryset=Platform.objects, required=True, label=_('Platform'), attrs=('id', 'name', 'type'))
|
||||
platform = ObjectRelatedField(queryset=Platform.objects, required=True, label=_('Platform'),
|
||||
attrs=('id', 'name', 'type'))
|
||||
accounts_amount = serializers.IntegerField(read_only=True, label=_('Accounts amount'))
|
||||
_accounts = None
|
||||
|
||||
class Meta:
|
||||
model = Asset
|
||||
fields_fk = ['domain', 'platform']
|
||||
fields_fk = ['zone', 'platform']
|
||||
fields_mini = ['id', 'name', 'address'] + fields_fk
|
||||
fields_small = fields_mini + ['is_active', 'comment']
|
||||
fields_m2m = [
|
||||
'nodes', 'labels', 'protocols',
|
||||
'nodes_display', 'accounts',
|
||||
'directory_services',
|
||||
]
|
||||
read_only_fields = [
|
||||
'accounts_amount', 'category', 'type', 'connectivity', 'auto_config',
|
||||
@ -172,6 +174,11 @@ class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, Writa
|
||||
'address': {'label': _('Address')},
|
||||
'nodes_display': {'label': _('Node path')},
|
||||
'nodes': {'allow_empty': True, 'label': _("Nodes")},
|
||||
'directory_services': {
|
||||
'required': False,
|
||||
'allow_empty': True,
|
||||
'default': list, 'label': _("Directory service")
|
||||
},
|
||||
}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
@ -226,15 +233,11 @@ class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, Writa
|
||||
@classmethod
|
||||
def setup_eager_loading(cls, queryset):
|
||||
""" Perform necessary eager loading of data. """
|
||||
queryset = queryset.prefetch_related('domain', 'nodes', 'protocols', ) \
|
||||
queryset = queryset.prefetch_related('zone', 'nodes', 'protocols', 'directory_services') \
|
||||
.prefetch_related('platform', 'platform__automation') \
|
||||
.annotate(category=F("platform__category")) \
|
||||
.annotate(type=F("platform__type")) \
|
||||
.annotate(accounts_amount=Count('accounts'))
|
||||
if queryset.model is Asset:
|
||||
queryset = queryset.prefetch_related('labels__label', 'labels')
|
||||
else:
|
||||
queryset = queryset.prefetch_related('asset_ptr__labels__label', 'asset_ptr__labels')
|
||||
return queryset
|
||||
|
||||
@staticmethod
|
||||
@ -268,9 +271,9 @@ class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, Writa
|
||||
raise serializers.ValidationError({'platform': _("Platform not exist")})
|
||||
return platform
|
||||
|
||||
def validate_domain(self, value):
|
||||
def validate_zone(self, value):
|
||||
platform = self._asset_platform
|
||||
if platform.domain_enabled:
|
||||
if platform.gateway_enabled:
|
||||
return value
|
||||
else:
|
||||
return None
|
||||
|
22
apps/assets/serializers/asset/ds.py
Normal file
@ -0,0 +1,22 @@
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from assets.models import DirectoryService
|
||||
from .common import AssetSerializer
|
||||
|
||||
__all__ = ['DSSerializer']
|
||||
|
||||
|
||||
class DSSerializer(AssetSerializer):
|
||||
class Meta(AssetSerializer.Meta):
|
||||
model = DirectoryService
|
||||
fields = AssetSerializer.Meta.fields + [
|
||||
'domain_name',
|
||||
]
|
||||
extra_kwargs = {
|
||||
**AssetSerializer.Meta.extra_kwargs,
|
||||
'domain_name': {
|
||||
'help_text': _('The domain part used by the directory service (e.g., AD) and appended to '
|
||||
'the username during login, such as example.com in user@example.com.'),
|
||||
'label': _('Domain name')
|
||||
}
|
||||
}
|
@ -6,7 +6,7 @@ class HostGatheredInfoSerializer(serializers.Serializer):
|
||||
vendor = serializers.CharField(max_length=64, required=False, allow_blank=True, label=_('Vendor'))
|
||||
model = serializers.CharField(max_length=54, required=False, allow_blank=True, label=_('Model'))
|
||||
sn = serializers.CharField(max_length=128, required=False, allow_blank=True, label=_('Serial number'))
|
||||
cpu_model = serializers.CharField(max_length=64, allow_blank=True, required=False, label=_('CPU model'))
|
||||
cpu_model = serializers.CharField(allow_blank=True, required=False, label=_('CPU model'))
|
||||
cpu_count = serializers.CharField(max_length=64, required=False, allow_blank=True, label=_('CPU count'))
|
||||
cpu_cores = serializers.CharField(max_length=64, required=False, allow_blank=True, label=_('CPU cores'))
|
||||
cpu_vcpus = serializers.CharField(max_length=64, required=False, allow_blank=True, label=_('CPU vcpus'))
|
||||
@ -17,7 +17,10 @@ class HostGatheredInfoSerializer(serializers.Serializer):
|
||||
distribution_version = serializers.CharField(max_length=16, allow_blank=True, required=False, label=_('OS version'))
|
||||
arch = serializers.CharField(max_length=16, allow_blank=True, required=False, label=_('OS arch'))
|
||||
|
||||
gpu_model = serializers.CharField(allow_blank=True, required=False, label=_('GPU model'))
|
||||
|
||||
|
||||
category_gathered_serializer_map = {
|
||||
'host': HostGatheredInfoSerializer,
|
||||
'ds': HostGatheredInfoSerializer,
|
||||
}
|
||||
|
Some files were not shown because too many files have changed in this diff.