Mirror of https://github.com/jumpserver/jumpserver.git (synced 2025-12-17 01:22:47 +00:00)

Compare commits: pr@dev@fea ... v4.9.0 (33 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 558188da90 |  |
|  | ad5460dab8 |  |
|  | 4d37dca0de |  |
|  | 2ca4002624 |  |
|  | 053d640e4c |  |
|  | f3acc28ded |  |
|  | 25987545db |  |
|  | 6720ecc6e0 |  |
|  | 0b3a7bb020 |  |
|  | 56373e362b |  |
|  | 02fc045370 |  |
|  | e4ac73896f |  |
|  | 1518f792d6 |  |
|  | 67277dd622 |  |
|  | 82e7f020ea |  |
|  | f20b9e01ab |  |
|  | 8cf8a3701b |  |
|  | 7ba24293d1 |  |
|  | f10114c9ed |  |
|  | cf31cbfb07 |  |
|  | 0edad24d5d |  |
|  | 1f1c1a9157 |  |
|  | 6c9d271ae1 |  |
|  | 6ff852e225 |  |
|  | baa75dc735 |  |
|  | 8a9f0436b8 |  |
|  | a9620a3cbe |  |
|  | 769e7dc8a0 |  |
|  | 2a70449411 |  |
|  | 8df720f19e |  |
|  | dabbb45f6e |  |
|  | ce24c1c3fd |  |
|  | 3c54c82ce9 |  |
@@ -8,6 +8,4 @@ celerybeat.pid
.vagrant/
apps/xpack/.git
.history/
.idea
.venv/
.env
.idea

4 .gitattributes vendored

@@ -0,0 +1,4 @@
*.mmdb filter=lfs diff=lfs merge=lfs -text
*.mo filter=lfs diff=lfs merge=lfs -text
*.ipdb filter=lfs diff=lfs merge=lfs -text
leak_passwords.db filter=lfs diff=lfs merge=lfs -text
@@ -1,14 +1,10 @@
version: 2
updates:
- package-ecosystem: "uv"
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "weekly"
day: "monday"
time: "09:30"
timezone: "Asia/Shanghai"
target-branch: dev
groups:
python-dependencies:
patterns:
- "*"
target-branch: dev

3 .gitignore vendored

@@ -46,6 +46,3 @@ test.py
.test/
*.mo
apps.iml
*.db
*.mmdb
*.ipdb
11 .prettierrc

@@ -1,11 +0,0 @@
{
"tabWidth": 4,
"useTabs": false,
"semi": true,
"singleQuote": true,
"trailingComma": "es5",
"bracketSpacing": true,
"arrowParens": "avoid",
"printWidth": 100,
"endOfLine": "lf"
}

@@ -1,4 +1,4 @@
FROM jumpserver/core-base:20250827_025554 AS stage-build
FROM jumpserver/core-base:20250415_032719 AS stage-build

ARG VERSION

@@ -33,7 +33,6 @@ ARG TOOLS=" \
default-libmysqlclient-dev \
openssh-client \
sshpass \
nmap \
bubblewrap"

ARG APT_MIRROR=http://deb.debian.org
@@ -1,6 +1,6 @@
FROM python:3.11-slim-bullseye
ARG TARGETARCH
COPY --from=ghcr.io/astral-sh/uv:0.6.14 /uv /uvx /usr/local/bin/

# Install APT dependencies
ARG DEPENDENCIES=" \
ca-certificates \

@@ -43,19 +43,18 @@ WORKDIR /opt/jumpserver
ARG PIP_MIRROR=https://pypi.org/simple
ENV POETRY_PYPI_MIRROR_URL=${PIP_MIRROR}
ENV ANSIBLE_COLLECTIONS_PATHS=/opt/py3/lib/python3.11/site-packages/ansible_collections
ENV LANG=en_US.UTF-8 \
    PATH=/opt/py3/bin:$PATH

ENV UV_LINK_MODE=copy

RUN --mount=type=cache,target=/root/.cache \
    --mount=type=bind,source=poetry.lock,target=poetry.lock \
    --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
    --mount=type=bind,source=requirements/clean_site_packages.sh,target=clean_site_packages.sh \
    --mount=type=bind,source=utils/clean_site_packages.sh,target=clean_site_packages.sh \
    --mount=type=bind,source=requirements/collections.yml,target=collections.yml \
    --mount=type=bind,source=requirements/static_files.sh,target=utils/static_files.sh \
    set -ex \
    && uv venv \
    && uv pip install -i${PIP_MIRROR} -r pyproject.toml \
    && ln -sf $(pwd)/.venv /opt/py3 \
    && bash utils/static_files.sh \
    && bash clean_site_packages.sh
    && python3 -m venv /opt/py3 \
    && pip install poetry poetry-plugin-pypi-mirror -i ${PIP_MIRROR} \
    && . /opt/py3/bin/activate \
    && poetry config virtualenvs.create false \
    && poetry install --no-cache --only main \
    && ansible-galaxy collection install -r collections.yml --force --ignore-certs \
    && bash clean_site_packages.sh \
    && poetry cache clear pypi --all
@@ -13,9 +13,7 @@ ARG TOOLS=" \
nmap \
telnet \
vim \
postgresql-client-13 \
wget \
poppler-utils"
wget"

RUN set -ex \
    && apt-get update \

@@ -26,7 +24,11 @@ RUN set -ex \
WORKDIR /opt/jumpserver

ARG PIP_MIRROR=https://pypi.org/simple

ENV POETRY_PYPI_MIRROR_URL=${PIP_MIRROR}
COPY poetry.lock pyproject.toml ./
RUN set -ex \
    && uv pip install -i${PIP_MIRROR} --group xpack \
    && playwright install chromium --with-deps --only-shell
    && . /opt/py3/bin/activate \
    && pip install poetry poetry-plugin-pypi-mirror -i ${PIP_MIRROR} \
    && poetry install --only xpack \
    && poetry cache clear pypi --all
19 README.md

@@ -2,29 +2,27 @@
<a name="readme-top"></a>
<a href="https://jumpserver.com" target="_blank"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a>

## An open-source PAM platform (Bastion Host)
## An open-source PAM tool (Bastion Host)

[![][license-shield]][license-link]
[![][docs-shield]][docs-link]
[![][deepwiki-shield]][deepwiki-link]
[![][discord-shield]][discord-link]
[![][docker-shield]][docker-link]
[![][github-release-shield]][github-release-link]
[![][github-stars-shield]][github-stars-link]

[English](/README.md) · [中文(简体)](/readmes/README.zh-hans.md) · [中文(繁體)](/readmes/README.zh-hant.md) · [日本語](/readmes/README.ja.md) · [Português (Brasil)](/readmes/README.pt-br.md) · [Español](/readmes/README.es.md) · [Русский](/readmes/README.ru.md) · [한국어](/readmes/README.ko.md)
[English](/README.md) · [中文(简体)](/readmes/README.zh-hans.md) · [中文(繁體)](/readmes/README.zh-hant.md) · [日本語](/readmes/README.ja.md) · [Português (Brasil)](/readmes/README.pt-br.md) · [Español](/readmes/README.es.md) · [Русский](/readmes/README.ru.md)

</div>
<br/>

## What is JumpServer?

JumpServer is an open-source Privileged Access Management (PAM) platform that provides DevOps and IT teams with on-demand and secure access to SSH, RDP, Kubernetes, Database and RemoteApp endpoints through a web browser.
JumpServer is an open-source Privileged Access Management (PAM) tool that provides DevOps and IT teams with on-demand and secure access to SSH, RDP, Kubernetes, Database and RemoteApp endpoints through a web browser.

<picture>
<source media="(prefers-color-scheme: light)" srcset="https://www.jumpserver.com/images/jumpserver-arch-light.png">
<source media="(prefers-color-scheme: dark)" srcset="https://www.jumpserver.com/images/jumpserver-arch-dark.png">
<source media="(prefers-color-scheme: light)" srcset="https://github.com/user-attachments/assets/dd612f3d-c958-4f84-b164-f31b75454d7f">
<source media="(prefers-color-scheme: dark)" srcset="https://github.com/user-attachments/assets/28676212-2bc4-4a9f-ae10-3be9320647e3">
<img src="https://github.com/user-attachments/assets/dd612f3d-c958-4f84-b164-f31b75454d7f" alt="Theme-based Image">
</picture>

@@ -85,8 +83,6 @@ JumpServer consists of multiple key components, which collectively form the func
| [Nec](https://github.com/jumpserver/nec) | <img alt="Nec" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE VNC Proxy Connector |
| [Facelive](https://github.com/jumpserver/facelive) | <img alt="Facelive" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Facial Recognition |

## Third-party projects
- [jumpserver-grafana-dashboard](https://github.com/acerrah/jumpserver-grafana-dashboard) JumpServer with grafana dashboard

## Contributing

@@ -105,7 +101,6 @@ Unless required by applicable law or agreed to in writing, software distributed
<!-- JumpServer official link -->
[docs-link]: https://jumpserver.com/docs
[discord-link]: https://discord.com/invite/W6vYXmAQG2
[deepwiki-link]: https://deepwiki.com/jumpserver/jumpserver/
[contributing-link]: https://github.com/jumpserver/jumpserver/blob/dev/CONTRIBUTING.md

<!-- JumpServer Other link-->

@@ -116,10 +111,8 @@ Unless required by applicable law or agreed to in writing, software distributed
[github-issues-link]: https://github.com/jumpserver/jumpserver/issues

<!-- Shield link-->
[docs-shield]: https://img.shields.io/badge/documentation-148F76
[github-release-shield]: https://img.shields.io/github/v/release/jumpserver/jumpserver
[github-stars-shield]: https://img.shields.io/github/stars/jumpserver/jumpserver?color=%231890FF&style=flat-square
[github-stars-shield]: https://img.shields.io/github/stars/jumpserver/jumpserver?color=%231890FF&style=flat-square
[docker-shield]: https://img.shields.io/docker/pulls/jumpserver/jms_all.svg
[license-shield]: https://img.shields.io/github/license/jumpserver/jumpserver
[deepwiki-shield]: https://img.shields.io/badge/deepwiki-devin?color=blue
[discord-shield]: https://img.shields.io/discord/1194233267294052363?style=flat&logo=discord&logoColor=%23f5f5f5&labelColor=%235462eb&color=%235462eb
@@ -41,8 +41,8 @@ class AccountViewSet(OrgBulkModelViewSet):
'partial_update': ['accounts.change_account'],
'su_from_accounts': 'accounts.view_account',
'clear_secret': 'accounts.change_account',
'move_to_assets': 'accounts.delete_account',
'copy_to_assets': 'accounts.add_account',
'move_to_assets': 'accounts.create_account',
'copy_to_assets': 'accounts.create_account',
}
export_as_zip = True

@@ -78,25 +78,18 @@ class AccountViewSet(OrgBulkModelViewSet):
permission_classes=[IsValidUser]
)
def username_suggestions(self, request, *args, **kwargs):
raw_asset_ids = request.data.get('assets', [])
asset_ids = request.data.get('assets', [])
node_ids = request.data.get('nodes', [])
username = request.data.get('username', '')

asset_ids = set(raw_asset_ids)

accounts = Account.objects.all()
if node_ids:
nodes = Node.objects.filter(id__in=node_ids)
node_asset_qs = Node.get_nodes_all_assets(*nodes).values_list('id', flat=True)
asset_ids |= {str(u) for u in node_asset_qs}
node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list('id', flat=True)
asset_ids.extend(node_asset_ids)

if asset_ids:
through = Asset.directory_services.through
ds_qs = through.objects.filter(asset_id__in=asset_ids) \
.values_list('directoryservice_id', flat=True)
asset_ids |= {str(u) for u in ds_qs}
accounts = Account.objects.filter(asset_id__in=list(asset_ids))
else:
accounts = Account.objects.all()
accounts = accounts.filter(asset_id__in=list(set(asset_ids)))

if username:
accounts = accounts.filter(username__icontains=username)

@@ -190,7 +183,6 @@ class AccountHistoriesSecretAPI(ExtraFilterFieldsMixin, AccountRecordViewLogMixi
rbac_perms = {
'GET': 'accounts.view_accountsecret',
}
queryset = Account.history.model.objects.none()

@lazyproperty
def account(self) -> Account:
@@ -62,7 +62,8 @@ class IntegrationApplicationViewSet(OrgBulkModelViewSet):
)
def get_once_secret(self, request, *args, **kwargs):
instance = self.get_object()
return Response(data={'id': instance.id, 'secret': instance.secret})
secret = instance.get_secret()
return Response(data={'id': instance.id, 'secret': secret})

@action(['GET'], detail=False, url_path='account-secret',
permission_classes=[RBACPermission])

@@ -20,7 +20,7 @@ __all__ = ['PamDashboardApi']
class PamDashboardApi(APIView):
http_method_names = ['get']
rbac_perms = {
'GET': 'rbac.view_pam',
'GET': 'accounts.view_account',
}

@staticmethod

@@ -43,7 +43,6 @@ class AccountTemplateViewSet(OrgBulkModelViewSet):
search_fields = ('username', 'name')
serializer_classes = {
'default': serializers.AccountTemplateSerializer,
'retrieve': serializers.AccountDetailTemplateSerializer,
}
rbac_perms = {
'su_from_account_templates': 'accounts.view_accounttemplate',

@@ -12,8 +12,6 @@ class VirtualAccountViewSet(OrgBulkModelViewSet):
filterset_fields = ('alias',)

def get_queryset(self):
if getattr(self, "swagger_fake_view", False):
return VirtualAccount.objects.none()
return VirtualAccount.get_or_init_queryset()

def get_object(self, ):
@@ -41,7 +41,6 @@ class AutomationAssetsListApi(generics.ListAPIView):

class AutomationRemoveAssetApi(generics.UpdateAPIView):
model = BaseAutomation
queryset = BaseAutomation.objects.all()
serializer_class = serializers.UpdateAssetSerializer
http_method_names = ['patch']

@@ -60,7 +59,6 @@ class AutomationRemoveAssetApi(generics.UpdateAPIView):

class AutomationAddAssetApi(generics.UpdateAPIView):
model = BaseAutomation
queryset = BaseAutomation.objects.all()
serializer_class = serializers.UpdateAssetSerializer
http_method_names = ['patch']
@@ -6,13 +6,10 @@ from rest_framework.decorators import action
from rest_framework.response import Response

from accounts import serializers
from accounts.const import (
AutomationTypes, ChangeSecretRecordStatusChoice
)
from accounts.filters import ChangeSecretRecordFilterSet, ChangeSecretStatusFilterSet
from accounts.models import ChangeSecretAutomation, ChangeSecretRecord, Account
from accounts.const import AutomationTypes, ChangeSecretRecordStatusChoice
from accounts.filters import ChangeSecretRecordFilterSet
from accounts.models import ChangeSecretAutomation, ChangeSecretRecord
from accounts.tasks import execute_automation_record_task
from accounts.utils import account_secret_task_status
from authentication.permissions import UserConfirmation, ConfirmType
from common.permissions import IsValidLicense
from orgs.mixins.api import OrgBulkModelViewSet, OrgGenericViewSet

@@ -26,7 +23,7 @@ __all__ = [
'ChangeSecretAutomationViewSet', 'ChangeSecretRecordViewSet',
'ChangSecretExecutionViewSet', 'ChangSecretAssetsListApi',
'ChangSecretRemoveAssetApi', 'ChangSecretAddAssetApi',
'ChangSecretNodeAddRemoveApi', 'ChangeSecretStatusViewSet'
'ChangSecretNodeAddRemoveApi'
]

@@ -97,13 +94,12 @@ class ChangeSecretRecordViewSet(mixins.ListModelMixin, OrgGenericViewSet):
def execute(self, request, *args, **kwargs):
record_ids = request.data.get('record_ids')
records = self.get_queryset().filter(id__in=record_ids)
if not records.exists():
execution_count = records.values_list('execution_id', flat=True).distinct().count()
if execution_count != 1:
return Response(
{'detail': 'No valid records found'},
{'detail': 'Only one execution is allowed to execute'},
status=status.HTTP_400_BAD_REQUEST
)

record_ids = [str(_id) for _id in records.values_list('id', flat=True)]
task = execute_automation_record_task.delay(record_ids, self.tp)
return Response({'task': task.id}, status=status.HTTP_200_OK)

@@ -154,27 +150,7 @@ class ChangSecretAddAssetApi(AutomationAddAssetApi):
model = ChangeSecretAutomation
serializer_class = serializers.ChangeSecretUpdateAssetSerializer

class ChangSecretNodeAddRemoveApi(AutomationNodeAddRemoveApi):
model = ChangeSecretAutomation
serializer_class = serializers.ChangeSecretUpdateNodeSerializer

class ChangeSecretStatusViewSet(OrgBulkModelViewSet):
perm_model = ChangeSecretAutomation
filterset_class = ChangeSecretStatusFilterSet
serializer_class = serializers.ChangeSecretAccountSerializer
search_fields = ('username',)

permission_classes = [RBACPermission, IsValidLicense]
http_method_names = ["get", "delete", "options"]

def get_queryset(self):
account_ids = list(account_secret_task_status.account_ids)
return Account.objects.filter(id__in=account_ids).select_related('asset')

def bulk_destroy(self, request, *args, **kwargs):
account_ids = request.data.get('account_ids')
if isinstance(account_ids, str):
account_ids = [account_ids]
for _id in account_ids:
account_secret_task_status.clear(_id)
return Response(status=status.HTTP_200_OK)
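The `ChangeSecretStatusViewSet` above reads pending account IDs from `account_secret_task_status` and clears them on bulk delete. That helper's implementation is not part of this diff; as a rough, hypothetical sketch only, a cache-backed registry exposing the same surface (`account_ids`, `get_status`, `set_status`, `clear`) could look like this:

```python
from django.core.cache import cache


class AccountSecretTaskStatus:
    """Hypothetical sketch; the real accounts.utils helper is not shown in this diff."""

    key_prefix = 'account_secret_task_status'
    index_key = key_prefix + ':ids'

    @property
    def account_ids(self):
        # IDs of accounts that currently have a pending change-secret task
        return set(cache.get(self.index_key) or [])

    def set_status(self, account_id, status, metadata=None, timeout=3600):
        # Note: this read-modify-write on the index is not atomic; a real
        # implementation would likely use a Redis set instead.
        ids = self.account_ids | {str(account_id)}
        cache.set(self.index_key, list(ids), timeout)
        cache.set(f'{self.key_prefix}:{account_id}',
                  {'status': status, **(metadata or {})}, timeout)

    def get_status(self, account_id):
        entry = cache.get(f'{self.key_prefix}:{account_id}') or {}
        return entry.get('status')

    def clear(self, account_id):
        ids = self.account_ids - {str(account_id)}
        cache.set(self.index_key, list(ids), None)
        cache.delete(f'{self.key_prefix}:{account_id}')


account_secret_task_status = AccountSecretTaskStatus()
```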
@@ -62,8 +62,7 @@ class ChangeSecretDashboardApi(APIView):
status_counts = defaultdict(lambda: defaultdict(int))

for date_finished, status in results:
dt_local = timezone.localtime(date_finished)
date_str = str(dt_local.date())
date_str = str(date_finished.date())
if status == ChangeSecretRecordStatusChoice.failed:
status_counts[date_str]['failed'] += 1
elif status == ChangeSecretRecordStatusChoice.success:
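The switch between `date_finished.date()` and `timezone.localtime(date_finished).date()` changes which calendar day a record is counted under when timestamps are stored in UTC but the deployment runs in another zone. A small stdlib-only illustration (the `Asia/Shanghai` zone is just an example of a local `TIME_ZONE`):

```python
from datetime import datetime, timezone
from zoneinfo import ZoneInfo

# A change-secret record that finished at 17:30 UTC on Jan 1
finished = datetime(2025, 1, 1, 17, 30, tzinfo=timezone.utc)

# Bucketing on the raw UTC timestamp counts it under Jan 1 ...
print(finished.date())  # 2025-01-01

# ... while converting to local time first (what django.utils.timezone.localtime()
# does under TIME_ZONE = "Asia/Shanghai") counts it under Jan 2.
print(finished.astimezone(ZoneInfo("Asia/Shanghai")).date())  # 2025-01-02
```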
@@ -91,10 +90,10 @@

def get_change_secret_asset_queryset(self):
qs = self.change_secrets_queryset
node_ids = qs.values_list('nodes', flat=True).distinct()
nodes = Node.objects.filter(id__in=node_ids).only('id', 'key')
node_ids = qs.filter(nodes__isnull=False).values_list('nodes', flat=True).distinct()
nodes = Node.objects.filter(id__in=node_ids)
node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list('id', flat=True)
direct_asset_ids = qs.values_list('assets', flat=True).distinct()
direct_asset_ids = qs.filter(assets__isnull=False).values_list('assets', flat=True).distinct()
asset_ids = set(list(direct_asset_ids) + list(node_asset_ids))
return Asset.objects.filter(id__in=asset_ids)
@@ -45,10 +45,10 @@ class CheckAccountAutomationViewSet(OrgBulkModelViewSet):
class CheckAccountExecutionViewSet(AutomationExecutionViewSet):
rbac_perms = (
("list", "accounts.view_checkaccountexecution"),
("retrieve", "accounts.view_checkaccountexecution"),
("retrieve", "accounts.view_checkaccountsexecution"),
("create", "accounts.add_checkaccountexecution"),
("adhoc", "accounts.add_checkaccountexecution"),
("report", "accounts.view_checkaccountexecution"),
("report", "accounts.view_checkaccountsexecution"),
)
ordering = ("-date_created",)
tp = AutomationTypes.check_account

@@ -150,9 +150,6 @@ class CheckAccountEngineViewSet(JMSModelViewSet):
http_method_names = ['get', 'options']

def get_queryset(self):
if getattr(self, "swagger_fake_view", False):
return CheckAccountEngine.objects.none()

return CheckAccountEngine.get_default_engines()

def filter_queryset(self, queryset: list):

@@ -63,10 +63,12 @@ class PushAccountRemoveAssetApi(AutomationRemoveAssetApi):
model = PushAccountAutomation
serializer_class = serializers.PushAccountUpdateAssetSerializer

class PushAccountAddAssetApi(AutomationAddAssetApi):
model = PushAccountAutomation
serializer_class = serializers.PushAccountUpdateAssetSerializer

class PushAccountNodeAddRemoveApi(AutomationNodeAddRemoveApi):
model = PushAccountAutomation
serializer_class = serializers.PushAccountUpdateNodeSerializer
serializer_class = serializers.PushAccountUpdateNodeSerializer
@@ -5,13 +5,12 @@ from django.utils import timezone
from django.utils.translation import gettext_lazy as _

from accounts.automations.methods import platform_automation_methods
from accounts.const import SSHKeyStrategy, SecretStrategy, SecretType, ChangeSecretRecordStatusChoice, \
ChangeSecretAccountStatus
from accounts.const import SSHKeyStrategy, SecretStrategy, SecretType, ChangeSecretRecordStatusChoice
from accounts.models import BaseAccountQuerySet
from accounts.utils import SecretGenerator, account_secret_task_status
from accounts.utils import SecretGenerator
from assets.automations.base.manager import BasePlaybookManager
from assets.const import HostTypes
from common.db.utils import safe_atomic_db_connection
from common.db.utils import safe_db_connection
from common.utils import get_logger

logger = get_logger(__name__)

@@ -37,7 +36,7 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
)
self.account_ids = self.execution.snapshot['accounts']
self.record_map = self.execution.snapshot.get('record_map', {})  # map of previously failed records to retry
self.name_record_mapper = {}  # keep a mapping for later processing
self.name_recorder_mapper = {}  # keep a mapping for later processing

def gen_account_inventory(self, account, asset, h, path_dir):
raise NotImplementedError

@@ -105,10 +104,6 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
h['account']['mode'] = 'sysdba' if account.privileged else None
return h

def add_extra_params(self, host, **kwargs):
host['ssh_params'] = {}
return host

def host_callback(self, host, asset=None, account=None, automation=None, path_dir=None, **kwargs):
host = super().host_callback(
host, asset=asset, account=account, automation=automation,

@@ -117,14 +112,10 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
if host.get('error'):
return host

host = self.add_extra_params(host, automation=automation)
host['check_conn_after_change'] = self.execution.snapshot.get('check_conn_after_change', True)
host['ssh_params'] = {}

accounts = self.get_accounts(account)
existing_ids = set(map(str, accounts.values_list('id', flat=True)))
missing_ids = set(map(str, self.account_ids)) - existing_ids

for account_id in missing_ids:
self.clear_account_queue_status(account_id)

error_msg = _("No pending accounts found")
if not accounts:
print(f'{asset}: {error_msg}')

@@ -141,50 +132,31 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
for account in accounts:
h = deepcopy(host)
h['name'] += '(' + account.username + ')'  # To distinguish different accounts

account_status = account_secret_task_status.get_status(account.id)
if account_status == ChangeSecretAccountStatus.PROCESSING:
h['error'] = f'Account is already being processed, skipping: {account}'
inventory_hosts.append(h)
continue

try:
h, record = self.gen_account_inventory(account, asset, h, path_dir)
h['check_conn_after_change'] = record.execution.snapshot.get('check_conn_after_change', True)
account_secret_task_status.set_status(
account.id,
ChangeSecretAccountStatus.PROCESSING,
metadata={'execution_id': self.execution.id}
)
h = self.gen_account_inventory(account, asset, h, path_dir)
except Exception as e:
h['error'] = str(e)
self.clear_account_queue_status(account.id)

inventory_hosts.append(h)

return inventory_hosts

@staticmethod
def save_record(record):
record.save(update_fields=['error', 'status', 'date_finished'])

@staticmethod
def clear_account_queue_status(account_id):
account_secret_task_status.clear(account_id)
def save_record(recorder):
recorder.save(update_fields=['error', 'status', 'date_finished'])

def on_host_success(self, host, result):
record = self.name_record_mapper.get(host)
if not record:
recorder = self.name_recorder_mapper.get(host)
if not recorder:
return
record.status = ChangeSecretRecordStatusChoice.success.value
record.date_finished = timezone.now()
recorder.status = ChangeSecretRecordStatusChoice.success.value
recorder.date_finished = timezone.now()

account = record.account
account = recorder.account
if not account:
print("Account not found, deleted ?")
return

account.secret = getattr(record, 'new_secret', account.secret)
account.secret = getattr(recorder, 'new_secret', account.secret)
account.date_updated = timezone.now()
account.date_change_secret = timezone.now()
account.change_secret_status = ChangeSecretRecordStatusChoice.success

@@ -198,19 +170,18 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
)
super().on_host_success(host, result)

with safe_atomic_db_connection():
with safe_db_connection():
account.save(update_fields=['secret', 'date_updated', 'date_change_secret', 'change_secret_status'])
self.save_record(record)
self.clear_account_queue_status(account.id)
self.save_record(recorder)

def on_host_error(self, host, error, result):
record = self.name_record_mapper.get(host)
if not record:
recorder = self.name_recorder_mapper.get(host)
if not recorder:
return
record.status = ChangeSecretRecordStatusChoice.failed.value
record.date_finished = timezone.now()
record.error = error
account = record.account
recorder.status = ChangeSecretRecordStatusChoice.failed.value
recorder.date_finished = timezone.now()
recorder.error = error
account = recorder.account
if not account:
print("Account not found, deleted ?")
return

@@ -221,13 +192,12 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
self.summary['fail_accounts'] += 1
self.result['fail_accounts'].append(
{
"asset": str(record.asset),
"username": record.account.username,
"asset": str(recorder.asset),
"username": recorder.account.username,
}
)
super().on_host_error(host, error, result)

with safe_atomic_db_connection():
with safe_db_connection():
account.save(update_fields=['change_secret_status', 'date_change_secret', 'date_updated'])
self.save_record(record)
self.clear_account_queue_status(account.id)
self.save_record(recorder)
@@ -53,6 +53,4 @@
ssl_certfile: "{{ jms_asset.secret_info.client_key | default('') }}"
connection_options:
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
when: check_conn_after_change
register: result
failed_when: not result.is_available
when: check_conn_after_change

@@ -39,8 +39,7 @@
name: "{{ account.username }}"
password: "{{ account.secret }}"
host: "%"
priv: "{{ omit if db_name == '' else db_name + '.*:ALL' }}"
append_privs: "{{ db_name != '' | bool }}"
priv: "{{ account.username + '.*:USAGE' if db_name == '' else db_name + '.*:ALL' }}"
ignore_errors: true
when: db_info is succeeded

@@ -56,5 +56,3 @@
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
when: check_conn_after_change
register: result
failed_when: not result.is_available
@@ -8,7 +8,7 @@ type:
params:
- name: groups
type: str
label: "{{ 'Params groups label' | trans }}"
label: '用户组'
default: 'Users,Remote Desktop Users'
help_text: "{{ 'Params groups help text' | trans }}"

@@ -24,7 +24,3 @@ i18n:
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

Params groups label:
zh: '用户组'
ja: 'グループ'
en: 'Groups'

@@ -9,7 +9,7 @@ type:
params:
- name: groups
type: str
label: "{{ 'Params groups label' | trans }}"
label: '用户组'
default: 'Users,Remote Desktop Users'
help_text: "{{ 'Params groups help text' | trans }}"

@@ -25,8 +25,3 @@ i18n:
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

Params groups label:
zh: '用户组'
ja: 'グループ'
en: 'Groups'

@@ -9,24 +9,19 @@ priority: 49
params:
- name: groups
type: str
label: "{{ 'Params groups label' | trans }}"
label: '用户组'
default: 'Users,Remote Desktop Users'
help_text: "{{ 'Params groups help text' | trans }}"

i18n:
Windows account change secret rdp verify:
zh: '使用 Ansible 模块 win_user 执行 Windows 账号改密(最后使用 Python 模块 pyfreerdp 验证账号的可连接性)'
ja: 'Ansible モジュール win_user を使用して Windows アカウントのパスワードを変更します (最後に Python モジュール pyfreerdp を使用してアカウントの接続を確認します)'
en: 'Use the Ansible module win_user to change the Windows account password (finally use the Python module pyfreerdp to verify the account connectivity)'
zh: '使用 Ansible 模块 win_user 执行 Windows 账号改密 RDP 协议测试最后的可连接性'
ja: 'Ansibleモジュールwin_userはWindowsアカウントの改密RDPプロトコルテストの最後の接続性を実行する'
en: 'Using the Ansible module win_user performs Windows account encryption RDP protocol testing for final connectivity'

Params groups help text:
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

Params groups label:
zh: '用户组'
ja: 'グループ'
en: 'Groups'
@@ -5,9 +5,6 @@ from django.conf import settings
from django.utils.translation import gettext_lazy as _
from xlsxwriter import Workbook

from assets.automations.methods import platform_automation_methods as asset_methods
from assets.const import AutomationTypes as AssetAutomationTypes
from accounts.automations.methods import platform_automation_methods as account_methods
from accounts.const import (
AutomationTypes, SecretStrategy, ChangeSecretRecordStatusChoice
)

@@ -25,22 +22,6 @@ logger = get_logger(__name__)
class ChangeSecretManager(BaseChangeSecretPushManager):
ansible_account_prefer = ''

def get_method_id_meta_mapper(self):
return {
method["id"]: method for method in self.platform_automation_methods
}

@property
def platform_automation_methods(self):
return asset_methods + account_methods

def add_extra_params(self, host, **kwargs):
host = super().add_extra_params(host, **kwargs)
automation = kwargs.get('automation')
for extra_type in [AssetAutomationTypes.ping, AutomationTypes.verify_account]:
host[f"{extra_type}_params"] = self.get_params(automation, extra_type)
return host

@classmethod
def method_type(cls):
return AutomationTypes.change_secret

@@ -49,28 +30,28 @@ class ChangeSecretManager(BaseChangeSecretPushManager):
record = self.get_or_create_record(asset, account, h['name'])
new_secret, private_key_path = self.handle_ssh_secret(account.secret_type, record.new_secret, path_dir)
h = self.gen_inventory(h, account, new_secret, private_key_path, asset)
return h, record
return h

def get_or_create_record(self, asset, account, name):
asset_account_id = f'{asset.id}-{account.id}'

if asset_account_id in self.record_map:
record_id = self.record_map[asset_account_id]
record = ChangeSecretRecord.objects.filter(id=record_id).first()
recorder = ChangeSecretRecord.objects.filter(id=record_id).first()
else:
new_secret = self.get_secret(account)
record = self.create_record(asset, account, new_secret)
recorder = self.create_record(asset, account, new_secret)

self.name_record_mapper[name] = record
return record
self.name_recorder_mapper[name] = recorder
return recorder

def create_record(self, asset, account, new_secret):
record = ChangeSecretRecord(
recorder = ChangeSecretRecord(
asset=asset, account=account, execution=self.execution,
old_secret=account.secret, new_secret=new_secret,
comment=f'{account.username}@{asset.address}'
)
return record
return recorder

def check_secret(self):
if self.secret_strategy == SecretStrategy.custom \

@@ -80,10 +61,10 @@ class ChangeSecretManager(BaseChangeSecretPushManager):
return True

@staticmethod
def get_summary(records):
def get_summary(recorders):
total, succeed, failed = 0, 0, 0
for record in records:
if record.status == ChangeSecretRecordStatusChoice.success.value:
for recorder in recorders:
if recorder.status == ChangeSecretRecordStatusChoice.success.value:
succeed += 1
else:
failed += 1

@@ -92,8 +73,8 @@ class ChangeSecretManager(BaseChangeSecretPushManager):
return summary

def print_summary(self):
records = list(self.name_record_mapper.values())
summary = self.get_summary(records)
recorders = list(self.name_recorder_mapper.values())
summary = self.get_summary(recorders)
print('\n\n' + '-' * 80)
plan_execution_end = _('Plan execution end')
print('{} {}\n'.format(plan_execution_end, local_now_filename()))

@@ -105,7 +86,7 @@ class ChangeSecretManager(BaseChangeSecretPushManager):
if self.secret_type and not self.check_secret():
return

records = list(self.name_record_mapper.values())
recorders = list(self.name_recorder_mapper.values())
if self.record_map:
return

@@ -117,17 +98,17 @@ class ChangeSecretManager(BaseChangeSecretPushManager):
for user in recipients:
ChangeSecretReportMsg(user, context).publish()

if not records:
if not recorders:
return

summary = self.get_summary(records)
self.send_record_mail(recipients, records, summary)
summary = self.get_summary(recorders)
self.send_recorder_mail(recipients, recorders, summary)

def send_record_mail(self, recipients, records, summary):
def send_recorder_mail(self, recipients, recorders, summary):
name = self.execution.snapshot['name']
path = os.path.join(os.path.dirname(settings.BASE_DIR), 'tmp')
filename = os.path.join(path, f'{name}-{local_now_filename()}-{time.time()}.xlsx')
if not self.create_file(records, filename):
if not self.create_file(recorders, filename):
return

for user in recipients:

@@ -140,9 +121,9 @@ class ChangeSecretManager(BaseChangeSecretPushManager):
os.remove(filename)

@staticmethod
def create_file(records, filename):
def create_file(recorders, filename):
serializer_cls = ChangeSecretRecordBackUpSerializer
serializer = serializer_cls(records, many=True)
serializer = serializer_cls(recorders, many=True)

header = [str(v.label) for v in serializer.child.fields.values()]
rows = [[str(i) for i in row.values()] for row in serializer.data]
@@ -1,36 +0,0 @@
- hosts: website
gather_facts: no
vars:
ansible_python_interpreter: "{{ local_python_interpreter }}"

tasks:
- name: Test privileged account
website_ping:
login_host: "{{ jms_asset.address }}"
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
steps: "{{ ping_params.steps }}"
load_state: "{{ ping_params.load_state }}"

- name: "Change {{ account.username }} password"
website_user:
login_host: "{{ jms_asset.address }}"
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
steps: "{{ params.steps }}"
load_state: "{{ params.load_state }}"
name: "{{ account.username }}"
password: "{{ account.secret }}"
ignore_errors: true
register: change_secret_result

- name: "Verify {{ account.username }} password"
website_ping:
login_host: "{{ jms_asset.address }}"
login_user: "{{ account.username }}"
login_password: "{{ account.secret }}"
steps: "{{ verify_account_params.steps }}"
load_state: "{{ verify_account_params.load_state }}"
when:
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost

@@ -1,51 +0,0 @@
id: change_account_website
name: "{{ 'Website account change secret' | trans }}"
category: web
type:
- website
method: change_secret
priority: 50
params:
- name: load_state
type: choice
label: "{{ 'Load state' | trans }}"
choices:
- [ networkidle, "{{ 'Network idle' | trans }}" ]
- [ domcontentloaded, "{{ 'Dom content loaded' | trans }}" ]
- [ load, "{{ 'Load completed' | trans }}" ]
default: 'load'
- name: steps
type: list
default: [ ]
label: "{{ 'Steps' | trans }}"
help_text: "{{ 'Params step help text' | trans }}"

i18n:
Website account change secret:
zh: 使用 Playwright 模拟浏览器变更账号密码
ja: Playwright を使用してブラウザをシミュレートし、アカウントのパスワードを変更します
en: Use Playwright to simulate a browser for account password change.
Load state:
zh: 加载状态检测
en: Load state detection
ja: ロード状態の検出
Steps:
zh: 步骤
en: Steps
ja: 手順
Network idle:
zh: 网络空闲
en: Network idle
ja: ネットワークが空いた状態
Dom content loaded:
zh: 文档内容加载完成
en: Dom content loaded
ja: ドキュメントの内容がロードされた状態
Load completed:
zh: 全部加载完成
en: All load completed
ja: すべてのロードが完了した状態
Params step help text:
zh: 根据配置决定任务执行步骤
ja: 設定に基づいてタスクの実行ステップを決定する
en: Determine task execution steps based on configuration

@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a2805a0264fc07ae597704841ab060edef8bf74654f525bc778cb9195d8cad0e
size 2547712
@@ -12,16 +12,13 @@ from accounts.models import Account, AccountRisk, RiskChoice
from assets.automations.base.manager import BaseManager
from common.const import ConfirmOrIgnore
from common.decorators import bulk_create_decorator, bulk_update_decorator
from settings.models import LeakPasswords

# finish() is called manually
@bulk_create_decorator(AccountRisk)
def create_risk(data):
return AccountRisk(**data)

# finish() is called manually
@bulk_update_decorator(AccountRisk, update_fields=["details", "status"])
def update_risk(risk):
return risk

@@ -160,8 +157,10 @@ class CheckLeakHandler(BaseCheckHandler):
if not account.secret:
return False

is_exist = LeakPasswords.objects.using('sqlite').filter(password=account.secret).exists()
return is_exist
sql = 'SELECT 1 FROM passwords WHERE password = ? LIMIT 1'
self.cursor.execute(sql, (account.secret,))
leak = self.cursor.fetchone() is not None
return leak

def clean(self):
self.cursor.close()
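Both variants of `CheckLeakHandler` test a plaintext secret against the bundled leak database: one goes through the ORM (`LeakPasswords.objects.using('sqlite')`), the other issues the parameterized SQL shown above through a long-lived cursor. A standalone sketch of the raw-SQL path (the file path and one-shot connection are illustrative; the `passwords` table name comes from the query above):

```python
import sqlite3


def is_leaked(db_path: str, secret: str) -> bool:
    # Parameterized lookup against the leak database; LIMIT 1 keeps it cheap.
    with sqlite3.connect(db_path) as conn:
        cur = conn.execute(
            'SELECT 1 FROM passwords WHERE password = ? LIMIT 1', (secret,)
        )
        return cur.fetchone() is not None


# Example usage (path is hypothetical):
# print(is_leaked('leak_passwords.db', 'admin123'))
```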
@@ -219,9 +218,6 @@ class CheckAccountManager(BaseManager):
"details": [{"datetime": now, 'type': 'init'}],
})

create_risk.finish()
update_risk.finish()

def pre_run(self):
super().pre_run()
self.assets = self.execution.get_all_assets()

@@ -240,11 +236,6 @@ class CheckAccountManager(BaseManager):

print("Check: {} => {}".format(account, msg))
if not error:
AccountRisk.objects.filter(
asset=account.asset,
username=account.username,
risk=handler.risk
).delete()
continue
self.add_risk(handler.risk, account)
self.commit_risks(_assets)

@@ -274,7 +265,7 @@ class CheckAccountManager(BaseManager):
handler.clean()

def get_report_subject(self):
return _("Check account report of {}").format(self.execution.id)
return "Check account report of %s" % self.execution.id

def get_report_template(self):
return "accounts/check_account_report.html"
@@ -30,16 +30,6 @@ common_risk_items = [
diff_items = risk_items + common_risk_items

@bulk_create_decorator(AccountRisk)
def _create_risk(data):
return AccountRisk(**data)

@bulk_update_decorator(AccountRisk, update_fields=["details"])
def _update_risk(account):
return account

def format_datetime(value):
if isinstance(value, timezone.datetime):
return value.strftime("%Y-%m-%d %H:%M:%S")

@@ -151,17 +141,25 @@ class AnalyseAccountRisk:
found = assets_risks.get(key)

if not found:
_create_risk(dict(**d, details=[detail]))
self._create_risk(dict(**d, details=[detail]))
continue

found.details.append(detail)
_update_risk(found)
self._update_risk(found)

@bulk_create_decorator(AccountRisk)
def _create_risk(self, data):
return AccountRisk(**data)

@bulk_update_decorator(AccountRisk, update_fields=["details"])
def _update_risk(self, account):
return account

def lost_accounts(self, asset, lost_users):
if not self.check_risk:
return
for user in lost_users:
_create_risk(
self._create_risk(
dict(
asset_id=str(asset.id),
username=user,

@@ -178,7 +176,7 @@ class AnalyseAccountRisk:
self._analyse_item_changed(ga, d)
if not sys_found:
basic = {"asset": asset, "username": d["username"], 'gathered_account': ga}
_create_risk(
self._create_risk(
dict(
**basic,
risk=RiskChoice.new_found,

@@ -390,7 +388,6 @@ class GatherAccountsManager(AccountBasePlaybookManager):
self.update_gathered_account(ori_account, d)
ori_found = username in ori_users
need_analyser_gather_account.append((asset, ga, d, ori_found))
# The order here must not change: risk has a foreign key to the gathered_account primary key, so gathered_account must be fully created before any risk is created
self.create_gathered_account.finish()
self.update_gathered_account.finish()
for analysis_data in need_analyser_gather_account:

@@ -406,9 +403,6 @@ class GatherAccountsManager(AccountBasePlaybookManager):
present=True
)
# Because of the bulk create / bulk update, sleep briefly here to wait for the data to be synced
_update_risk.finish()
_create_risk.finish()

time.sleep(0.5)

def get_report_template(self):
@@ -20,11 +20,10 @@
become_private_key_path: "{{ jms_custom_become_private_key_path | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
recv_timeout: "{{ params.recv_timeout | default(30) }}"
register: ping_info
delegate_to: localhost

- name: Push asset password (paramiko)
- name: Change asset password (paramiko)
custom_command:
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"

@@ -40,10 +39,7 @@
name: "{{ account.username }}"
password: "{{ account.secret }}"
commands: "{{ params.commands }}"
answers: "{{ params.answers }}"
recv_timeout: "{{ params.recv_timeout | default(30) }}"
delay_time: "{{ params.delay_time | default(2) }}"
prompt: "{{ params.prompt | default('.*') }}"
first_conn_delay_time: "{{ first_conn_delay_time | default(0.5) }}"
ignore_errors: true
when: ping_info is succeeded and check_conn_after_change
register: change_info

@@ -62,6 +58,5 @@
become_private_key_path: "{{ account.become.ansible_ssh_private_key_file | default(None) }}"
old_ssh_version: "{{ jms_asset.old_ssh_version | default(False) }}"
gateway_args: "{{ jms_asset.ansible_ssh_common_args | default(None) }}"
recv_timeout: "{{ params.recv_timeout | default(30) }}"
delegate_to: localhost
when: check_conn_after_change
@@ -10,30 +10,10 @@ protocol: ssh
priority: 50
params:
- name: commands
type: text
type: list
label: "{{ 'Params commands label' | trans }}"
default: ''
default: [ '' ]
help_text: "{{ 'Params commands help text' | trans }}"
- name: recv_timeout
type: int
label: "{{ 'Params recv_timeout label' | trans }}"
default: 30
help_text: "{{ 'Params recv_timeout help text' | trans }}"
- name: delay_time
type: int
label: "{{ 'Params delay_time label' | trans }}"
default: 2
help_text: "{{ 'Params delay_time help text' | trans }}"
- name: prompt
type: str
label: "{{ 'Params prompt label' | trans }}"
default: '.*'
help_text: "{{ 'Params prompt help text' | trans }}"
- name: answers
type: text
label: "{{ 'Params answer label' | trans }}"
default: '.*'
help_text: "{{ 'Params answer help text' | trans }}"

i18n:
SSH account push:

@@ -42,91 +22,11 @@ i18n:
en: 'Custom push using SSH command line'

Params commands help text:
zh: |
请将命令中的指定位置改成特殊符号 <br />
1. 推送账号 -> {username} <br />
2. 推送密码 -> {password} <br />
3. 登录用户密码 -> {login_password} <br />
<strong>多条命令使用换行分割,</strong>执行任务时系统会根据特殊符号替换真实数据。<br />
比如针对 Cisco 主机进行推送,一般需要配置五条命令:<br />
enable <br />
{login_password} <br />
configure terminal <br />
username {username} privilege 0 password {password} <br />
end <br />
ja: |
コマンド内の指定された位置を特殊記号に変更してください。<br />
新しいパスワード(アカウント押す) -> {username} <br />
新しいパスワード(パスワード押す) -> {password} <br />
ログインユーザーパスワード -> {login_password} <br />
<strong>複数のコマンドは改行で区切り、</strong>タスクを実行するときにシステムは特殊記号を使用して実際のデータを置き換えます。<br />
例えば、Cisco機器のパスワードを変更する場合、一般的には5つのコマンドを設定する必要があります:<br />
enable <br />
{login_password} <br />
configure terminal <br />
username {username} privilege 0 password {password} <br />
end <br />
en: |
Please change the specified positions in the command to special symbols. <br />
Change password account -> {username} <br />
Change password -> {password} <br />
Login user password -> {login_password} <br />
<strong>Multiple commands are separated by new lines,</strong> and when executing tasks, <br />
the system will replace the special symbols with real data. <br />
For example, to push the password for a Cisco device, you generally need to configure five commands: <br />
enable <br />
{login_password} <br />
configure terminal <br />
username {username} privilege 0 password {password} <br />
end <br />
zh: '自定义命令中如需包含账号的 账号、密码、SSH 连接的用户密码 字段,<br />请使用 {username}、{password}、{login_password}格式,执行任务时会进行替换 。<br />比如针对 Cisco 主机进行改密,一般需要配置五条命令:<br />1. enable<br />2. {login_password}<br />3. configure terminal<br />4. username {username} privilege 0 password {password} <br />5. end'
ja: 'カスタム コマンドに SSH 接続用のアカウント番号、パスワード、ユーザー パスワード フィールドを含める必要がある場合は、<br />{ユーザー名}、{パスワード}、{login_password& を使用してください。 # 125; 形式。タスクの実行時に置き換えられます。 <br />たとえば、Cisco ホストのパスワードを変更するには、通常、次の 5 つのコマンドを設定する必要があります:<br />1.enable<br />2.{login_password}<br />3 .ターミナルの設定<br / >4. ユーザー名 {ユーザー名} 権限 0 パスワード {パスワード} <br />5. 終了'
en: 'If the custom command needs to include the account number, password, and user password field for SSH connection,<br />Please use {username}, {password}, {login_password&# 125; format, which will be replaced when executing the task. <br />For example, to change the password of a Cisco host, you generally need to configure five commands:<br />1. enable<br />2. {login_password}<br />3. configure terminal<br / >4. username {username} privilege 0 password {password} <br />5. end'

Params commands label:
zh: '自定义命令'
ja: 'カスタムコマンド'
en: 'Custom command'

Params recv_timeout label:
zh: '超时时间'
ja: 'タイムアウト'
en: 'Timeout'

Params recv_timeout help text:
zh: '等待命令结果返回的超时时间(秒)'
ja: 'コマンドの結果を待つタイムアウト時間(秒)'
en: 'The timeout for waiting for the command result to return (Seconds)'

Params delay_time label:
zh: '延迟发送时间'
ja: '遅延送信時間'
en: 'Delayed send time'

Params delay_time help text:
zh: '每条命令延迟发送的时间间隔(秒)'
ja: '各コマンド送信の遅延間隔(秒)'
en: 'Time interval for each command delay in sending (Seconds)'

Params prompt label:
zh: '提示符'
ja: 'ヒント'
en: 'Prompt'

Params prompt help text:
zh: '终端连接后显示的提示符信息(正则表达式)'
ja: 'ターミナル接続後に表示されるプロンプト情報(正規表現)'
en: 'Prompt information displayed after terminal connection (Regular expression)'

Params answer label:
zh: '命令结果'
ja: 'コマンド結果'
en: 'Command result'

Params answer help text:
zh: |
根据结果匹配度决定是否执行下一条命令,输入框的内容和上方 “自定义命令” 内容按行一一对应(正则表达式)
ja: |
結果の一致度に基づいて次のコマンドを実行するかどうかを決定します。
入力欄の内容は、上の「カスタムコマンド」の内容と行ごとに対応しています(せいきひょうげん)
en: |
Decide whether to execute the next command based on the result match.
The input content corresponds line by line with the content
of the `Custom command` above. (Regular expression)
@@ -54,5 +54,3 @@
connection_options:
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
when: check_conn_after_change
register: result
failed_when: not result.is_available

@@ -39,8 +39,7 @@
name: "{{ account.username }}"
password: "{{ account.secret }}"
host: "%"
priv: "{{ omit if db_name == '' else db_name + '.*:ALL' }}"
append_privs: "{{ db_name != '' | bool }}"
priv: "{{ account.username + '.*:USAGE' if db_name == '' else db_name + '.*:ALL' }}"
ignore_errors: true
when: db_info is succeeded

@@ -8,7 +8,7 @@ type:
params:
- name: groups
type: str
label: "{{ 'Params groups label' | trans }}"
label: '用户组'
default: 'Users,Remote Desktop Users'
help_text: "{{ 'Params groups help text' | trans }}"

@@ -22,8 +22,3 @@ i18n:
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

Params groups label:
zh: '用户组'
ja: 'グループ'
en: 'Groups'

@@ -9,7 +9,7 @@ type:
params:
- name: groups
type: str
label: "{{ 'Params groups label' | trans }}"
label: '用户组'
default: 'Users,Remote Desktop Users'
help_text: "{{ 'Params groups help text' | trans }}"

@@ -23,8 +23,3 @@ i18n:
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

Params groups label:
zh: '用户组'
ja: 'グループ'
en: 'Groups'

@@ -9,7 +9,7 @@ priority: 49
params:
- name: groups
type: str
label: "{{ 'Params groups label' | trans }}"
label: '用户组'
default: 'Users,Remote Desktop Users'
help_text: "{{ 'Params groups help text' | trans }}"

@@ -23,8 +23,3 @@ i18n:
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'

Params groups label:
zh: '用户组'
ja: 'グループ'
en: 'Groups'
@@ -12,7 +12,7 @@ logger = get_logger(__name__)
class PushAccountManager(BaseChangeSecretPushManager):

@staticmethod
def require_update_version(account, record):
def require_update_version(account, recorder):
account.skip_history_when_saving = True
return False

@@ -31,29 +31,29 @@ class PushAccountManager(BaseChangeSecretPushManager):
secret_type = account.secret_type
if not secret:
raise ValueError(_('Secret cannot be empty'))
record = self.get_or_create_record(asset, account, h['name'])
self.get_or_create_record(asset, account, h['name'])
new_secret, private_key_path = self.handle_ssh_secret(secret_type, secret, path_dir)
h = self.gen_inventory(h, account, new_secret, private_key_path, asset)
return h, record
return h

def get_or_create_record(self, asset, account, name):
asset_account_id = f'{asset.id}-{account.id}'

if asset_account_id in self.record_map:
record_id = self.record_map[asset_account_id]
record = PushSecretRecord.objects.filter(id=record_id).first()
recorder = PushSecretRecord.objects.filter(id=record_id).first()
else:
record = self.create_record(asset, account)
recorder = self.create_record(asset, account)

self.name_record_mapper[name] = record
return record
self.name_recorder_mapper[name] = recorder
return recorder

def create_record(self, asset, account):
record = PushSecretRecord(
recorder = PushSecretRecord(
asset=asset, account=account, execution=self.execution,
comment=f'{account.username}@{asset.address}'
)
return record
return recorder

def print_summary(self):
print('\n\n' + '-' * 80)
@@ -3,7 +3,7 @@
vars:
ansible_shell_type: sh
ansible_connection: local
ansible_python_interpreter: "{{ local_python_interpreter }}"
ansible_python_interpreter: /opt/py3/bin/python

tasks:
- name: Verify account (pyfreerdp)

@@ -16,5 +16,3 @@
ssl_certfile: "{{ jms_asset.secret_info.client_key | default('') }}"
connection_options:
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert }}"
register: result
failed_when: not result.is_available

@@ -8,7 +8,6 @@
ansible_user: "{{ account.username }}"
ansible_password: "{{ account.secret }}"
ansible_ssh_private_key_file: "{{ account.private_key_path }}"
ansible_timeout: 30
when: not account.become.ansible_become

- name: Verify account connectivity(Switch)

@@ -21,5 +20,4 @@
ansible_become_method: "{{ account.become.ansible_become_method }}"
ansible_become_user: "{{ account.become.ansible_become_user }}"
ansible_become_password: "{{ account.become.ansible_become_password }}"
ansible_timeout: 30
when: account.become.ansible_become

@@ -9,4 +9,3 @@
vars:
ansible_user: "{{ account.full_username }}"
ansible_password: "{{ account.secret }}"
ansible_timeout: 30

@@ -85,7 +85,6 @@ class VerifyAccountManager(AccountBasePlaybookManager):
def on_host_error(self, host, error, result):
account = self.host_account_mapper.get(host)
try:
error_tp = account.get_err_connectivity(error)
account.set_connectivity(error_tp)
account.set_connectivity(Connectivity.ERR)
except Exception as e:
print(f'\033[31m Update account {account.name} connectivity failed: {e} \033[0m\n')
@@ -1,13 +0,0 @@
|
||||
- hosts: website
|
||||
gather_facts: no
|
||||
vars:
|
||||
ansible_python_interpreter: "{{ local_python_interpreter }}"
|
||||
|
||||
tasks:
|
||||
- name: Verify account
|
||||
website_ping:
|
||||
login_host: "{{ jms_asset.address }}"
|
||||
login_user: "{{ account.username }}"
|
||||
login_password: "{{ account.secret }}"
|
||||
steps: "{{ params.steps }}"
|
||||
load_state: "{{ params.load_state }}"
|
||||
@@ -1,50 +0,0 @@
|
||||
id: verify_account_website
|
||||
name: "{{ 'Website account verify' | trans }}"
|
||||
category: web
|
||||
type:
|
||||
- website
|
||||
method: verify_account
|
||||
priority: 50
|
||||
params:
|
||||
- name: load_state
|
||||
type: choice
|
||||
label: "{{ 'Load state' | trans }}"
|
||||
choices:
|
||||
- [ networkidle, "{{ 'Network idle' | trans }}" ]
|
||||
- [ domcontentloaded, "{{ 'Dom content loaded' | trans }}" ]
|
||||
- [ load, "{{ 'Load completed' | trans }}" ]
|
||||
default: 'load'
|
||||
- name: steps
|
||||
type: list
|
||||
label: "{{ 'Steps' | trans }}"
|
||||
help_text: "{{ 'Params step help text' | trans }}"
|
||||
default: []
|
||||
i18n:
|
||||
Website account verify:
|
||||
zh: 使用 Playwright 模拟浏览器验证账号
|
||||
ja: Playwright を使用してブラウザをシミュレートし、アカウントの検証を行います
|
||||
en: Use Playwright to simulate a browser for account verification.
|
||||
Load state:
|
||||
zh: 加载状态检测
|
||||
en: Load state detection
|
||||
ja: ロード状態の検出
|
||||
Steps:
|
||||
zh: 步骤
|
||||
en: Steps
|
||||
ja: 手順
|
||||
Network idle:
|
||||
zh: 网络空闲
|
||||
en: Network idle
|
||||
ja: ネットワークが空いた状態
|
||||
Dom content loaded:
|
||||
zh: 文档内容加载完成
|
||||
en: Dom content loaded
|
||||
ja: ドキュメントの内容がロードされた状態
|
||||
Load completed:
|
||||
zh: 全部加载完成
|
||||
en: All load completed
|
||||
ja: すべてのロードが完了した状態
|
||||
Params step help text:
|
||||
zh: 配置步骤,根据配置决定任务执行步骤
|
||||
ja: パラメータを設定し、設定に基づいてタスクの実行手順を決定します
|
||||
en: Configure steps, and determine the task execution steps based on the configuration.
|
||||
@@ -1,5 +1,8 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError
|
||||
from azure.identity import ClientSecretCredential
|
||||
from azure.keyvault.secrets import SecretClient
|
||||
|
||||
from common.utils import get_logger
|
||||
|
||||
@@ -11,9 +14,6 @@ __all__ = ['AZUREVaultClient']
|
||||
class AZUREVaultClient(object):
|
||||
|
||||
def __init__(self, vault_url, tenant_id, client_id, client_secret):
|
||||
from azure.identity import ClientSecretCredential
|
||||
from azure.keyvault.secrets import SecretClient
|
||||
|
||||
authentication_endpoint = 'https://login.microsoftonline.com/' \
|
||||
if ('azure.net' in vault_url) else 'https://login.chinacloudapi.cn/'
|
||||
|
||||
@@ -23,8 +23,6 @@ class AZUREVaultClient(object):
|
||||
self.client = SecretClient(vault_url=vault_url, credential=credentials)
|
||||
|
||||
def is_active(self):
|
||||
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError
|
||||
|
||||
try:
|
||||
self.client.set_secret('jumpserver', '666')
|
||||
except (ResourceNotFoundError, ClientAuthenticationError) as e:
|
||||
@@ -34,8 +32,6 @@ class AZUREVaultClient(object):
|
||||
return True, ''
|
||||
|
||||
def get(self, name, version=None):
|
||||
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError
|
||||
|
||||
try:
|
||||
secret = self.client.get_secret(name, version)
|
||||
return secret.value
|
||||
|
||||
@@ -17,7 +17,7 @@ __all__ = [
|
||||
'AutomationTypes', 'SecretStrategy', 'SSHKeyStrategy', 'Connectivity',
|
||||
'DEFAULT_PASSWORD_LENGTH', 'DEFAULT_PASSWORD_RULES', 'TriggerChoice',
|
||||
'PushAccountActionChoice', 'AccountBackupType', 'ChangeSecretRecordStatusChoice',
|
||||
'GatherAccountDetailField', 'ChangeSecretAccountStatus'
|
||||
'GatherAccountDetailField'
|
||||
]
|
||||
|
||||
|
||||
@@ -117,12 +117,6 @@ class ChangeSecretRecordStatusChoice(models.TextChoices):
|
||||
pending = 'pending', _('Pending')
|
||||
|
||||
|
||||
class ChangeSecretAccountStatus(models.TextChoices):
|
||||
QUEUED = 'queued', _('Queued')
|
||||
READY = 'ready', _('Ready')
|
||||
PROCESSING = 'processing', _('Processing')
|
||||
|
||||
|
||||
class GatherAccountDetailField(models.TextChoices):
|
||||
can_login = 'can_login', _('Can login')
|
||||
superuser = 'superuser', _('Superuser')
|
||||
|
||||
@@ -17,7 +17,6 @@ from common.utils.timezone import local_zero_hour, local_now
|
||||
from .const.automation import ChangeSecretRecordStatusChoice
|
||||
from .models import Account, GatheredAccount, ChangeSecretRecord, PushSecretRecord, IntegrationApplication, \
|
||||
AutomationExecution
|
||||
from .utils import account_secret_task_status
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
@@ -234,7 +233,7 @@ class AutomationExecutionFilterSet(DaysExecutionFilterMixin, BaseFilterSet):
|
||||
|
||||
class Meta:
|
||||
model = AutomationExecution
|
||||
fields = ["days", 'trigger', 'automation__name']
|
||||
fields = ["days", 'trigger', 'automation_id', 'automation__name']
|
||||
|
||||
|
||||
class PushAccountRecordFilterSet(SecretRecordMixin, UUIDFilterMixin, BaseFilterSet):
|
||||
@@ -243,25 +242,3 @@ class PushAccountRecordFilterSet(SecretRecordMixin, UUIDFilterMixin, BaseFilterS
|
||||
class Meta:
|
||||
model = PushSecretRecord
|
||||
fields = ["id", "status", "asset_id", "execution_id"]
|
||||
|
||||
|
||||
class ChangeSecretStatusFilterSet(BaseFilterSet):
|
||||
asset_name = drf_filters.CharFilter(
|
||||
field_name="asset__name", lookup_expr="icontains"
|
||||
)
|
||||
status = drf_filters.CharFilter(method='filter_dynamic')
|
||||
execution_id = drf_filters.CharFilter(method='filter_dynamic')
|
||||
|
||||
class Meta:
|
||||
model = Account
|
||||
fields = ["username"]
|
||||
|
||||
@staticmethod
|
||||
def filter_dynamic(queryset, name, value):
|
||||
_ids = list(queryset.values_list('id', flat=True))
|
||||
data_map = {
|
||||
_id: account_secret_task_status.get(str(_id)).get(name)
|
||||
for _id in _ids
|
||||
}
|
||||
matched = [_id for _id, v in data_map.items() if v == value]
|
||||
return queryset.filter(id__in=matched)
|
||||
|
||||
@@ -46,16 +46,11 @@ class Migration(migrations.Migration):
|
||||
],
|
||||
options={
|
||||
'verbose_name': 'Account',
|
||||
'permissions': [
|
||||
('view_accountsecret', 'Can view asset account secret'),
|
||||
('view_historyaccount', 'Can view asset history account'),
|
||||
('view_historyaccountsecret', 'Can view asset history account secret'),
|
||||
('verify_account', 'Can verify account'),
|
||||
('push_account', 'Can push account'),
|
||||
('remove_account', 'Can remove account'),
|
||||
('view_accountsession', 'Can view session'),
|
||||
('view_accountactivity', 'Can view activity')
|
||||
],
|
||||
'permissions': [('view_accountsecret', 'Can view asset account secret'),
|
||||
('view_historyaccount', 'Can view asset history account'),
|
||||
('view_historyaccountsecret', 'Can view asset history account secret'),
|
||||
('verify_account', 'Can verify account'), ('push_account', 'Can push account'),
|
||||
('remove_account', 'Can remove account')],
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
|
||||
@@ -335,7 +335,6 @@ class Migration(migrations.Migration):
|
||||
],
|
||||
options={
|
||||
"abstract": False,
|
||||
"verbose_name": "Check engine",
|
||||
},
|
||||
),
|
||||
migrations.CreateModel(
|
||||
@@ -630,15 +629,10 @@ class Migration(migrations.Migration):
|
||||
name="connectivity",
|
||||
field=models.CharField(
|
||||
choices=[
|
||||
('-', 'Unknown'),
|
||||
('na', 'N/A'),
|
||||
('ok', 'OK'),
|
||||
('err', 'Error'),
|
||||
('auth_err', 'Authentication error'),
|
||||
('password_err', 'Invalid password error'),
|
||||
('openssh_key_err', 'OpenSSH key error'),
|
||||
('ntlm_err', 'NTLM credentials rejected error'),
|
||||
('create_temp_err', 'Create temporary error')
|
||||
("-", "Unknown"),
|
||||
("na", "N/A"),
|
||||
("ok", "OK"),
|
||||
("err", "Error"),
|
||||
],
|
||||
default="-",
|
||||
max_length=16,
|
||||
|
||||
@@ -1,29 +0,0 @@
|
||||
# Generated by Django 4.1.13 on 2025-05-06 10:23
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('accounts', '0006_alter_accountrisk_username_and_more'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='account',
|
||||
name='connectivity',
|
||||
field=models.CharField(choices=[
|
||||
('-', 'Unknown'),
|
||||
('na', 'N/A'),
|
||||
('ok', 'OK'),
|
||||
('err', 'Error'),
|
||||
('rdp_err', 'RDP error'),
|
||||
('auth_err', 'Authentication error'),
|
||||
('password_err', 'Invalid password error'),
|
||||
('openssh_key_err', 'OpenSSH key error'),
|
||||
('ntlm_err', 'NTLM credentials rejected error'),
|
||||
('create_temp_err', 'Create temporary error')
|
||||
],
|
||||
default='-', max_length=16, verbose_name='Connectivity'),
|
||||
),
|
||||
]
|
||||
@@ -116,8 +116,6 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
|
||||
('verify_account', _('Can verify account')),
|
||||
('push_account', _('Can push account')),
|
||||
('remove_account', _('Can remove account')),
|
||||
('view_accountsession', _('Can view session')),
|
||||
('view_accountactivity', _('Can view activity')),
|
||||
]
|
||||
|
||||
def __str__(self):
|
||||
@@ -132,7 +130,7 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
|
||||
return self.asset.platform
|
||||
|
||||
@lazyproperty
|
||||
def alias(self) -> str:
|
||||
def alias(self):
|
||||
"""
|
||||
别称,因为有虚拟账号,@INPUT @MANUAL @USER, 否则为 id
|
||||
"""
|
||||
@@ -140,13 +138,13 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
|
||||
return self.username
|
||||
return str(self.id)
|
||||
|
||||
def is_virtual(self) -> bool:
|
||||
def is_virtual(self):
|
||||
"""
|
||||
不要用 username 去判断,因为可能是构造的 account 对象,设置了同名账号的用户名,
|
||||
"""
|
||||
return self.alias.startswith('@')
|
||||
|
||||
def is_ds_account(self) -> bool:
|
||||
def is_ds_account(self):
|
||||
if self.is_virtual():
|
||||
return ''
|
||||
if not self.asset.is_directory_service:
|
||||
@@ -160,7 +158,7 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
|
||||
return self.asset.ds
|
||||
|
||||
@lazyproperty
|
||||
def ds_domain(self) -> str:
|
||||
def ds_domain(self):
|
||||
"""这个不能去掉,perm_account 会动态设置这个值,以更改 full_username"""
|
||||
if self.is_virtual():
|
||||
return ''
|
||||
@@ -168,21 +166,18 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
|
||||
return self.ds.domain_name
|
||||
return ''
|
||||
|
||||
def username_has_domain(self):
|
||||
return '@' in self.username or '\\' in self.username
|
||||
|
||||
@property
|
||||
def full_username(self) -> str:
|
||||
if not self.username_has_domain() and self.ds_domain:
|
||||
def full_username(self):
|
||||
if self.ds_domain:
|
||||
return '{}@{}'.format(self.username, self.ds_domain)
|
||||
return self.username
|
||||
|
||||
@lazyproperty
|
||||
def has_secret(self) -> bool:
|
||||
def has_secret(self):
|
||||
return bool(self.secret)
|
||||
|
||||
@lazyproperty
|
||||
def versions(self) -> int:
|
||||
def versions(self):
|
||||
return self.history.count()
|
||||
|
||||
def get_su_from_accounts(self):
|
||||
|
||||
@@ -33,7 +33,7 @@ class IntegrationApplication(JMSOrgBaseModel):
|
||||
return qs.filter(*query)
|
||||
|
||||
@property
|
||||
def accounts_amount(self) -> int:
|
||||
def accounts_amount(self):
|
||||
return self.get_accounts().count()
|
||||
|
||||
@property
|
||||
|
||||
@@ -68,10 +68,8 @@ class AccountRisk(JMSOrgBaseModel):
|
||||
related_name='risks', null=True
|
||||
)
|
||||
risk = models.CharField(max_length=128, verbose_name=_('Risk'), choices=RiskChoice.choices)
|
||||
status = models.CharField(
|
||||
max_length=32, choices=ConfirmOrIgnore.choices, default=ConfirmOrIgnore.pending,
|
||||
blank=True, verbose_name=_('Status')
|
||||
)
|
||||
status = models.CharField(max_length=32, choices=ConfirmOrIgnore.choices, default=ConfirmOrIgnore.pending,
|
||||
blank=True, verbose_name=_('Status'))
|
||||
details = models.JSONField(default=list, verbose_name=_('Detail'))
|
||||
|
||||
class Meta:
|
||||
@@ -121,9 +119,6 @@ class CheckAccountEngine(JMSBaseModel):
|
||||
def __str__(self):
|
||||
return self.name
|
||||
|
||||
class Meta:
|
||||
verbose_name = _('Check engine')
|
||||
|
||||
@staticmethod
|
||||
def get_default_engines():
|
||||
data = [
|
||||
@@ -133,7 +128,7 @@ class CheckAccountEngine(JMSBaseModel):
|
||||
"name": _("Check the discovered accounts"),
|
||||
"comment": _(
|
||||
"Perform checks and analyses based on automatically discovered account results, "
|
||||
"including user groups, public keys, sudoers, and other information."
|
||||
"including user groups, public keys, sudoers, and other information"
|
||||
)
|
||||
},
|
||||
{
|
||||
@@ -149,13 +144,13 @@ class CheckAccountEngine(JMSBaseModel):
|
||||
"id": "00000000-0000-0000-0000-000000000003",
|
||||
"slug": "check_account_repeat",
|
||||
"name": _("Check if the account and password are repeated"),
|
||||
"comment": _("Check if the account is the same as other accounts.")
|
||||
"comment": _("Check if the account is the same as other accounts")
|
||||
},
|
||||
{
|
||||
"id": "00000000-0000-0000-0000-000000000004",
|
||||
"slug": "check_account_leak",
|
||||
"name": _("Check whether the account password is a common password"),
|
||||
"comment": _("Check whether the account password is a commonly leaked password.")
|
||||
"comment": _("Check whether the account password is a commonly leaked password")
|
||||
},
|
||||
]
|
||||
return data
|
||||
|
||||
@@ -75,11 +75,11 @@ class BaseAccount(VaultModelMixin, JMSOrgBaseModel):
|
||||
return bool(self.secret)
|
||||
|
||||
@property
|
||||
def has_username(self) -> bool:
|
||||
def has_username(self):
|
||||
return bool(self.username)
|
||||
|
||||
@property
|
||||
def spec_info(self) -> dict:
|
||||
def spec_info(self):
|
||||
data = {}
|
||||
if self.secret_type != SecretType.SSH_KEY:
|
||||
return data
|
||||
@@ -87,13 +87,13 @@ class BaseAccount(VaultModelMixin, JMSOrgBaseModel):
|
||||
return data
|
||||
|
||||
@property
|
||||
def password(self) -> str:
|
||||
def password(self):
|
||||
if self.secret_type == SecretType.PASSWORD:
|
||||
return self.secret
|
||||
return None
|
||||
|
||||
@property
|
||||
def private_key(self) -> str:
|
||||
def private_key(self):
|
||||
if self.secret_type == SecretType.SSH_KEY:
|
||||
return self.secret
|
||||
return None
|
||||
@@ -110,7 +110,7 @@ class BaseAccount(VaultModelMixin, JMSOrgBaseModel):
|
||||
return None
|
||||
|
||||
@property
|
||||
def ssh_key_fingerprint(self) -> str:
|
||||
def ssh_key_fingerprint(self):
|
||||
if self.public_key:
|
||||
public_key = self.public_key
|
||||
elif self.private_key:
|
||||
|
||||
@@ -56,7 +56,7 @@ class VaultModelMixin(models.Model):
|
||||
__secret = None
|
||||
|
||||
@property
|
||||
def secret(self) -> str:
|
||||
def secret(self):
|
||||
if self.__secret:
|
||||
return self.__secret
|
||||
from accounts.backends import vault_client
|
||||
|
||||
@@ -18,11 +18,11 @@ class VirtualAccount(JMSOrgBaseModel):
|
||||
verbose_name = _('Virtual account')
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
def name(self):
|
||||
return self.get_alias_display()
|
||||
|
||||
@property
|
||||
def username(self) -> str:
|
||||
def username(self):
|
||||
usernames_map = {
|
||||
AliasAccount.INPUT: _("Manual input"),
|
||||
AliasAccount.USER: _("Same with user"),
|
||||
@@ -32,7 +32,7 @@ class VirtualAccount(JMSOrgBaseModel):
|
||||
return usernames_map.get(self.alias, '')
|
||||
|
||||
@property
|
||||
def comment(self) -> str:
|
||||
def comment(self):
|
||||
comments_map = {
|
||||
AliasAccount.INPUT: _('Non-asset account, Input username/password on connect'),
|
||||
AliasAccount.USER: _('The account username name same with user on connect'),
|
||||
|
||||
@@ -6,7 +6,6 @@ from common.tasks import send_mail_attachment_async, upload_backup_to_obj_storag
|
||||
from notifications.notifications import UserMessage
|
||||
from terminal.models.component.storage import ReplayStorage
|
||||
from users.models import User
|
||||
from users.utils import activate_user_language
|
||||
|
||||
|
||||
class AccountBackupExecutionTaskMsg:
|
||||
@@ -29,10 +28,9 @@ class AccountBackupExecutionTaskMsg:
|
||||
).format(name)
|
||||
|
||||
def publish(self, attachment_list=None):
|
||||
with activate_user_language(self.user):
|
||||
send_mail_attachment_async(
|
||||
self.subject, self.message, [self.user.email], attachment_list
|
||||
)
|
||||
send_mail_attachment_async(
|
||||
self.subject, self.message, [self.user.email], attachment_list
|
||||
)
|
||||
|
||||
|
||||
class AccountBackupByObjStorageExecutionTaskMsg:
|
||||
@@ -76,10 +74,9 @@ class ChangeSecretExecutionTaskMsg:
|
||||
return self.summary + '\n' + default_message
|
||||
|
||||
def publish(self, attachments=None):
|
||||
with activate_user_language(self.user):
|
||||
send_mail_attachment_async(
|
||||
self.subject, self.message, [self.user.email], attachments
|
||||
)
|
||||
send_mail_attachment_async(
|
||||
self.subject, self.message, [self.user.email], attachments
|
||||
)
|
||||
|
||||
|
||||
class GatherAccountChangeMsg(UserMessage):
|
||||
|
||||
@@ -23,7 +23,7 @@ TYPE_CHOICES = [
|
||||
("delete_both", _("Delete remote")),
|
||||
("add_account", _("Add account")),
|
||||
("change_password_add", _("Change password and Add")),
|
||||
("change_password", _("Change secret")),
|
||||
("change_password", _("Change password")),
|
||||
]
|
||||
|
||||
|
||||
|
||||
@@ -246,7 +246,6 @@ class AccountSerializer(AccountCreateUpdateSerializerMixin, BaseAccountSerialize
|
||||
'source', 'source_id', 'secret_reset',
|
||||
] + AccountCreateUpdateSerializerMixin.Meta.fields + automation_fields
|
||||
read_only_fields = BaseAccountSerializer.Meta.read_only_fields + automation_fields
|
||||
fields = [f for f in fields if f not in ['spec_info']]
|
||||
extra_kwargs = {
|
||||
**BaseAccountSerializer.Meta.extra_kwargs,
|
||||
'name': {'required': False},
|
||||
@@ -269,7 +268,7 @@ class AccountDetailSerializer(AccountSerializer):
|
||||
|
||||
class Meta(AccountSerializer.Meta):
|
||||
model = Account
|
||||
fields = AccountSerializer.Meta.fields + ['has_secret', 'spec_info']
|
||||
fields = AccountSerializer.Meta.fields + ['has_secret']
|
||||
read_only_fields = AccountSerializer.Meta.read_only_fields + ['has_secret']
|
||||
|
||||
|
||||
@@ -456,8 +455,6 @@ class AssetAccountBulkSerializer(
|
||||
|
||||
|
||||
class AccountSecretSerializer(SecretReadableMixin, AccountSerializer):
|
||||
spec_info = serializers.DictField(label=_('Spec info'), read_only=True)
|
||||
|
||||
class Meta(AccountSerializer.Meta):
|
||||
fields = AccountSerializer.Meta.fields + ['spec_info']
|
||||
extra_kwargs = {
|
||||
@@ -472,7 +469,6 @@ class AccountSecretSerializer(SecretReadableMixin, AccountSerializer):
|
||||
|
||||
class AccountHistorySerializer(serializers.ModelSerializer):
|
||||
secret_type = LabeledChoiceField(choices=SecretType.choices, label=_('Secret type'))
|
||||
secret = serializers.CharField(label=_('Secret'), read_only=True)
|
||||
id = serializers.IntegerField(label=_('ID'), source='history_id', read_only=True)
|
||||
|
||||
class Meta:
|
||||
|
||||
@@ -70,14 +70,12 @@ class AuthValidateMixin(serializers.Serializer):
|
||||
class BaseAccountSerializer(
|
||||
AuthValidateMixin, ResourceLabelsMixin, BulkOrgResourceModelSerializer
|
||||
):
|
||||
spec_info = serializers.DictField(label=_('Spec info'), read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = BaseAccount
|
||||
fields_mini = ["id", "name", "username"]
|
||||
fields_small = fields_mini + [
|
||||
"secret_type", "secret", "passphrase",
|
||||
"privileged", "is_active",
|
||||
"privileged", "is_active", "spec_info",
|
||||
]
|
||||
fields_other = ["created_by", "date_created", "date_updated", "comment"]
|
||||
fields = fields_small + fields_other + ["labels"]
|
||||
|
||||
@@ -5,7 +5,6 @@ from rest_framework import serializers
|
||||
from accounts.models import IntegrationApplication
|
||||
from acls.serializers.rules import ip_group_child_validator, ip_group_help_text
|
||||
from common.serializers.fields import JSONManyToManyField
|
||||
from common.utils import random_string
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
|
||||
|
||||
@@ -38,10 +37,6 @@ class IntegrationApplicationSerializer(BulkOrgResourceModelSerializer):
|
||||
data['logo'] = static('img/logo.png')
|
||||
return data
|
||||
|
||||
def validate(self, attrs):
|
||||
attrs['secret'] = random_string(36)
|
||||
return attrs
|
||||
|
||||
|
||||
class IntegrationAccountSecretSerializer(serializers.Serializer):
|
||||
asset = serializers.CharField(required=False, allow_blank=True)
|
||||
|
||||
@@ -57,15 +57,11 @@ class AccountTemplateSerializer(BaseAccountSerializer):
|
||||
fields_unimport_template = ['push_params']
|
||||
|
||||
|
||||
class AccountDetailTemplateSerializer(AccountTemplateSerializer):
|
||||
class AccountTemplateSecretSerializer(SecretReadableMixin, AccountTemplateSerializer):
|
||||
class Meta(AccountTemplateSerializer.Meta):
|
||||
fields = AccountTemplateSerializer.Meta.fields + ['spec_info']
|
||||
|
||||
|
||||
class AccountTemplateSecretSerializer(SecretReadableMixin, AccountDetailTemplateSerializer):
|
||||
class Meta(AccountDetailTemplateSerializer.Meta):
|
||||
fields = AccountDetailTemplateSerializer.Meta.fields
|
||||
extra_kwargs = {
|
||||
**AccountDetailTemplateSerializer.Meta.extra_kwargs,
|
||||
**AccountTemplateSerializer.Meta.extra_kwargs,
|
||||
'secret': {'write_only': False},
|
||||
'spec_info': {'label': _('Spec info')},
|
||||
}
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
from django.conf import settings
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from accounts.const import AutomationTypes, AccountBackupType
|
||||
from accounts.const import AutomationTypes
|
||||
from accounts.models import BackupAccountAutomation
|
||||
from common.serializers.fields import EncryptedField
|
||||
from common.utils import get_logger
|
||||
@@ -42,17 +41,6 @@ class BackupAccountSerializer(BaseAutomationSerializer):
|
||||
'types': {'label': _('Asset type')}
|
||||
}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super().__init__(*args, **kwargs)
|
||||
self.set_backup_type_choices()
|
||||
|
||||
def set_backup_type_choices(self):
|
||||
field_backup_type = self.fields.get("backup_type")
|
||||
if not field_backup_type:
|
||||
return
|
||||
if not settings.XPACK_LICENSE_IS_VALID:
|
||||
field_backup_type._choices.pop(AccountBackupType.object_storage, None)
|
||||
|
||||
@property
|
||||
def model_type(self):
|
||||
return AutomationTypes.backup_account
|
||||
|
||||
@@ -16,7 +16,6 @@ from assets.models import Asset
|
||||
from common.serializers.fields import LabeledChoiceField, ObjectRelatedField
|
||||
from common.utils import get_logger
|
||||
from .base import BaseAutomationSerializer
|
||||
from ...utils import account_secret_task_status
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
@@ -27,7 +26,6 @@ __all__ = [
|
||||
'ChangeSecretRecordBackUpSerializer',
|
||||
'ChangeSecretUpdateAssetSerializer',
|
||||
'ChangeSecretUpdateNodeSerializer',
|
||||
'ChangeSecretAccountSerializer'
|
||||
]
|
||||
|
||||
|
||||
@@ -130,7 +128,7 @@ class ChangeSecretRecordSerializer(serializers.ModelSerializer):
|
||||
read_only_fields = fields
|
||||
|
||||
@staticmethod
|
||||
def get_is_success(obj) -> bool:
|
||||
def get_is_success(obj):
|
||||
return obj.status == ChangeSecretRecordStatusChoice.success
|
||||
|
||||
|
||||
@@ -157,7 +155,7 @@ class ChangeSecretRecordBackUpSerializer(serializers.ModelSerializer):
|
||||
read_only_fields = fields
|
||||
|
||||
@staticmethod
|
||||
def get_asset(instance) -> str:
|
||||
def get_asset(instance):
|
||||
return str(instance.asset)
|
||||
|
||||
@staticmethod
|
||||
@@ -165,7 +163,7 @@ class ChangeSecretRecordBackUpSerializer(serializers.ModelSerializer):
|
||||
return str(instance.account)
|
||||
|
||||
@staticmethod
|
||||
def get_is_success(obj) -> str:
|
||||
def get_is_success(obj):
|
||||
if obj.status == ChangeSecretRecordStatusChoice.success.value:
|
||||
return _("Success")
|
||||
return _("Failed")
|
||||
@@ -181,24 +179,3 @@ class ChangeSecretUpdateNodeSerializer(serializers.ModelSerializer):
|
||||
class Meta:
|
||||
model = ChangeSecretAutomation
|
||||
fields = ['id', 'nodes']
|
||||
|
||||
|
||||
class ChangeSecretAccountSerializer(serializers.ModelSerializer):
|
||||
asset = ObjectRelatedField(
|
||||
queryset=Asset.objects.all(), required=False, label=_("Asset")
|
||||
)
|
||||
ttl = serializers.SerializerMethodField(label=_('TTL'))
|
||||
meta = serializers.SerializerMethodField(label=_('Meta'))
|
||||
|
||||
class Meta:
|
||||
model = Account
|
||||
fields = ['id', 'username', 'asset', 'meta', 'ttl']
|
||||
read_only_fields = fields
|
||||
|
||||
@staticmethod
|
||||
def get_meta(obj) -> dict:
|
||||
return account_secret_task_status.get(str(obj.id))
|
||||
|
||||
@staticmethod
|
||||
def get_ttl(obj) -> int:
|
||||
return account_secret_task_status.get_ttl(str(obj.id))
|
||||
|
||||
@@ -69,7 +69,7 @@ class AssetRiskSerializer(serializers.Serializer):
|
||||
risk_summary = serializers.SerializerMethodField()
|
||||
|
||||
@staticmethod
|
||||
def get_risk_summary(obj) -> dict:
|
||||
def get_risk_summary(obj):
|
||||
summary = {}
|
||||
for risk in RiskChoice.choices:
|
||||
summary[f"{risk[0]}_count"] = obj.get(f"{risk[0]}_count", 0)
|
||||
|
||||
@@ -28,7 +28,7 @@ class DiscoverAccountAutomationSerializer(BaseAutomationSerializer):
|
||||
+ read_only_fields)
|
||||
extra_kwargs = {
|
||||
'check_risk': {
|
||||
'help_text': _('Whether to check the risk of the discovered accounts.'),
|
||||
'help_text': _('Whether to check the risk of the gathered accounts.'),
|
||||
},
|
||||
**BaseAutomationSerializer.Meta.extra_kwargs
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import datetime
|
||||
from collections import defaultdict
|
||||
|
||||
from celery import shared_task
|
||||
from django.db.models import Q
|
||||
@@ -73,43 +72,24 @@ def execute_automation_record_task(record_ids, tp):
|
||||
task_name = gettext_noop('Execute automation record')
|
||||
|
||||
with tmp_to_root_org():
|
||||
records = ChangeSecretRecord.objects.filter(id__in=record_ids).order_by('-date_updated')
|
||||
records = ChangeSecretRecord.objects.filter(id__in=record_ids)
|
||||
|
||||
if not records:
|
||||
logger.error(f'No automation record found: {record_ids}')
|
||||
logger.error('No automation record found: {}'.format(record_ids))
|
||||
return
|
||||
|
||||
seen_accounts = set()
|
||||
unique_records = []
|
||||
for rec in records:
|
||||
acct = str(rec.account_id)
|
||||
if acct not in seen_accounts:
|
||||
seen_accounts.add(acct)
|
||||
unique_records.append(rec)
|
||||
|
||||
exec_groups = defaultdict(list)
|
||||
for rec in unique_records:
|
||||
exec_groups[rec.execution_id].append(rec)
|
||||
|
||||
for __, group in exec_groups.items():
|
||||
latest_rec = group[0]
|
||||
snapshot = getattr(latest_rec.execution, 'snapshot', {}) or {}
|
||||
|
||||
record_map = {f"{r.asset_id}-{r.account_id}": str(r.id) for r in group}
|
||||
assets = [str(r.asset_id) for r in group]
|
||||
accounts = [str(r.account_id) for r in group]
|
||||
|
||||
task_snapshot = {
|
||||
'params': {},
|
||||
'record_map': record_map,
|
||||
'secret': latest_rec.new_secret,
|
||||
'secret_type': snapshot.get('secret_type'),
|
||||
'assets': assets,
|
||||
'accounts': accounts,
|
||||
}
|
||||
|
||||
with tmp_to_org(latest_rec.execution.org_id):
|
||||
quickstart_automation_by_snapshot(task_name, tp, task_snapshot)
|
||||
record = records[0]
|
||||
record_map = {f'{record.asset_id}-{record.account_id}': str(record.id) for record in records}
|
||||
task_snapshot = {
|
||||
'params': {},
|
||||
'record_map': record_map,
|
||||
'secret': record.new_secret,
|
||||
'secret_type': record.execution.snapshot.get('secret_type'),
|
||||
'assets': [str(instance.asset_id) for instance in records],
|
||||
'accounts': [str(instance.account_id) for instance in records],
|
||||
}
|
||||
with tmp_to_org(record.execution.org_id):
|
||||
quickstart_automation_by_snapshot(task_name, tp, task_snapshot)
|
||||
|
||||
|
||||
@shared_task(
|
||||
@@ -127,18 +107,16 @@ def execute_automation_record_task(record_ids, tp):
|
||||
)
|
||||
@register_as_period_task(crontab=CRONTAB_AT_AM_THREE)
|
||||
def clean_change_secret_and_push_record_period():
|
||||
from accounts.models import ChangeSecretRecord, PushSecretRecord
|
||||
from accounts.models import ChangeSecretRecord
|
||||
print('Start clean change secret and push record period')
|
||||
with tmp_to_root_org():
|
||||
now = timezone.now()
|
||||
days = get_log_keep_day('ACCOUNT_CHANGE_SECRET_RECORD_KEEP_DAYS')
|
||||
expired_time = now - datetime.timedelta(days=days)
|
||||
expired_day = now - datetime.timedelta(days=days)
|
||||
records = ChangeSecretRecord.objects.filter(
|
||||
date_updated__lt=expired_day
|
||||
).filter(
|
||||
Q(execution__isnull=True) | Q(asset__isnull=True) | Q(account__isnull=True)
|
||||
)
|
||||
|
||||
null_related_q = Q(execution__isnull=True) | Q(asset__isnull=True) | Q(account__isnull=True)
|
||||
expired_q = Q(date_updated__lt=expired_time)
|
||||
|
||||
ChangeSecretRecord.objects.filter(null_related_q).delete()
|
||||
ChangeSecretRecord.objects.filter(expired_q).delete()
|
||||
|
||||
PushSecretRecord.objects.filter(null_related_q).delete()
|
||||
PushSecretRecord.objects.filter(expired_q).delete()
|
||||
records.delete()
|
||||
|
||||
@@ -1,107 +1,37 @@
|
||||
from collections import defaultdict
|
||||
|
||||
from celery import shared_task
|
||||
from django.utils.translation import gettext_noop, gettext_lazy as _
|
||||
|
||||
from accounts.const import AutomationTypes, ChangeSecretAccountStatus
|
||||
from accounts.const import AutomationTypes
|
||||
from accounts.tasks.common import quickstart_automation_by_snapshot
|
||||
from accounts.utils import account_secret_task_status
|
||||
from common.utils import get_logger
|
||||
from orgs.utils import tmp_to_org
|
||||
|
||||
logger = get_logger(__file__)
|
||||
__all__ = [
|
||||
'push_accounts_to_assets_task', 'change_secret_accounts_to_assets_task'
|
||||
'push_accounts_to_assets_task',
|
||||
]
|
||||
|
||||
|
||||
def _process_accounts(account_ids, automation_model, default_name, automation_type, snapshot=None):
|
||||
from accounts.models import Account
|
||||
accounts = Account.objects.filter(id__in=account_ids)
|
||||
if not accounts:
|
||||
logger.warning(
|
||||
"No accounts found for automation task %s with ids %s",
|
||||
automation_type, account_ids
|
||||
)
|
||||
return
|
||||
|
||||
task_name = automation_model.generate_unique_name(gettext_noop(default_name))
|
||||
snapshot = snapshot or {}
|
||||
snapshot.update({
|
||||
'accounts': [str(a.id) for a in accounts],
|
||||
'assets': [str(a.asset_id) for a in accounts],
|
||||
})
|
||||
|
||||
quickstart_automation_by_snapshot(task_name, automation_type, snapshot)
|
||||
|
||||
|
||||
@shared_task(
|
||||
queue="ansible",
|
||||
verbose_name=_('Push accounts to assets'),
|
||||
activity_callback=lambda self, account_ids, *args, **kwargs: (account_ids, None),
|
||||
description=_(
|
||||
"Whenever an account is created or modified and needs pushing to assets, run this task"
|
||||
"When creating or modifying an account requires account push, this task is executed"
|
||||
)
|
||||
)
|
||||
def push_accounts_to_assets_task(account_ids, params=None):
|
||||
from accounts.models import PushAccountAutomation
|
||||
snapshot = {
|
||||
'params': params or {},
|
||||
}
|
||||
_process_accounts(
|
||||
account_ids,
|
||||
PushAccountAutomation,
|
||||
_("Push accounts to assets"),
|
||||
AutomationTypes.push_account,
|
||||
snapshot=snapshot
|
||||
)
|
||||
|
||||
|
||||
@shared_task(
|
||||
queue="ansible",
|
||||
verbose_name=_('Change secret accounts to assets'),
|
||||
activity_callback=lambda self, account_ids, *args, **kwargs: (account_ids, None),
|
||||
description=_(
|
||||
"When a secret on an account changes and needs pushing to assets, run this task"
|
||||
)
|
||||
)
|
||||
def change_secret_accounts_to_assets_task(account_ids, params=None, snapshot=None, trigger='manual'):
|
||||
from accounts.models import ChangeSecretAutomation, Account
|
||||
|
||||
manager = account_secret_task_status
|
||||
|
||||
if trigger == 'delay':
|
||||
for _id in manager.account_ids:
|
||||
status = manager.get_status(_id)
|
||||
# Check if the account is in QUEUED status
|
||||
if status == ChangeSecretAccountStatus.QUEUED:
|
||||
account_ids.append(_id)
|
||||
manager.set_status(_id, ChangeSecretAccountStatus.READY)
|
||||
|
||||
if not account_ids:
|
||||
return
|
||||
from accounts.models import Account
|
||||
|
||||
accounts = Account.objects.filter(id__in=account_ids)
|
||||
if not accounts:
|
||||
logger.warning(
|
||||
"No accounts found for change secret automation task with ids %s",
|
||||
account_ids
|
||||
)
|
||||
return
|
||||
task_name = gettext_noop("Push accounts to assets")
|
||||
task_name = PushAccountAutomation.generate_unique_name(task_name)
|
||||
|
||||
grouped_ids = defaultdict(lambda: defaultdict(list))
|
||||
for account in accounts:
|
||||
grouped_ids[account.org_id][account.secret_type].append(str(account.id))
|
||||
task_snapshot = {
|
||||
'accounts': [str(account.id) for account in accounts],
|
||||
'assets': [str(account.asset_id) for account in accounts],
|
||||
'params': params or {},
|
||||
}
|
||||
|
||||
snapshot = snapshot or {}
|
||||
for org_id, secret_map in grouped_ids.items():
|
||||
with tmp_to_org(org_id):
|
||||
for secret_type, ids in secret_map.items():
|
||||
snapshot['secret_type'] = secret_type
|
||||
_process_accounts(
|
||||
ids,
|
||||
ChangeSecretAutomation,
|
||||
_("Change secret accounts to assets"),
|
||||
AutomationTypes.change_secret,
|
||||
snapshot=snapshot
|
||||
)
|
||||
tp = AutomationTypes.push_account
|
||||
quickstart_automation_by_snapshot(task_name, tp, task_snapshot)
|
||||
|
||||
@@ -17,7 +17,6 @@ router.register(r'account-template-secrets', api.AccountTemplateSecretsViewSet,
|
||||
router.register(r'account-backup-plans', api.BackupAccountViewSet, 'account-backup')
|
||||
router.register(r'account-backup-plan-executions', api.BackupAccountExecutionViewSet, 'account-backup-execution')
|
||||
router.register(r'change-secret-automations', api.ChangeSecretAutomationViewSet, 'change-secret-automation')
|
||||
router.register(r'change-secret-status', api.ChangeSecretStatusViewSet, 'change-secret-status')
|
||||
router.register(r'change-secret-executions', api.ChangSecretExecutionViewSet, 'change-secret-execution')
|
||||
router.register(r'change-secret-records', api.ChangeSecretRecordViewSet, 'change-secret-record')
|
||||
router.register(r'gather-account-automations', api.DiscoverAccountsAutomationViewSet, 'gather-account-automation')
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
import copy
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.cache import cache
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
from accounts.const import SecretType, DEFAULT_PASSWORD_RULES
|
||||
|
||||
from common.utils import ssh_key_gen, random_string
|
||||
from common.utils import validate_ssh_private_key, parse_ssh_private_key_str
|
||||
|
||||
@@ -62,80 +61,3 @@ def validate_ssh_key(ssh_key, passphrase=None):
|
||||
if not valid:
|
||||
raise serializers.ValidationError(_("private key invalid or passphrase error"))
|
||||
return parse_ssh_private_key_str(ssh_key, passphrase)
|
||||
|
||||
|
||||
class AccountSecretTaskStatus:
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
prefix='queue:change_secret:',
|
||||
debounce_key='debounce:change_secret:task',
|
||||
debounce_timeout=10,
|
||||
queue_status_timeout=60,
|
||||
default_timeout=3600,
|
||||
delayed_task_countdown=20,
|
||||
):
|
||||
self.prefix = prefix
|
||||
self.debounce_key = debounce_key
|
||||
self.debounce_timeout = debounce_timeout
|
||||
self.queue_status_timeout = queue_status_timeout
|
||||
self.default_timeout = default_timeout
|
||||
self.delayed_task_countdown = delayed_task_countdown
|
||||
self.enabled = getattr(settings, 'CHANGE_SECRET_AFTER_SESSION_END', False)
|
||||
|
||||
def _key(self, identifier):
|
||||
return f"{self.prefix}{identifier}"
|
||||
|
||||
@property
|
||||
def account_ids(self):
|
||||
for key in cache.iter_keys(f"{self.prefix}*"):
|
||||
yield key.split(':')[-1]
|
||||
|
||||
def is_debounced(self):
|
||||
return cache.add(self.debounce_key, True, self.debounce_timeout)
|
||||
|
||||
def get_queue_key(self, identifier):
|
||||
return self._key(identifier)
|
||||
|
||||
def set_status(
|
||||
self,
|
||||
identifier,
|
||||
status,
|
||||
timeout=None,
|
||||
metadata=None,
|
||||
use_add=False
|
||||
):
|
||||
if not self.enabled:
|
||||
return
|
||||
|
||||
key = self._key(identifier)
|
||||
data = {"status": status}
|
||||
if metadata:
|
||||
data.update(metadata)
|
||||
|
||||
if use_add:
|
||||
return cache.add(key, data, timeout or self.queue_status_timeout)
|
||||
|
||||
cache.set(key, data, timeout or self.default_timeout)
|
||||
|
||||
def get(self, identifier):
|
||||
return cache.get(self._key(identifier), {})
|
||||
|
||||
def get_status(self, identifier):
|
||||
if not self.enabled:
|
||||
return
|
||||
|
||||
record = cache.get(self._key(identifier), {})
|
||||
return record.get("status")
|
||||
|
||||
def get_ttl(self, identifier):
|
||||
return cache.ttl(self._key(identifier))
|
||||
|
||||
def clear(self, identifier):
|
||||
if not self.enabled:
|
||||
return
|
||||
|
||||
cache.delete(self._key(identifier))
|
||||
|
||||
|
||||
account_secret_task_status = AccountSecretTaskStatus()
|
||||
|
||||
@@ -9,6 +9,5 @@ class ActionChoices(models.TextChoices):
|
||||
warning = 'warning', _('Warn')
|
||||
notice = 'notice', _('Notify')
|
||||
notify_and_warn = 'notify_and_warn', _('Prompt and warn')
|
||||
face_verify = 'face_verify', _('Face verify')
|
||||
face_online = 'face_online', _('Face online')
|
||||
change_secret = 'change_secret', _('Secret rotation')
|
||||
face_verify = 'face_verify', _('Face Verify')
|
||||
face_online = 'face_online', _('Face Online')
|
||||
|
||||
@@ -5,7 +5,7 @@ from django.utils.translation import gettext_lazy as _
|
||||
from common.db.fields import JSONManyToManyField
|
||||
from common.db.models import JMSBaseModel
|
||||
from common.utils import contains_ip
|
||||
from common.utils.timezone import contains_time_period
|
||||
from common.utils.time_period import contains_time_period
|
||||
from orgs.mixins.models import OrgModelMixin, OrgManager
|
||||
from ..const import ActionChoices
|
||||
|
||||
|
||||
@@ -34,16 +34,16 @@ class CommandGroup(JMSOrgBaseModel):
|
||||
|
||||
@lazyproperty
|
||||
def pattern(self):
|
||||
content = self.content.replace('\r\n', '\n')
|
||||
if self.type == 'command':
|
||||
s = self.construct_command_regex(content)
|
||||
s = self.construct_command_regex(self.content)
|
||||
else:
|
||||
s = r'{0}'.format(r'{}'.format('|'.join(content.split('\n'))))
|
||||
s = r'{0}'.format(self.content)
|
||||
return s
|
||||
|
||||
@classmethod
|
||||
def construct_command_regex(cls, content):
|
||||
regex = []
|
||||
content = content.replace('\r\n', '\n')
|
||||
for _cmd in content.split('\n'):
|
||||
cmd = re.sub(r'\s+', ' ', _cmd)
|
||||
cmd = re.escape(cmd)
|
||||
|
||||
@@ -79,8 +79,6 @@ class ActionAclSerializer(serializers.Serializer):
|
||||
field_action._choices.pop(ActionChoices.face_online, None)
|
||||
for choice in self.Meta.action_choices_exclude:
|
||||
field_action._choices.pop(choice, None)
|
||||
if not settings.XPACK_LICENSE_IS_VALID or not settings.CHANGE_SECRET_AFTER_SESSION_END:
|
||||
field_action._choices.pop(ActionChoices.change_secret, None)
|
||||
|
||||
|
||||
class BaseACLSerializer(ActionAclSerializer, serializers.Serializer):
|
||||
|
||||
@@ -32,12 +32,9 @@ class CommandFilterACLSerializer(BaseSerializer, BulkOrgResourceModelSerializer)
|
||||
class Meta(BaseSerializer.Meta):
|
||||
model = CommandFilterACL
|
||||
fields = BaseSerializer.Meta.fields + ['command_groups']
|
||||
action_choices_exclude = [
|
||||
ActionChoices.notice,
|
||||
ActionChoices.face_verify,
|
||||
ActionChoices.face_online,
|
||||
ActionChoices.change_secret
|
||||
]
|
||||
action_choices_exclude = [ActionChoices.notice,
|
||||
ActionChoices.face_verify,
|
||||
ActionChoices.face_online]
|
||||
|
||||
|
||||
class CommandReviewSerializer(serializers.Serializer):
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from common.serializers.mixin import CommonBulkModelSerializer
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
from .base import BaseUserAssetAccountACLSerializer as BaseSerializer
|
||||
from ..const import ActionChoices
|
||||
from ..models import ConnectMethodACL
|
||||
@@ -6,17 +6,13 @@ from ..models import ConnectMethodACL
|
||||
__all__ = ["ConnectMethodACLSerializer"]
|
||||
|
||||
|
||||
class ConnectMethodACLSerializer(BaseSerializer, CommonBulkModelSerializer):
|
||||
class ConnectMethodACLSerializer(BaseSerializer, BulkOrgResourceModelSerializer):
|
||||
class Meta(BaseSerializer.Meta):
|
||||
model = ConnectMethodACL
|
||||
fields = [
|
||||
i for i in BaseSerializer.Meta.fields + ['connect_methods']
|
||||
if i not in ['assets', 'accounts', 'org_id']
|
||||
if i not in ['assets', 'accounts']
|
||||
]
|
||||
action_choices_exclude = BaseSerializer.Meta.action_choices_exclude + [
|
||||
ActionChoices.review,
|
||||
ActionChoices.notice,
|
||||
ActionChoices.face_verify,
|
||||
ActionChoices.face_online,
|
||||
ActionChoices.change_secret
|
||||
ActionChoices.review, ActionChoices.accept, ActionChoices.notice
|
||||
]
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
from common.serializers import CommonBulkModelSerializer
|
||||
from common.serializers import MethodSerializer
|
||||
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
|
||||
from .base import BaseUserACLSerializer
|
||||
from .rules import RuleSerializer
|
||||
from ..const import ActionChoices
|
||||
@@ -12,18 +12,17 @@ __all__ = ["LoginACLSerializer"]
|
||||
common_help_text = _("With * indicating a match all. ")
|
||||
|
||||
|
||||
class LoginACLSerializer(BaseUserACLSerializer, CommonBulkModelSerializer):
|
||||
class LoginACLSerializer(BaseUserACLSerializer, BulkOrgResourceModelSerializer):
|
||||
rules = MethodSerializer(label=_('Rule'))
|
||||
|
||||
class Meta(BaseUserACLSerializer.Meta):
|
||||
model = LoginACL
|
||||
fields = list((set(BaseUserACLSerializer.Meta.fields) | {'rules'}) - {'org_id'})
|
||||
fields = BaseUserACLSerializer.Meta.fields + ['rules', ]
|
||||
action_choices_exclude = [
|
||||
ActionChoices.warning,
|
||||
ActionChoices.notify_and_warn,
|
||||
ActionChoices.face_online,
|
||||
ActionChoices.face_verify,
|
||||
ActionChoices.change_secret
|
||||
ActionChoices.face_verify
|
||||
]
|
||||
|
||||
def get_rules_serializer(self):
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
# coding: utf-8
|
||||
#
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from rest_framework import serializers
|
||||
|
||||
@@ -10,7 +8,7 @@ from common.utils.ip import is_ip_address, is_ip_network, is_ip_segment
|
||||
|
||||
logger = get_logger(__file__)
|
||||
|
||||
__all__ = ['RuleSerializer', 'ip_group_child_validator', 'ip_group_help_text', 'address_validator']
|
||||
__all__ = ['RuleSerializer', 'ip_group_child_validator', 'ip_group_help_text']
|
||||
|
||||
|
||||
def ip_group_child_validator(ip_group_child):
|
||||
@@ -23,19 +21,6 @@ def ip_group_child_validator(ip_group_child):
|
||||
raise serializers.ValidationError(error)
|
||||
|
||||
|
||||
def address_validator(value):
|
||||
parsed = urlparse(value)
|
||||
is_basic_url = parsed.scheme in ('http', 'https') and parsed.netloc
|
||||
is_valid = value == '*' \
|
||||
or is_ip_address(value) \
|
||||
or is_ip_network(value) \
|
||||
or is_ip_segment(value) \
|
||||
or is_basic_url
|
||||
if not is_valid:
|
||||
error = _('address invalid: `{}`').format(value)
|
||||
raise serializers.ValidationError(error)
|
||||
|
||||
|
||||
ip_group_help_text = _(
|
||||
'With * indicating a match all. '
|
||||
'Such as: '
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
from .asset import *
|
||||
from .category import *
|
||||
from .domain import *
|
||||
from .favorite_asset import *
|
||||
from .mixin import *
|
||||
from .my_asset import *
|
||||
from .node import *
|
||||
from .platform import *
|
||||
from .protocol import *
|
||||
from .tree import *
|
||||
from .zone import *
|
||||
from .my_asset import *
|
||||
|
||||
@@ -37,12 +37,12 @@ class AssetFilterSet(BaseFilterSet):
|
||||
platform = drf_filters.CharFilter(method='filter_platform')
|
||||
is_gateway = drf_filters.BooleanFilter(method='filter_is_gateway')
|
||||
exclude_platform = drf_filters.CharFilter(field_name="platform__name", lookup_expr='exact', exclude=True)
|
||||
zone = drf_filters.CharFilter(method='filter_zone')
|
||||
domain = drf_filters.CharFilter(method='filter_domain')
|
||||
type = drf_filters.CharFilter(field_name="platform__type", lookup_expr="exact")
|
||||
category = drf_filters.CharFilter(field_name="platform__category", lookup_expr="exact")
|
||||
protocols = drf_filters.CharFilter(method='filter_protocols')
|
||||
gateway_enabled = drf_filters.BooleanFilter(
|
||||
field_name="platform__gateway_enabled", lookup_expr="exact"
|
||||
domain_enabled = drf_filters.BooleanFilter(
|
||||
field_name="platform__domain_enabled", lookup_expr="exact"
|
||||
)
|
||||
ping_enabled = drf_filters.BooleanFilter(
|
||||
field_name="platform__automation__ping_enabled", lookup_expr="exact"
|
||||
@@ -85,11 +85,11 @@ class AssetFilterSet(BaseFilterSet):
|
||||
return queryset
|
||||
|
||||
@staticmethod
|
||||
def filter_zone(queryset, name, value):
|
||||
def filter_domain(queryset, name, value):
|
||||
if is_uuid(value):
|
||||
return queryset.filter(zone_id=value)
|
||||
return queryset.filter(domain_id=value)
|
||||
else:
|
||||
return queryset.filter(zone__name__contains=value)
|
||||
return queryset.filter(domain__name__contains=value)
|
||||
|
||||
@staticmethod
|
||||
def filter_protocols(queryset, name, value):
|
||||
@@ -171,10 +171,10 @@ class AssetViewSet(SuggestionMixin, BaseAssetViewSet):
|
||||
@action(methods=["GET"], detail=True, url_path="gateways")
|
||||
def gateways(self, *args, **kwargs):
|
||||
asset = self.get_object()
|
||||
if not asset.zone:
|
||||
if not asset.domain:
|
||||
gateways = Gateway.objects.none()
|
||||
else:
|
||||
gateways = asset.zone.gateways
|
||||
gateways = asset.domain.gateways
|
||||
return self.get_paginated_response_from_queryset(gateways)
|
||||
|
||||
@action(methods=['post'], detail=False, url_path='sync-platform-protocols')
|
||||
|
||||
@@ -16,7 +16,6 @@ class CategoryViewSet(ListModelMixin, JMSGenericViewSet):
|
||||
'types': TypeSerializer,
|
||||
}
|
||||
permission_classes = (IsValidUser,)
|
||||
default_limit = None
|
||||
|
||||
def get_queryset(self):
|
||||
return AllTypes.categories()
|
||||
|
||||
@@ -9,24 +9,24 @@ from common.utils import get_logger
|
||||
from orgs.mixins.api import OrgBulkModelViewSet
|
||||
from .asset import HostViewSet
|
||||
from .. import serializers
|
||||
from ..models import Zone, Gateway
|
||||
from ..models import Domain, Gateway
|
||||
|
||||
logger = get_logger(__file__)
|
||||
__all__ = ['ZoneViewSet', 'GatewayViewSet', "GatewayTestConnectionApi"]
|
||||
__all__ = ['DomainViewSet', 'GatewayViewSet', "GatewayTestConnectionApi"]
|
||||
|
||||
|
||||
class ZoneViewSet(OrgBulkModelViewSet):
|
||||
model = Zone
|
||||
class DomainViewSet(OrgBulkModelViewSet):
|
||||
model = Domain
|
||||
filterset_fields = ("name",)
|
||||
search_fields = filterset_fields
|
||||
serializer_classes = {
|
||||
'default': serializers.ZoneSerializer,
|
||||
'list': serializers.ZoneListSerializer,
|
||||
'default': serializers.DomainSerializer,
|
||||
'list': serializers.DomainListSerializer,
|
||||
}
|
||||
|
||||
def get_serializer_class(self):
|
||||
if self.request.query_params.get('gateway'):
|
||||
return serializers.ZoneWithGatewaySerializer
|
||||
return serializers.DomainWithGatewaySerializer
|
||||
return super().get_serializer_class()
|
||||
|
||||
def partial_update(self, request, *args, **kwargs):
|
||||
@@ -36,8 +36,8 @@ class ZoneViewSet(OrgBulkModelViewSet):
|
||||
|
||||
class GatewayViewSet(HostViewSet):
|
||||
perm_model = Gateway
|
||||
filterset_fields = ("zone__name", "name", "zone")
|
||||
search_fields = ("zone__name",)
|
||||
filterset_fields = ("domain__name", "name", "domain")
|
||||
search_fields = ("domain__name",)
|
||||
|
||||
def get_serializer_classes(self):
|
||||
serializer_classes = super().get_serializer_classes()
|
||||
@@ -45,7 +45,7 @@ class GatewayViewSet(HostViewSet):
|
||||
return serializer_classes
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = Zone.get_gateway_queryset()
|
||||
queryset = Domain.get_gateway_queryset()
|
||||
return queryset
|
||||
|
||||
|
||||
@@ -55,7 +55,7 @@ class GatewayTestConnectionApi(SingleObjectMixin, APIView):
|
||||
}
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = Zone.get_gateway_queryset()
|
||||
queryset = Domain.get_gateway_queryset()
|
||||
return queryset
|
||||
|
||||
def post(self, request, *args, **kwargs):
|
||||
@@ -14,7 +14,6 @@ class FavoriteAssetViewSet(BulkModelViewSet):
|
||||
serializer_class = FavoriteAssetSerializer
|
||||
permission_classes = (IsValidUser,)
|
||||
filterset_fields = ['asset']
|
||||
default_limit = None
|
||||
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
with tmp_to_root_org():
|
||||
|
||||
@@ -7,18 +7,15 @@ from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
|
||||
from assets.const import AllTypes
|
||||
from assets.models import Platform, Node, Asset, PlatformProtocol, PlatformAutomation
|
||||
from assets.models import Platform, Node, Asset, PlatformProtocol
|
||||
from assets.serializers import PlatformSerializer, PlatformProtocolSerializer, PlatformListSerializer
|
||||
from common.api import JMSModelViewSet
|
||||
from common.permissions import IsValidUser
|
||||
from common.serializers import GroupedChoiceSerializer
|
||||
from rbac.models import RoleBinding
|
||||
|
||||
__all__ = ['AssetPlatformViewSet', 'PlatformAutomationMethodsApi', 'PlatformProtocolViewSet']
|
||||
|
||||
|
||||
|
||||
|
||||
class PlatformFilter(filters.FilterSet):
|
||||
name__startswith = filters.CharFilter(field_name='name', lookup_expr='istartswith')
|
||||
|
||||
@@ -43,7 +40,6 @@ class AssetPlatformViewSet(JMSModelViewSet):
|
||||
'ops_methods': 'assets.view_platform',
|
||||
'filter_nodes_assets': 'assets.view_platform',
|
||||
}
|
||||
default_limit = None
|
||||
|
||||
def get_queryset(self):
|
||||
# 因为没有走分页逻辑,所以需要这里 prefetch
|
||||
@@ -67,13 +63,6 @@ class AssetPlatformViewSet(JMSModelViewSet):
|
||||
return super().get_object()
|
||||
return self.get_queryset().get(name=pk)
|
||||
|
||||
|
||||
def check_permissions(self, request):
|
||||
if self.action == 'list' and RoleBinding.is_org_admin(request.user):
|
||||
return True
|
||||
else:
|
||||
return super().check_permissions(request)
|
||||
|
||||
def check_object_permissions(self, request, obj):
|
||||
if request.method.lower() in ['delete', 'put', 'patch'] and obj.internal:
|
||||
self.permission_denied(
|
||||
@@ -113,7 +102,6 @@ class PlatformProtocolViewSet(JMSModelViewSet):
|
||||
|
||||
class PlatformAutomationMethodsApi(generics.ListAPIView):
|
||||
permission_classes = (IsValidUser,)
|
||||
queryset = PlatformAutomation.objects.none()
|
||||
|
||||
@staticmethod
|
||||
def automation_methods():
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
from rest_framework.generics import ListAPIView
|
||||
|
||||
from assets import serializers
|
||||
from assets.const import Protocol
|
||||
from common.permissions import IsValidUser
|
||||
from assets.models import Protocol
|
||||
|
||||
__all__ = ['ProtocolListApi']
|
||||
|
||||
@@ -13,13 +13,3 @@ class ProtocolListApi(ListAPIView):
|
||||
|
||||
def get_queryset(self):
|
||||
return list(Protocol.protocols())
|
||||
|
||||
def filter_queryset(self, queryset):
|
||||
search = self.request.query_params.get("search", "").lower().strip()
|
||||
if not search:
|
||||
return queryset
|
||||
queryset = [
|
||||
p for p in queryset
|
||||
if search in p['label'].lower() or search in p['value'].lower()
|
||||
]
|
||||
return queryset
|
||||
|
||||
@@ -161,7 +161,6 @@ class CategoryTreeApi(SerializeToTreeNodeMixin, generics.ListAPIView):
|
||||
'GET': 'assets.view_asset',
|
||||
'list': 'assets.view_asset',
|
||||
}
|
||||
queryset = Node.objects.none()
|
||||
|
||||
def get_assets(self):
|
||||
key = self.request.query_params.get('key')
|
||||
|
||||
@@ -17,12 +17,11 @@ from sshtunnel import SSHTunnelForwarder
|
||||
|
||||
from assets.automations.methods import platform_automation_methods
|
||||
from common.const import Status
|
||||
from common.db.utils import safe_atomic_db_connection
|
||||
from common.db.utils import safe_db_connection
|
||||
from common.tasks import send_mail_async
|
||||
from common.utils import get_logger, lazyproperty, is_openssh_format_key, ssh_pubkey_gen
|
||||
from ops.ansible import JMSInventory, DefaultCallback, SuperPlaybookRunner
|
||||
from ops.ansible.interface import interface
|
||||
from users.utils import activate_user_language
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
@@ -123,7 +122,9 @@ class BaseManager:
|
||||
self.execution.summary = self.summary
|
||||
self.execution.result = self.result
|
||||
self.execution.status = self.status
|
||||
self.execution.save()
|
||||
|
||||
with safe_db_connection():
|
||||
self.execution.save()
|
||||
|
||||
def print_summary(self):
|
||||
content = "\nSummery: \n"
|
||||
@@ -150,13 +151,12 @@ class BaseManager:
|
||||
if not recipients:
|
||||
return
|
||||
print(f"Send report to: {','.join([str(u) for u in recipients])}")
|
||||
for user in recipients:
|
||||
with activate_user_language(user):
|
||||
report = self.gen_report()
|
||||
report = transform(report, cssutils_logging_level="CRITICAL")
|
||||
subject = self.get_report_subject()
|
||||
emails = [user.email]
|
||||
send_mail_async(subject, report, emails, html_message=report)
|
||||
|
||||
report = self.gen_report()
|
||||
report = transform(report, cssutils_logging_level="CRITICAL")
|
||||
subject = self.get_report_subject()
|
||||
emails = [r.email for r in recipients if r.email]
|
||||
send_mail_async(subject, report, emails, html_message=report)
|
||||
|
||||
def gen_report(self):
|
||||
template_path = self.get_report_template()
|
||||
@@ -165,10 +165,9 @@ class BaseManager:
|
||||
return data
|
||||
|
||||
def post_run(self):
|
||||
with safe_atomic_db_connection():
|
||||
self.update_execution()
|
||||
self.print_summary()
|
||||
self.send_report_if_need()
|
||||
self.update_execution()
|
||||
self.print_summary()
|
||||
self.send_report_if_need()
|
||||
|
||||
def run(self, *args, **kwargs):
|
||||
self.pre_run()
|
||||
@@ -201,17 +200,14 @@ class PlaybookPrepareMixin:
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # example: {'gather_fact_windows': {'id': 'gather_fact_windows', 'name': '', 'method': 'gather_fact', ...} }
        self.method_id_meta_mapper = self.get_method_id_meta_mapper()
        # Group by execution method: change-secret, push and similar operations may use different execution methods for different assets
        # Then group by execution method, then by bulk_size, and generate separate playbooks
        self.playbooks = []

    def get_method_id_meta_mapper(self):
        return {
        self.method_id_meta_mapper = {
            method["id"]: method
            for method in self.platform_automation_methods
            if method["method"] == self.__class__.method_type()
        }
        # Group by execution method: change-secret, push and similar operations may use different execution methods for different assets
        # Then group by execution method, then by bulk_size, and generate separate playbooks
        self.playbooks = []

    @classmethod
    def method_type(cls):
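Note on the PlaybookPrepareMixin hunk above: the separate get_method_id_meta_mapper helper gives way to building the mapping inline in __init__; either way, the result indexes the platform automation method definitions by id, keeping only those whose method matches the manager's method_type. A small self-contained sketch of that indexing step (the method dicts below are invented for illustration):

platform_automation_methods = [
    {"id": "gather_fact_posix", "method": "gather_facts"},
    {"id": "gather_fact_windows", "method": "gather_facts"},
    {"id": "change_secret_posix", "method": "change_secret"},
]

def build_method_id_meta_mapper(methods, method_type):
    # Index method metadata by id, filtered to one automation type.
    return {m["id"]: m for m in methods if m["method"] == method_type}

print(build_method_id_meta_mapper(platform_automation_methods, "gather_facts"))
# -> {'gather_fact_posix': {...}, 'gather_fact_windows': {...}}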
@@ -550,8 +546,7 @@ class BasePlaybookManager(PlaybookPrepareMixin, BaseManager):
        try:
            kwargs.update({"clean_workspace": False})
            cb = runner.run(**kwargs)
            with safe_atomic_db_connection():
                self.on_runner_success(runner, cb)
            self.on_runner_success(runner, cb)
        except Exception as e:
            self.on_runner_failed(runner, e, **info)
        finally:
@@ -1,5 +1,3 @@
from collections import Counter

__all__ = ['FormatAssetInfo']

@@ -9,42 +7,13 @@ class FormatAssetInfo:
        self.tp = tp

    @staticmethod
    def get_cpu_model_count(cpus):
        try:
            if len(cpus) % 3 == 0:
                step = 3
                models = [cpus[i + 2] for i in range(0, len(cpus), step)]
            elif len(cpus) % 2 == 0:
                step = 2
                models = [cpus[i + 1] for i in range(0, len(cpus), step)]
            else:
                raise ValueError("CPU list format not recognized")

            model_counts = Counter(models)
            result = ', '.join([f"{model} x{count}" for model, count in model_counts.items()])
        except Exception as e:
            print(f"Error processing CPU model list: {e}")
            result = ''
        return result

    @staticmethod
    def get_gpu_model_count(gpus):
        try:
            model_counts = Counter(gpus)

            result = ', '.join([f"{model} x{count}" for model, count in model_counts.items()])
        except Exception as e:
            print(f"Error processing GPU model list: {e}")
            result = ''

        return result

    def posix_format(self, info):
        cpus = self.get_cpu_model_count(info.get('cpu_model', []))
        gpus = self.get_gpu_model_count(info.get('gpu_model', []))

        info['gpu_model'] = gpus
        info['cpu_model'] = cpus
    def posix_format(info):
        for cpu_model in info.get('cpu_model', []):
            if cpu_model.endswith('GHz') or cpu_model.startswith("Intel"):
                break
        else:
            cpu_model = ''
        info['cpu_model'] = cpu_model[:48]
        info['cpu_count'] = info.get('cpu_count', 0)
        return info
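Note on the FormatAssetInfo hunk above: get_cpu_model_count infers the layout of the gathered cpu_model list, taking every third entry as the model name when the list length is a multiple of three, every second entry when it is a multiple of two, and giving up otherwise; Counter then collapses duplicates into a "model xN" summary. A standalone sketch with invented sample data showing how that plays out:

from collections import Counter

def cpu_model_count(cpus):
    # Mirrors the diff's heuristic: the model name is assumed to be the last
    # field of each 3-tuple or 2-tuple group in the flat list.
    try:
        if len(cpus) % 3 == 0:
            models = [cpus[i + 2] for i in range(0, len(cpus), 3)]
        elif len(cpus) % 2 == 0:
            models = [cpus[i + 1] for i in range(0, len(cpus), 2)]
        else:
            raise ValueError("CPU list format not recognized")
        return ", ".join(f"{m} x{n}" for m, n in Counter(models).items())
    except Exception:
        return ""

# Invented sample: (index, frequency, model) triples for a two-socket host.
sample = ["0", "2.40GHz", "Intel Xeon Gold 6230", "1", "2.40GHz", "Intel Xeon Gold 6230"]
print(cpu_model_count(sample))  # -> Intel Xeon Gold 6230 x2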
@@ -23,16 +23,5 @@
          arch: "{{ ansible_architecture }}"
          kernel: "{{ ansible_kernel }}"

    - name: Get GPU info with nvidia-smi
      shell: |
        nvidia-smi --query-gpu=name,memory.total,driver_version --format=csv,noheader,nounits
      register: gpu_info
      ignore_errors: yes

    - name: Merge GPU info into final info
      set_fact:
        info: "{{ info | combine({'gpu_model': gpu_info.stdout_lines | default([])}) }}"

    - debug:
        var: info
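Note on the gather-facts playbook hunk above: the added tasks shell out to nvidia-smi in CSV mode and merge the raw stdout lines into the collected info under gpu_model, with ignore_errors so hosts without an NVIDIA GPU still report cleanly. A hedged sketch of how those CSV lines could be condensed into a model summary on the Python side, in the same spirit as get_gpu_model_count (the sample output below is invented):

from collections import Counter

def summarize_gpu_lines(lines):
    # Each line of `nvidia-smi --query-gpu=name,memory.total,driver_version
    # --format=csv,noheader,nounits` looks like "NAME, MEMORY_MB, DRIVER".
    names = [line.split(",")[0].strip() for line in lines if line.strip()]
    return ", ".join(f"{name} x{count}" for name, count in Counter(names).items())

sample = [
    "NVIDIA A100-SXM4-80GB, 81920, 535.129.03",
    "NVIDIA A100-SXM4-80GB, 81920, 535.129.03",
]
print(summarize_gpu_lines(sample))  # -> NVIDIA A100-SXM4-80GB x2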
@@ -3,8 +3,7 @@
  vars:
    ansible_shell_type: sh
    ansible_connection: local
    ansible_python_interpreter: "{{ local_python_interpreter }}"
    ansible_timeout: 30
    ansible_python_interpreter: /opt/py3/bin/python

  tasks:
    - name: Test asset connection (pyfreerdp)
@@ -4,7 +4,7 @@
    ansible_connection: local
    ansible_shell_type: sh
    ansible_become: false
    ansible_timeout: 30

  tasks:
    - name: Test asset connection (paramiko)
      ssh_ping:
@@ -3,7 +3,7 @@
  vars:
    ansible_connection: local
    ansible_shell_type: sh
    ansible_timeout: 30

  tasks:
    - name: Test asset connection (telnet)
      telnet_ping:
@@ -2,7 +2,6 @@
  gather_facts: no
  vars:
    ansible_python_interpreter: "{{ local_python_interpreter }}"
    ansible_timeout: 30

  tasks:
    - name: Test MongoDB connection
@@ -17,5 +16,3 @@
        ssl_certfile: "{{ jms_asset.secret_info.client_key | default('') }}"
        connection_options:
          - tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
      register: result
      failed_when: not result.is_available
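Note on the MongoDB ping playbook hunk above: the task passes client TLS material from jms_asset.secret_info and marks the run failed when result.is_available is false. A minimal reachability check in the same spirit, written directly against pymongo (a real library; the host and credential values are placeholders):

from pymongo import MongoClient

def mongodb_is_available(host, port, username, password, timeout_ms=3000):
    # "ping" is a standard MongoDB admin command; a short server-selection
    # timeout keeps the probe from hanging on unreachable hosts.
    client = MongoClient(host=host, port=port, username=username,
                         password=password, serverSelectionTimeoutMS=timeout_ms)
    try:
        client.admin.command("ping")
        return True
    except Exception:
        return False

# Hypothetical usage:
# print(mongodb_is_available("10.0.0.5", 27017, "jms", "secret"))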
@@ -6,7 +6,6 @@
    ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
    ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
    ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"
    ansible_timeout: 30

  tasks:
    - name: Test MySQL connection
@@ -2,7 +2,6 @@
  gather_facts: no
  vars:
    ansible_python_interpreter: "{{ local_python_interpreter }}"
    ansible_timeout: 30

  tasks:
    - name: Test Oracle connection
@@ -6,7 +6,7 @@
    ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
    ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
    ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"
    ansible_timeout: 30

  tasks:
    - name: Test PostgreSQL connection
      community.postgresql.postgresql_ping:
@@ -2,7 +2,6 @@
  gather_facts: no
  vars:
    ansible_python_interpreter: "{{ local_python_interpreter }}"
    ansible_timeout: 30

  tasks:
    - name: Test SQLServer connection
Some files were not shown because too many files have changed in this diff.