mirror of
https://github.com/jumpserver/jumpserver.git
synced 2025-12-16 17:12:53 +00:00
Compare commits
186 Commits
v4.10.3-lt
...
pr@dev@fea
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ecdc1a056a | ||
|
|
5001ca6960 | ||
|
|
16461b0fa9 | ||
|
|
528b0ea1ba | ||
|
|
60f06adaa9 | ||
|
|
7a6187b95f | ||
|
|
aacaf3a174 | ||
|
|
3c9d2534fa | ||
|
|
4f79abe678 | ||
|
|
ae9956ff91 | ||
|
|
429677e0ce | ||
|
|
034ee65157 | ||
|
|
fdd7d9b6b1 | ||
|
|
db0e21f5d9 | ||
|
|
468b84eb3d | ||
|
|
28d5475d0f | ||
|
|
b9c60d856f | ||
|
|
bd1d73c6dd | ||
|
|
bf92c756d4 | ||
|
|
62ebe0d636 | ||
|
|
0b1fea8492 | ||
|
|
65b5f573f8 | ||
|
|
bb639e1fe7 | ||
|
|
395b868dcf | ||
|
|
1350b774b3 | ||
|
|
af7a00c1b1 | ||
|
|
965ec7007c | ||
|
|
1372fd7535 | ||
|
|
3b0ef4cca7 | ||
|
|
6832abdaad | ||
|
|
c6bf290dbb | ||
|
|
23ab66c11a | ||
|
|
1debaa5547 | ||
|
|
47413966c9 | ||
|
|
703f39607c | ||
|
|
b65ff0d84c | ||
|
|
30d781dd12 | ||
|
|
9551cd4da9 | ||
|
|
87b456c941 | ||
|
|
d4d5224c17 | ||
|
|
dabb30d90a | ||
|
|
82192d38e1 | ||
|
|
571d2b4575 | ||
|
|
ea64313c4e | ||
|
|
8764cdb733 | ||
|
|
980394efed | ||
|
|
2c94f10d64 | ||
|
|
e1c9f5180d | ||
|
|
3f1d7fa230 | ||
|
|
44bcd6e399 | ||
|
|
5f87d98c31 | ||
|
|
540becdcbe | ||
|
|
6929c4968e | ||
|
|
63b213d3a8 | ||
|
|
64fe7a55ec | ||
|
|
27829e09ef | ||
|
|
1bfc7daef6 | ||
|
|
9422aebc5e | ||
|
|
8c0cd20b48 | ||
|
|
0c612648a0 | ||
|
|
36e01a316c | ||
|
|
e1b96e01eb | ||
|
|
144f4b4466 | ||
|
|
8e007004c2 | ||
|
|
c14f740209 | ||
|
|
13a85f062c | ||
|
|
7f9d027bd3 | ||
|
|
c037ce1c29 | ||
|
|
ee7c6b4708 | ||
|
|
d0e625e322 | ||
|
|
c65794a99d | ||
|
|
1e4bca6e24 | ||
|
|
c1c5025fbb | ||
|
|
96020fa6b4 | ||
|
|
5ad6f87a9e | ||
|
|
9b0c73c9f9 | ||
|
|
c029714ffd | ||
|
|
c1e8a1b561 | ||
|
|
21126de2c1 | ||
|
|
7d06819bbe | ||
|
|
92b20fe2ef | ||
|
|
4326d35065 | ||
|
|
4810eae725 | ||
|
|
24f7946b7b | ||
|
|
4b9c4a550e | ||
|
|
d3ec23ba85 | ||
|
|
e3c33bca32 | ||
|
|
0fb7e84678 | ||
|
|
ab30bfb2d2 | ||
|
|
d9d034488f | ||
|
|
24bd7b7e1a | ||
|
|
7fb5fd3956 | ||
|
|
9c621f5ff5 | ||
|
|
ac8998b9ee | ||
|
|
b258537890 | ||
|
|
b38d83c578 | ||
|
|
257f290d18 | ||
|
|
d185be2180 | ||
|
|
4e33b5b478 | ||
|
|
1406437d4e | ||
|
|
e46aa95980 | ||
|
|
c619a35a04 | ||
|
|
29f10bf10e | ||
|
|
a822905ae7 | ||
|
|
dc5a743f4f | ||
|
|
1de8781704 | ||
|
|
f3d9f4c446 | ||
|
|
6b5d5c15ae | ||
|
|
1074a0df19 | ||
|
|
04dca794dd | ||
|
|
14e0396508 | ||
|
|
835eb2e3d0 | ||
|
|
be24f28d9b | ||
|
|
26cea550c4 | ||
|
|
36ae076cb0 | ||
|
|
51c5294fb4 | ||
|
|
da083fffa3 | ||
|
|
1df04d2a94 | ||
|
|
299e52cd11 | ||
|
|
38b268b104 | ||
|
|
6095e9c9bd | ||
|
|
c4a348aac6 | ||
|
|
75575af56f | ||
|
|
8f91cb1473 | ||
|
|
b72e8eba7c | ||
|
|
d1d6f3fe9c | ||
|
|
6095c9865f | ||
|
|
6c374cb41f | ||
|
|
df64145adc | ||
|
|
44d77ba03f | ||
|
|
3af188492f | ||
|
|
9e798cd0b6 | ||
|
|
4d22c0722b | ||
|
|
e6a1662780 | ||
|
|
cc4be36752 | ||
|
|
e1f5d3c737 | ||
|
|
c0adc1fe74 | ||
|
|
613715135b | ||
|
|
fe1d5f9828 | ||
|
|
1d375e15c5 | ||
|
|
ac21d260ea | ||
|
|
accde77307 | ||
|
|
c7dcf1ba59 | ||
|
|
b564bbebb3 | ||
|
|
9440c855f4 | ||
|
|
f282b2079e | ||
|
|
1790cd8345 | ||
|
|
7da74dc6e8 | ||
|
|
33b0068f49 | ||
|
|
9a446c118b | ||
|
|
4bf337b2b4 | ||
|
|
2acbb80920 | ||
|
|
ae859c5562 | ||
|
|
a9bc716af5 | ||
|
|
2d5401e76e | ||
|
|
d933e296bc | ||
|
|
1e5a995917 | ||
|
|
baaaf83ab9 | ||
|
|
ab06ac1f1f | ||
|
|
99c4622ccb | ||
|
|
9bdfab966f | ||
|
|
1a1acb62de | ||
|
|
2a128ea01b | ||
|
|
5a720b41bf | ||
|
|
726c5cf34d | ||
|
|
06afc8a0e1 | ||
|
|
276fd928a7 | ||
|
|
05c6272d7e | ||
|
|
c3f877d116 | ||
|
|
60deef2abf | ||
|
|
058754dc1b | ||
|
|
a238c5d34b | ||
|
|
76c6ed0f95 | ||
|
|
0d07f7421b | ||
|
|
b07d4e207c | ||
|
|
dc92963059 | ||
|
|
9abd708a0a | ||
|
|
c9270877eb | ||
|
|
b5518dd2ba | ||
|
|
1d40f5ecbc | ||
|
|
91fee6c034 | ||
|
|
1b65055c5e | ||
|
|
e79ef516a5 | ||
|
|
8843f247d6 | ||
|
|
cb42df542d | ||
|
|
46ddad1d59 |
11
.prettierrc
Normal file
11
.prettierrc
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"tabWidth": 4,
|
||||||
|
"useTabs": false,
|
||||||
|
"semi": true,
|
||||||
|
"singleQuote": true,
|
||||||
|
"trailingComma": "es5",
|
||||||
|
"bracketSpacing": true,
|
||||||
|
"arrowParens": "avoid",
|
||||||
|
"printWidth": 100,
|
||||||
|
"endOfLine": "lf"
|
||||||
|
}
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
FROM jumpserver/core-base:20250509_094529 AS stage-build
|
FROM jumpserver/core-base:20250827_025554 AS stage-build
|
||||||
|
|
||||||
ARG VERSION
|
ARG VERSION
|
||||||
|
|
||||||
@@ -33,6 +33,7 @@ ARG TOOLS=" \
|
|||||||
default-libmysqlclient-dev \
|
default-libmysqlclient-dev \
|
||||||
openssh-client \
|
openssh-client \
|
||||||
sshpass \
|
sshpass \
|
||||||
|
nmap \
|
||||||
bubblewrap"
|
bubblewrap"
|
||||||
|
|
||||||
ARG APT_MIRROR=http://deb.debian.org
|
ARG APT_MIRROR=http://deb.debian.org
|
||||||
|
|||||||
@@ -13,7 +13,9 @@ ARG TOOLS=" \
|
|||||||
nmap \
|
nmap \
|
||||||
telnet \
|
telnet \
|
||||||
vim \
|
vim \
|
||||||
wget"
|
postgresql-client-13 \
|
||||||
|
wget \
|
||||||
|
poppler-utils"
|
||||||
|
|
||||||
RUN set -ex \
|
RUN set -ex \
|
||||||
&& apt-get update \
|
&& apt-get update \
|
||||||
@@ -26,5 +28,5 @@ WORKDIR /opt/jumpserver
|
|||||||
ARG PIP_MIRROR=https://pypi.org/simple
|
ARG PIP_MIRROR=https://pypi.org/simple
|
||||||
|
|
||||||
RUN set -ex \
|
RUN set -ex \
|
||||||
&& uv pip install -i${PIP_MIRROR} --group xpack
|
&& uv pip install -i${PIP_MIRROR} --group xpack \
|
||||||
|
&& playwright install chromium --with-deps --only-shell
|
||||||
@@ -2,7 +2,7 @@
|
|||||||
<a name="readme-top"></a>
|
<a name="readme-top"></a>
|
||||||
<a href="https://jumpserver.com" target="_blank"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a>
|
<a href="https://jumpserver.com" target="_blank"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a>
|
||||||
|
|
||||||
## An open-source PAM tool (Bastion Host)
|
## An open-source PAM platform (Bastion Host)
|
||||||
|
|
||||||
[![][license-shield]][license-link]
|
[![][license-shield]][license-link]
|
||||||
[![][docs-shield]][docs-link]
|
[![][docs-shield]][docs-link]
|
||||||
@@ -19,7 +19,7 @@
|
|||||||
|
|
||||||
## What is JumpServer?
|
## What is JumpServer?
|
||||||
|
|
||||||
JumpServer is an open-source Privileged Access Management (PAM) tool that provides DevOps and IT teams with on-demand and secure access to SSH, RDP, Kubernetes, Database and RemoteApp endpoints through a web browser.
|
JumpServer is an open-source Privileged Access Management (PAM) platform that provides DevOps and IT teams with on-demand and secure access to SSH, RDP, Kubernetes, Database and RemoteApp endpoints through a web browser.
|
||||||
|
|
||||||
|
|
||||||
<picture>
|
<picture>
|
||||||
@@ -85,6 +85,8 @@ JumpServer consists of multiple key components, which collectively form the func
|
|||||||
| [Nec](https://github.com/jumpserver/nec) | <img alt="Nec" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE VNC Proxy Connector |
|
| [Nec](https://github.com/jumpserver/nec) | <img alt="Nec" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE VNC Proxy Connector |
|
||||||
| [Facelive](https://github.com/jumpserver/facelive) | <img alt="Facelive" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Facial Recognition |
|
| [Facelive](https://github.com/jumpserver/facelive) | <img alt="Facelive" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Facial Recognition |
|
||||||
|
|
||||||
|
## Third-party projects
|
||||||
|
- [jumpserver-grafana-dashboard](https://github.com/acerrah/jumpserver-grafana-dashboard) JumpServer with grafana dashboard
|
||||||
|
|
||||||
## Contributing
|
## Contributing
|
||||||
|
|
||||||
|
|||||||
@@ -41,8 +41,8 @@ class AccountViewSet(OrgBulkModelViewSet):
|
|||||||
'partial_update': ['accounts.change_account'],
|
'partial_update': ['accounts.change_account'],
|
||||||
'su_from_accounts': 'accounts.view_account',
|
'su_from_accounts': 'accounts.view_account',
|
||||||
'clear_secret': 'accounts.change_account',
|
'clear_secret': 'accounts.change_account',
|
||||||
'move_to_assets': 'accounts.create_account',
|
'move_to_assets': 'accounts.delete_account',
|
||||||
'copy_to_assets': 'accounts.create_account',
|
'copy_to_assets': 'accounts.add_account',
|
||||||
}
|
}
|
||||||
export_as_zip = True
|
export_as_zip = True
|
||||||
|
|
||||||
@@ -190,6 +190,7 @@ class AccountHistoriesSecretAPI(ExtraFilterFieldsMixin, AccountRecordViewLogMixi
|
|||||||
rbac_perms = {
|
rbac_perms = {
|
||||||
'GET': 'accounts.view_accountsecret',
|
'GET': 'accounts.view_accountsecret',
|
||||||
}
|
}
|
||||||
|
queryset = Account.history.model.objects.none()
|
||||||
|
|
||||||
@lazyproperty
|
@lazyproperty
|
||||||
def account(self) -> Account:
|
def account(self) -> Account:
|
||||||
|
|||||||
@@ -20,7 +20,7 @@ __all__ = ['PamDashboardApi']
|
|||||||
class PamDashboardApi(APIView):
|
class PamDashboardApi(APIView):
|
||||||
http_method_names = ['get']
|
http_method_names = ['get']
|
||||||
rbac_perms = {
|
rbac_perms = {
|
||||||
'GET': 'accounts.view_account',
|
'GET': 'rbac.view_pam',
|
||||||
}
|
}
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
|
|||||||
@@ -12,6 +12,8 @@ class VirtualAccountViewSet(OrgBulkModelViewSet):
|
|||||||
filterset_fields = ('alias',)
|
filterset_fields = ('alias',)
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
|
if getattr(self, "swagger_fake_view", False):
|
||||||
|
return VirtualAccount.objects.none()
|
||||||
return VirtualAccount.get_or_init_queryset()
|
return VirtualAccount.get_or_init_queryset()
|
||||||
|
|
||||||
def get_object(self, ):
|
def get_object(self, ):
|
||||||
|
|||||||
@@ -41,6 +41,7 @@ class AutomationAssetsListApi(generics.ListAPIView):
|
|||||||
|
|
||||||
class AutomationRemoveAssetApi(generics.UpdateAPIView):
|
class AutomationRemoveAssetApi(generics.UpdateAPIView):
|
||||||
model = BaseAutomation
|
model = BaseAutomation
|
||||||
|
queryset = BaseAutomation.objects.all()
|
||||||
serializer_class = serializers.UpdateAssetSerializer
|
serializer_class = serializers.UpdateAssetSerializer
|
||||||
http_method_names = ['patch']
|
http_method_names = ['patch']
|
||||||
|
|
||||||
@@ -59,6 +60,7 @@ class AutomationRemoveAssetApi(generics.UpdateAPIView):
|
|||||||
|
|
||||||
class AutomationAddAssetApi(generics.UpdateAPIView):
|
class AutomationAddAssetApi(generics.UpdateAPIView):
|
||||||
model = BaseAutomation
|
model = BaseAutomation
|
||||||
|
queryset = BaseAutomation.objects.all()
|
||||||
serializer_class = serializers.UpdateAssetSerializer
|
serializer_class = serializers.UpdateAssetSerializer
|
||||||
http_method_names = ['patch']
|
http_method_names = ['patch']
|
||||||
|
|
||||||
|
|||||||
@@ -97,12 +97,13 @@ class ChangeSecretRecordViewSet(mixins.ListModelMixin, OrgGenericViewSet):
|
|||||||
def execute(self, request, *args, **kwargs):
|
def execute(self, request, *args, **kwargs):
|
||||||
record_ids = request.data.get('record_ids')
|
record_ids = request.data.get('record_ids')
|
||||||
records = self.get_queryset().filter(id__in=record_ids)
|
records = self.get_queryset().filter(id__in=record_ids)
|
||||||
execution_count = records.values_list('execution_id', flat=True).distinct().count()
|
if not records.exists():
|
||||||
if execution_count != 1:
|
|
||||||
return Response(
|
return Response(
|
||||||
{'detail': 'Only one execution is allowed to execute'},
|
{'detail': 'No valid records found'},
|
||||||
status=status.HTTP_400_BAD_REQUEST
|
status=status.HTTP_400_BAD_REQUEST
|
||||||
)
|
)
|
||||||
|
|
||||||
|
record_ids = [str(_id) for _id in records.values_list('id', flat=True)]
|
||||||
task = execute_automation_record_task.delay(record_ids, self.tp)
|
task = execute_automation_record_task.delay(record_ids, self.tp)
|
||||||
return Response({'task': task.id}, status=status.HTTP_200_OK)
|
return Response({'task': task.id}, status=status.HTTP_200_OK)
|
||||||
|
|
||||||
@@ -153,12 +154,10 @@ class ChangSecretAddAssetApi(AutomationAddAssetApi):
|
|||||||
model = ChangeSecretAutomation
|
model = ChangeSecretAutomation
|
||||||
serializer_class = serializers.ChangeSecretUpdateAssetSerializer
|
serializer_class = serializers.ChangeSecretUpdateAssetSerializer
|
||||||
|
|
||||||
|
|
||||||
class ChangSecretNodeAddRemoveApi(AutomationNodeAddRemoveApi):
|
class ChangSecretNodeAddRemoveApi(AutomationNodeAddRemoveApi):
|
||||||
model = ChangeSecretAutomation
|
model = ChangeSecretAutomation
|
||||||
serializer_class = serializers.ChangeSecretUpdateNodeSerializer
|
serializer_class = serializers.ChangeSecretUpdateNodeSerializer
|
||||||
|
|
||||||
|
|
||||||
class ChangeSecretStatusViewSet(OrgBulkModelViewSet):
|
class ChangeSecretStatusViewSet(OrgBulkModelViewSet):
|
||||||
perm_model = ChangeSecretAutomation
|
perm_model = ChangeSecretAutomation
|
||||||
filterset_class = ChangeSecretStatusFilterSet
|
filterset_class = ChangeSecretStatusFilterSet
|
||||||
|
|||||||
@@ -62,7 +62,8 @@ class ChangeSecretDashboardApi(APIView):
|
|||||||
status_counts = defaultdict(lambda: defaultdict(int))
|
status_counts = defaultdict(lambda: defaultdict(int))
|
||||||
|
|
||||||
for date_finished, status in results:
|
for date_finished, status in results:
|
||||||
date_str = str(date_finished.date())
|
dt_local = timezone.localtime(date_finished)
|
||||||
|
date_str = str(dt_local.date())
|
||||||
if status == ChangeSecretRecordStatusChoice.failed:
|
if status == ChangeSecretRecordStatusChoice.failed:
|
||||||
status_counts[date_str]['failed'] += 1
|
status_counts[date_str]['failed'] += 1
|
||||||
elif status == ChangeSecretRecordStatusChoice.success:
|
elif status == ChangeSecretRecordStatusChoice.success:
|
||||||
@@ -90,10 +91,10 @@ class ChangeSecretDashboardApi(APIView):
|
|||||||
|
|
||||||
def get_change_secret_asset_queryset(self):
|
def get_change_secret_asset_queryset(self):
|
||||||
qs = self.change_secrets_queryset
|
qs = self.change_secrets_queryset
|
||||||
node_ids = qs.filter(nodes__isnull=False).values_list('nodes', flat=True).distinct()
|
node_ids = qs.values_list('nodes', flat=True).distinct()
|
||||||
nodes = Node.objects.filter(id__in=node_ids)
|
nodes = Node.objects.filter(id__in=node_ids).only('id', 'key')
|
||||||
node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list('id', flat=True)
|
node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list('id', flat=True)
|
||||||
direct_asset_ids = qs.filter(assets__isnull=False).values_list('assets', flat=True).distinct()
|
direct_asset_ids = qs.values_list('assets', flat=True).distinct()
|
||||||
asset_ids = set(list(direct_asset_ids) + list(node_asset_ids))
|
asset_ids = set(list(direct_asset_ids) + list(node_asset_ids))
|
||||||
return Asset.objects.filter(id__in=asset_ids)
|
return Asset.objects.filter(id__in=asset_ids)
|
||||||
|
|
||||||
|
|||||||
@@ -45,10 +45,10 @@ class CheckAccountAutomationViewSet(OrgBulkModelViewSet):
|
|||||||
class CheckAccountExecutionViewSet(AutomationExecutionViewSet):
|
class CheckAccountExecutionViewSet(AutomationExecutionViewSet):
|
||||||
rbac_perms = (
|
rbac_perms = (
|
||||||
("list", "accounts.view_checkaccountexecution"),
|
("list", "accounts.view_checkaccountexecution"),
|
||||||
("retrieve", "accounts.view_checkaccountsexecution"),
|
("retrieve", "accounts.view_checkaccountexecution"),
|
||||||
("create", "accounts.add_checkaccountexecution"),
|
("create", "accounts.add_checkaccountexecution"),
|
||||||
("adhoc", "accounts.add_checkaccountexecution"),
|
("adhoc", "accounts.add_checkaccountexecution"),
|
||||||
("report", "accounts.view_checkaccountsexecution"),
|
("report", "accounts.view_checkaccountexecution"),
|
||||||
)
|
)
|
||||||
ordering = ("-date_created",)
|
ordering = ("-date_created",)
|
||||||
tp = AutomationTypes.check_account
|
tp = AutomationTypes.check_account
|
||||||
@@ -150,6 +150,9 @@ class CheckAccountEngineViewSet(JMSModelViewSet):
|
|||||||
http_method_names = ['get', 'options']
|
http_method_names = ['get', 'options']
|
||||||
|
|
||||||
def get_queryset(self):
|
def get_queryset(self):
|
||||||
|
if getattr(self, "swagger_fake_view", False):
|
||||||
|
return CheckAccountEngine.objects.none()
|
||||||
|
|
||||||
return CheckAccountEngine.get_default_engines()
|
return CheckAccountEngine.get_default_engines()
|
||||||
|
|
||||||
def filter_queryset(self, queryset: list):
|
def filter_queryset(self, queryset: list):
|
||||||
|
|||||||
@@ -63,12 +63,10 @@ class PushAccountRemoveAssetApi(AutomationRemoveAssetApi):
|
|||||||
model = PushAccountAutomation
|
model = PushAccountAutomation
|
||||||
serializer_class = serializers.PushAccountUpdateAssetSerializer
|
serializer_class = serializers.PushAccountUpdateAssetSerializer
|
||||||
|
|
||||||
|
|
||||||
class PushAccountAddAssetApi(AutomationAddAssetApi):
|
class PushAccountAddAssetApi(AutomationAddAssetApi):
|
||||||
model = PushAccountAutomation
|
model = PushAccountAutomation
|
||||||
serializer_class = serializers.PushAccountUpdateAssetSerializer
|
serializer_class = serializers.PushAccountUpdateAssetSerializer
|
||||||
|
|
||||||
|
|
||||||
class PushAccountNodeAddRemoveApi(AutomationNodeAddRemoveApi):
|
class PushAccountNodeAddRemoveApi(AutomationNodeAddRemoveApi):
|
||||||
model = PushAccountAutomation
|
model = PushAccountAutomation
|
||||||
serializer_class = serializers.PushAccountUpdateNodeSerializer
|
serializer_class = serializers.PushAccountUpdateNodeSerializer
|
||||||
@@ -105,6 +105,10 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
|
|||||||
h['account']['mode'] = 'sysdba' if account.privileged else None
|
h['account']['mode'] = 'sysdba' if account.privileged else None
|
||||||
return h
|
return h
|
||||||
|
|
||||||
|
def add_extra_params(self, host, **kwargs):
|
||||||
|
host['ssh_params'] = {}
|
||||||
|
return host
|
||||||
|
|
||||||
def host_callback(self, host, asset=None, account=None, automation=None, path_dir=None, **kwargs):
|
def host_callback(self, host, asset=None, account=None, automation=None, path_dir=None, **kwargs):
|
||||||
host = super().host_callback(
|
host = super().host_callback(
|
||||||
host, asset=asset, account=account, automation=automation,
|
host, asset=asset, account=account, automation=automation,
|
||||||
@@ -113,8 +117,7 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
|
|||||||
if host.get('error'):
|
if host.get('error'):
|
||||||
return host
|
return host
|
||||||
|
|
||||||
host['ssh_params'] = {}
|
host = self.add_extra_params(host, automation=automation)
|
||||||
|
|
||||||
accounts = self.get_accounts(account)
|
accounts = self.get_accounts(account)
|
||||||
existing_ids = set(map(str, accounts.values_list('id', flat=True)))
|
existing_ids = set(map(str, accounts.values_list('id', flat=True)))
|
||||||
missing_ids = set(map(str, self.account_ids)) - existing_ids
|
missing_ids = set(map(str, self.account_ids)) - existing_ids
|
||||||
|
|||||||
@@ -54,3 +54,5 @@
|
|||||||
connection_options:
|
connection_options:
|
||||||
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
|
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
|
||||||
when: check_conn_after_change
|
when: check_conn_after_change
|
||||||
|
register: result
|
||||||
|
failed_when: not result.is_available
|
||||||
@@ -39,7 +39,8 @@
|
|||||||
name: "{{ account.username }}"
|
name: "{{ account.username }}"
|
||||||
password: "{{ account.secret }}"
|
password: "{{ account.secret }}"
|
||||||
host: "%"
|
host: "%"
|
||||||
priv: "{{ account.username + '.*:USAGE' if db_name == '' else db_name + '.*:ALL' }}"
|
priv: "{{ omit if db_name == '' else db_name + '.*:ALL' }}"
|
||||||
|
append_privs: "{{ db_name != '' | bool }}"
|
||||||
ignore_errors: true
|
ignore_errors: true
|
||||||
when: db_info is succeeded
|
when: db_info is succeeded
|
||||||
|
|
||||||
|
|||||||
@@ -56,3 +56,5 @@
|
|||||||
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
|
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
|
||||||
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
|
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
|
||||||
when: check_conn_after_change
|
when: check_conn_after_change
|
||||||
|
register: result
|
||||||
|
failed_when: not result.is_available
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ type:
|
|||||||
params:
|
params:
|
||||||
- name: groups
|
- name: groups
|
||||||
type: str
|
type: str
|
||||||
label: '用户组'
|
label: "{{ 'Params groups label' | trans }}"
|
||||||
default: 'Users,Remote Desktop Users'
|
default: 'Users,Remote Desktop Users'
|
||||||
help_text: "{{ 'Params groups help text' | trans }}"
|
help_text: "{{ 'Params groups help text' | trans }}"
|
||||||
|
|
||||||
@@ -24,3 +24,7 @@ i18n:
|
|||||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||||
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
||||||
|
|
||||||
|
Params groups label:
|
||||||
|
zh: '用户组'
|
||||||
|
ja: 'グループ'
|
||||||
|
en: 'Groups'
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ type:
|
|||||||
params:
|
params:
|
||||||
- name: groups
|
- name: groups
|
||||||
type: str
|
type: str
|
||||||
label: '用户组'
|
label: "{{ 'Params groups label' | trans }}"
|
||||||
default: 'Users,Remote Desktop Users'
|
default: 'Users,Remote Desktop Users'
|
||||||
help_text: "{{ 'Params groups help text' | trans }}"
|
help_text: "{{ 'Params groups help text' | trans }}"
|
||||||
|
|
||||||
@@ -25,3 +25,8 @@ i18n:
|
|||||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||||
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
||||||
|
|
||||||
|
Params groups label:
|
||||||
|
zh: '用户组'
|
||||||
|
ja: 'グループ'
|
||||||
|
en: 'Groups'
|
||||||
|
|
||||||
|
|||||||
@@ -9,19 +9,24 @@ priority: 49
|
|||||||
params:
|
params:
|
||||||
- name: groups
|
- name: groups
|
||||||
type: str
|
type: str
|
||||||
label: '用户组'
|
label: "{{ 'Params groups label' | trans }}"
|
||||||
default: 'Users,Remote Desktop Users'
|
default: 'Users,Remote Desktop Users'
|
||||||
help_text: "{{ 'Params groups help text' | trans }}"
|
help_text: "{{ 'Params groups help text' | trans }}"
|
||||||
|
|
||||||
|
|
||||||
i18n:
|
i18n:
|
||||||
Windows account change secret rdp verify:
|
Windows account change secret rdp verify:
|
||||||
zh: '使用 Ansible 模块 win_user 执行 Windows 账号改密 RDP 协议测试最后的可连接性'
|
zh: '使用 Ansible 模块 win_user 执行 Windows 账号改密(最后使用 Python 模块 pyfreerdp 验证账号的可连接性)'
|
||||||
ja: 'Ansibleモジュールwin_userはWindowsアカウントの改密RDPプロトコルテストの最後の接続性を実行する'
|
ja: 'Ansible モジュール win_user を使用して Windows アカウントのパスワードを変更します (最後に Python モジュール pyfreerdp を使用してアカウントの接続を確認します)'
|
||||||
en: 'Using the Ansible module win_user performs Windows account encryption RDP protocol testing for final connectivity'
|
en: 'Use the Ansible module win_user to change the Windows account password (finally use the Python module pyfreerdp to verify the account connectivity)'
|
||||||
|
|
||||||
Params groups help text:
|
Params groups help text:
|
||||||
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||||
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
||||||
|
|
||||||
|
Params groups label:
|
||||||
|
zh: '用户组'
|
||||||
|
ja: 'グループ'
|
||||||
|
en: 'Groups'
|
||||||
|
|
||||||
|
|||||||
@@ -5,6 +5,9 @@ from django.conf import settings
|
|||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
from xlsxwriter import Workbook
|
from xlsxwriter import Workbook
|
||||||
|
|
||||||
|
from assets.automations.methods import platform_automation_methods as asset_methods
|
||||||
|
from assets.const import AutomationTypes as AssetAutomationTypes
|
||||||
|
from accounts.automations.methods import platform_automation_methods as account_methods
|
||||||
from accounts.const import (
|
from accounts.const import (
|
||||||
AutomationTypes, SecretStrategy, ChangeSecretRecordStatusChoice
|
AutomationTypes, SecretStrategy, ChangeSecretRecordStatusChoice
|
||||||
)
|
)
|
||||||
@@ -22,6 +25,22 @@ logger = get_logger(__name__)
|
|||||||
class ChangeSecretManager(BaseChangeSecretPushManager):
|
class ChangeSecretManager(BaseChangeSecretPushManager):
|
||||||
ansible_account_prefer = ''
|
ansible_account_prefer = ''
|
||||||
|
|
||||||
|
def get_method_id_meta_mapper(self):
|
||||||
|
return {
|
||||||
|
method["id"]: method for method in self.platform_automation_methods
|
||||||
|
}
|
||||||
|
|
||||||
|
@property
|
||||||
|
def platform_automation_methods(self):
|
||||||
|
return asset_methods + account_methods
|
||||||
|
|
||||||
|
def add_extra_params(self, host, **kwargs):
|
||||||
|
host = super().add_extra_params(host, **kwargs)
|
||||||
|
automation = kwargs.get('automation')
|
||||||
|
for extra_type in [AssetAutomationTypes.ping, AutomationTypes.verify_account]:
|
||||||
|
host[f"{extra_type}_params"] = self.get_params(automation, extra_type)
|
||||||
|
return host
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def method_type(cls):
|
def method_type(cls):
|
||||||
return AutomationTypes.change_secret
|
return AutomationTypes.change_secret
|
||||||
|
|||||||
36
apps/accounts/automations/change_secret/web/website/main.yml
Normal file
36
apps/accounts/automations/change_secret/web/website/main.yml
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
- hosts: website
|
||||||
|
gather_facts: no
|
||||||
|
vars:
|
||||||
|
ansible_python_interpreter: "{{ local_python_interpreter }}"
|
||||||
|
|
||||||
|
tasks:
|
||||||
|
- name: Test privileged account
|
||||||
|
website_ping:
|
||||||
|
login_host: "{{ jms_asset.address }}"
|
||||||
|
login_user: "{{ jms_account.username }}"
|
||||||
|
login_password: "{{ jms_account.secret }}"
|
||||||
|
steps: "{{ ping_params.steps }}"
|
||||||
|
load_state: "{{ ping_params.load_state }}"
|
||||||
|
|
||||||
|
- name: "Change {{ account.username }} password"
|
||||||
|
website_user:
|
||||||
|
login_host: "{{ jms_asset.address }}"
|
||||||
|
login_user: "{{ jms_account.username }}"
|
||||||
|
login_password: "{{ jms_account.secret }}"
|
||||||
|
steps: "{{ params.steps }}"
|
||||||
|
load_state: "{{ params.load_state }}"
|
||||||
|
name: "{{ account.username }}"
|
||||||
|
password: "{{ account.secret }}"
|
||||||
|
ignore_errors: true
|
||||||
|
register: change_secret_result
|
||||||
|
|
||||||
|
- name: "Verify {{ account.username }} password"
|
||||||
|
website_ping:
|
||||||
|
login_host: "{{ jms_asset.address }}"
|
||||||
|
login_user: "{{ account.username }}"
|
||||||
|
login_password: "{{ account.secret }}"
|
||||||
|
steps: "{{ verify_account_params.steps }}"
|
||||||
|
load_state: "{{ verify_account_params.load_state }}"
|
||||||
|
when:
|
||||||
|
- check_conn_after_change or change_secret_result.failed | default(false)
|
||||||
|
delegate_to: localhost
|
||||||
@@ -0,0 +1,51 @@
|
|||||||
|
id: change_account_website
|
||||||
|
name: "{{ 'Website account change secret' | trans }}"
|
||||||
|
category: web
|
||||||
|
type:
|
||||||
|
- website
|
||||||
|
method: change_secret
|
||||||
|
priority: 50
|
||||||
|
params:
|
||||||
|
- name: load_state
|
||||||
|
type: choice
|
||||||
|
label: "{{ 'Load state' | trans }}"
|
||||||
|
choices:
|
||||||
|
- [ networkidle, "{{ 'Network idle' | trans }}" ]
|
||||||
|
- [ domcontentloaded, "{{ 'Dom content loaded' | trans }}" ]
|
||||||
|
- [ load, "{{ 'Load completed' | trans }}" ]
|
||||||
|
default: 'load'
|
||||||
|
- name: steps
|
||||||
|
type: list
|
||||||
|
default: [ ]
|
||||||
|
label: "{{ 'Steps' | trans }}"
|
||||||
|
help_text: "{{ 'Params step help text' | trans }}"
|
||||||
|
|
||||||
|
i18n:
|
||||||
|
Website account change secret:
|
||||||
|
zh: 使用 Playwright 模拟浏览器变更账号密码
|
||||||
|
ja: Playwright を使用してブラウザをシミュレートし、アカウントのパスワードを変更します
|
||||||
|
en: Use Playwright to simulate a browser for account password change.
|
||||||
|
Load state:
|
||||||
|
zh: 加载状态检测
|
||||||
|
en: Load state detection
|
||||||
|
ja: ロード状態の検出
|
||||||
|
Steps:
|
||||||
|
zh: 步骤
|
||||||
|
en: Steps
|
||||||
|
ja: 手順
|
||||||
|
Network idle:
|
||||||
|
zh: 网络空闲
|
||||||
|
en: Network idle
|
||||||
|
ja: ネットワークが空いた状態
|
||||||
|
Dom content loaded:
|
||||||
|
zh: 文档内容加载完成
|
||||||
|
en: Dom content loaded
|
||||||
|
ja: ドキュメントの内容がロードされた状態
|
||||||
|
Load completed:
|
||||||
|
zh: 全部加载完成
|
||||||
|
en: All load completed
|
||||||
|
ja: すべてのロードが完了した状態
|
||||||
|
Params step help text:
|
||||||
|
zh: 根据配置决定任务执行步骤
|
||||||
|
ja: 設定に基づいてタスクの実行ステップを決定する
|
||||||
|
en: Determine task execution steps based on configuration
|
||||||
@@ -15,11 +15,13 @@ from common.decorators import bulk_create_decorator, bulk_update_decorator
|
|||||||
from settings.models import LeakPasswords
|
from settings.models import LeakPasswords
|
||||||
|
|
||||||
|
|
||||||
|
# 已设置手动 finish
|
||||||
@bulk_create_decorator(AccountRisk)
|
@bulk_create_decorator(AccountRisk)
|
||||||
def create_risk(data):
|
def create_risk(data):
|
||||||
return AccountRisk(**data)
|
return AccountRisk(**data)
|
||||||
|
|
||||||
|
|
||||||
|
# 已设置手动 finish
|
||||||
@bulk_update_decorator(AccountRisk, update_fields=["details", "status"])
|
@bulk_update_decorator(AccountRisk, update_fields=["details", "status"])
|
||||||
def update_risk(risk):
|
def update_risk(risk):
|
||||||
return risk
|
return risk
|
||||||
@@ -217,6 +219,9 @@ class CheckAccountManager(BaseManager):
|
|||||||
"details": [{"datetime": now, 'type': 'init'}],
|
"details": [{"datetime": now, 'type': 'init'}],
|
||||||
})
|
})
|
||||||
|
|
||||||
|
create_risk.finish()
|
||||||
|
update_risk.finish()
|
||||||
|
|
||||||
def pre_run(self):
|
def pre_run(self):
|
||||||
super().pre_run()
|
super().pre_run()
|
||||||
self.assets = self.execution.get_all_assets()
|
self.assets = self.execution.get_all_assets()
|
||||||
@@ -235,6 +240,11 @@ class CheckAccountManager(BaseManager):
|
|||||||
|
|
||||||
print("Check: {} => {}".format(account, msg))
|
print("Check: {} => {}".format(account, msg))
|
||||||
if not error:
|
if not error:
|
||||||
|
AccountRisk.objects.filter(
|
||||||
|
asset=account.asset,
|
||||||
|
username=account.username,
|
||||||
|
risk=handler.risk
|
||||||
|
).delete()
|
||||||
continue
|
continue
|
||||||
self.add_risk(handler.risk, account)
|
self.add_risk(handler.risk, account)
|
||||||
self.commit_risks(_assets)
|
self.commit_risks(_assets)
|
||||||
|
|||||||
@@ -30,6 +30,16 @@ common_risk_items = [
|
|||||||
diff_items = risk_items + common_risk_items
|
diff_items = risk_items + common_risk_items
|
||||||
|
|
||||||
|
|
||||||
|
@bulk_create_decorator(AccountRisk)
|
||||||
|
def _create_risk(data):
|
||||||
|
return AccountRisk(**data)
|
||||||
|
|
||||||
|
|
||||||
|
@bulk_update_decorator(AccountRisk, update_fields=["details"])
|
||||||
|
def _update_risk(account):
|
||||||
|
return account
|
||||||
|
|
||||||
|
|
||||||
def format_datetime(value):
|
def format_datetime(value):
|
||||||
if isinstance(value, timezone.datetime):
|
if isinstance(value, timezone.datetime):
|
||||||
return value.strftime("%Y-%m-%d %H:%M:%S")
|
return value.strftime("%Y-%m-%d %H:%M:%S")
|
||||||
@@ -141,25 +151,17 @@ class AnalyseAccountRisk:
|
|||||||
found = assets_risks.get(key)
|
found = assets_risks.get(key)
|
||||||
|
|
||||||
if not found:
|
if not found:
|
||||||
self._create_risk(dict(**d, details=[detail]))
|
_create_risk(dict(**d, details=[detail]))
|
||||||
continue
|
continue
|
||||||
|
|
||||||
found.details.append(detail)
|
found.details.append(detail)
|
||||||
self._update_risk(found)
|
_update_risk(found)
|
||||||
|
|
||||||
@bulk_create_decorator(AccountRisk)
|
|
||||||
def _create_risk(self, data):
|
|
||||||
return AccountRisk(**data)
|
|
||||||
|
|
||||||
@bulk_update_decorator(AccountRisk, update_fields=["details"])
|
|
||||||
def _update_risk(self, account):
|
|
||||||
return account
|
|
||||||
|
|
||||||
def lost_accounts(self, asset, lost_users):
|
def lost_accounts(self, asset, lost_users):
|
||||||
if not self.check_risk:
|
if not self.check_risk:
|
||||||
return
|
return
|
||||||
for user in lost_users:
|
for user in lost_users:
|
||||||
self._create_risk(
|
_create_risk(
|
||||||
dict(
|
dict(
|
||||||
asset_id=str(asset.id),
|
asset_id=str(asset.id),
|
||||||
username=user,
|
username=user,
|
||||||
@@ -176,7 +178,7 @@ class AnalyseAccountRisk:
|
|||||||
self._analyse_item_changed(ga, d)
|
self._analyse_item_changed(ga, d)
|
||||||
if not sys_found:
|
if not sys_found:
|
||||||
basic = {"asset": asset, "username": d["username"], 'gathered_account': ga}
|
basic = {"asset": asset, "username": d["username"], 'gathered_account': ga}
|
||||||
self._create_risk(
|
_create_risk(
|
||||||
dict(
|
dict(
|
||||||
**basic,
|
**basic,
|
||||||
risk=RiskChoice.new_found,
|
risk=RiskChoice.new_found,
|
||||||
@@ -388,6 +390,7 @@ class GatherAccountsManager(AccountBasePlaybookManager):
|
|||||||
self.update_gathered_account(ori_account, d)
|
self.update_gathered_account(ori_account, d)
|
||||||
ori_found = username in ori_users
|
ori_found = username in ori_users
|
||||||
need_analyser_gather_account.append((asset, ga, d, ori_found))
|
need_analyser_gather_account.append((asset, ga, d, ori_found))
|
||||||
|
# 这里顺序不能调整,risk 外键关联了 gathered_account 主键 id,所以在创建 risk 需要保证 gathered_account 已经创建完成
|
||||||
self.create_gathered_account.finish()
|
self.create_gathered_account.finish()
|
||||||
self.update_gathered_account.finish()
|
self.update_gathered_account.finish()
|
||||||
for analysis_data in need_analyser_gather_account:
|
for analysis_data in need_analyser_gather_account:
|
||||||
@@ -403,6 +406,9 @@ class GatherAccountsManager(AccountBasePlaybookManager):
|
|||||||
present=True
|
present=True
|
||||||
)
|
)
|
||||||
# 因为有 bulk create, bulk update, 所以这里需要 sleep 一下,等待数据同步
|
# 因为有 bulk create, bulk update, 所以这里需要 sleep 一下,等待数据同步
|
||||||
|
_update_risk.finish()
|
||||||
|
_create_risk.finish()
|
||||||
|
|
||||||
time.sleep(0.5)
|
time.sleep(0.5)
|
||||||
|
|
||||||
def get_report_template(self):
|
def get_report_template(self):
|
||||||
|
|||||||
@@ -54,3 +54,5 @@
|
|||||||
connection_options:
|
connection_options:
|
||||||
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
|
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
|
||||||
when: check_conn_after_change
|
when: check_conn_after_change
|
||||||
|
register: result
|
||||||
|
failed_when: not result.is_available
|
||||||
|
|||||||
@@ -39,7 +39,8 @@
|
|||||||
name: "{{ account.username }}"
|
name: "{{ account.username }}"
|
||||||
password: "{{ account.secret }}"
|
password: "{{ account.secret }}"
|
||||||
host: "%"
|
host: "%"
|
||||||
priv: "{{ account.username + '.*:USAGE' if db_name == '' else db_name + '.*:ALL' }}"
|
priv: "{{ omit if db_name == '' else db_name + '.*:ALL' }}"
|
||||||
|
append_privs: "{{ db_name != '' | bool }}"
|
||||||
ignore_errors: true
|
ignore_errors: true
|
||||||
when: db_info is succeeded
|
when: db_info is succeeded
|
||||||
|
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ type:
|
|||||||
params:
|
params:
|
||||||
- name: groups
|
- name: groups
|
||||||
type: str
|
type: str
|
||||||
label: '用户组'
|
label: "{{ 'Params groups label' | trans }}"
|
||||||
default: 'Users,Remote Desktop Users'
|
default: 'Users,Remote Desktop Users'
|
||||||
help_text: "{{ 'Params groups help text' | trans }}"
|
help_text: "{{ 'Params groups help text' | trans }}"
|
||||||
|
|
||||||
@@ -22,3 +22,8 @@ i18n:
|
|||||||
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||||
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
||||||
|
|
||||||
|
Params groups label:
|
||||||
|
zh: '用户组'
|
||||||
|
ja: 'グループ'
|
||||||
|
en: 'Groups'
|
||||||
@@ -9,7 +9,7 @@ type:
|
|||||||
params:
|
params:
|
||||||
- name: groups
|
- name: groups
|
||||||
type: str
|
type: str
|
||||||
label: '用户组'
|
label: "{{ 'Params groups label' | trans }}"
|
||||||
default: 'Users,Remote Desktop Users'
|
default: 'Users,Remote Desktop Users'
|
||||||
help_text: "{{ 'Params groups help text' | trans }}"
|
help_text: "{{ 'Params groups help text' | trans }}"
|
||||||
|
|
||||||
@@ -23,3 +23,8 @@ i18n:
|
|||||||
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||||
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
||||||
|
|
||||||
|
Params groups label:
|
||||||
|
zh: '用户组'
|
||||||
|
ja: 'グループ'
|
||||||
|
en: 'Groups'
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ priority: 49
|
|||||||
params:
|
params:
|
||||||
- name: groups
|
- name: groups
|
||||||
type: str
|
type: str
|
||||||
label: '用户组'
|
label: "{{ 'Params groups label' | trans }}"
|
||||||
default: 'Users,Remote Desktop Users'
|
default: 'Users,Remote Desktop Users'
|
||||||
help_text: "{{ 'Params groups help text' | trans }}"
|
help_text: "{{ 'Params groups help text' | trans }}"
|
||||||
|
|
||||||
@@ -23,3 +23,8 @@ i18n:
|
|||||||
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
|
||||||
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
|
||||||
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
|
||||||
|
|
||||||
|
Params groups label:
|
||||||
|
zh: '用户组'
|
||||||
|
ja: 'グループ'
|
||||||
|
en: 'Groups'
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
vars:
|
vars:
|
||||||
ansible_shell_type: sh
|
ansible_shell_type: sh
|
||||||
ansible_connection: local
|
ansible_connection: local
|
||||||
ansible_python_interpreter: /opt/py3/bin/python
|
ansible_python_interpreter: "{{ local_python_interpreter }}"
|
||||||
|
|
||||||
tasks:
|
tasks:
|
||||||
- name: Verify account (pyfreerdp)
|
- name: Verify account (pyfreerdp)
|
||||||
|
|||||||
@@ -16,3 +16,5 @@
|
|||||||
ssl_certfile: "{{ jms_asset.secret_info.client_key | default('') }}"
|
ssl_certfile: "{{ jms_asset.secret_info.client_key | default('') }}"
|
||||||
connection_options:
|
connection_options:
|
||||||
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert }}"
|
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert }}"
|
||||||
|
register: result
|
||||||
|
failed_when: not result.is_available
|
||||||
|
|||||||
@@ -0,0 +1,13 @@
|
|||||||
|
- hosts: website
|
||||||
|
gather_facts: no
|
||||||
|
vars:
|
||||||
|
ansible_python_interpreter: "{{ local_python_interpreter }}"
|
||||||
|
|
||||||
|
tasks:
|
||||||
|
- name: Verify account
|
||||||
|
website_ping:
|
||||||
|
login_host: "{{ jms_asset.address }}"
|
||||||
|
login_user: "{{ account.username }}"
|
||||||
|
login_password: "{{ account.secret }}"
|
||||||
|
steps: "{{ params.steps }}"
|
||||||
|
load_state: "{{ params.load_state }}"
|
||||||
@@ -0,0 +1,50 @@
|
|||||||
|
id: verify_account_website
|
||||||
|
name: "{{ 'Website account verify' | trans }}"
|
||||||
|
category: web
|
||||||
|
type:
|
||||||
|
- website
|
||||||
|
method: verify_account
|
||||||
|
priority: 50
|
||||||
|
params:
|
||||||
|
- name: load_state
|
||||||
|
type: choice
|
||||||
|
label: "{{ 'Load state' | trans }}"
|
||||||
|
choices:
|
||||||
|
- [ networkidle, "{{ 'Network idle' | trans }}" ]
|
||||||
|
- [ domcontentloaded, "{{ 'Dom content loaded' | trans }}" ]
|
||||||
|
- [ load, "{{ 'Load completed' | trans }}" ]
|
||||||
|
default: 'load'
|
||||||
|
- name: steps
|
||||||
|
type: list
|
||||||
|
label: "{{ 'Steps' | trans }}"
|
||||||
|
help_text: "{{ 'Params step help text' | trans }}"
|
||||||
|
default: []
|
||||||
|
i18n:
|
||||||
|
Website account verify:
|
||||||
|
zh: 使用 Playwright 模拟浏览器验证账号
|
||||||
|
ja: Playwright を使用してブラウザをシミュレートし、アカウントの検証を行います
|
||||||
|
en: Use Playwright to simulate a browser for account verification.
|
||||||
|
Load state:
|
||||||
|
zh: 加载状态检测
|
||||||
|
en: Load state detection
|
||||||
|
ja: ロード状態の検出
|
||||||
|
Steps:
|
||||||
|
zh: 步骤
|
||||||
|
en: Steps
|
||||||
|
ja: 手順
|
||||||
|
Network idle:
|
||||||
|
zh: 网络空闲
|
||||||
|
en: Network idle
|
||||||
|
ja: ネットワークが空いた状態
|
||||||
|
Dom content loaded:
|
||||||
|
zh: 文档内容加载完成
|
||||||
|
en: Dom content loaded
|
||||||
|
ja: ドキュメントの内容がロードされた状態
|
||||||
|
Load completed:
|
||||||
|
zh: 全部加载完成
|
||||||
|
en: All load completed
|
||||||
|
ja: すべてのロードが完了した状態
|
||||||
|
Params step help text:
|
||||||
|
zh: 配置步骤,根据配置决定任务执行步骤
|
||||||
|
ja: パラメータを設定し、設定に基づいてタスクの実行手順を決定します
|
||||||
|
en: Configure steps, and determine the task execution steps based on the configuration.
|
||||||
@@ -1,8 +1,5 @@
|
|||||||
# -*- coding: utf-8 -*-
|
# -*- coding: utf-8 -*-
|
||||||
#
|
#
|
||||||
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError
|
|
||||||
from azure.identity import ClientSecretCredential
|
|
||||||
from azure.keyvault.secrets import SecretClient
|
|
||||||
|
|
||||||
from common.utils import get_logger
|
from common.utils import get_logger
|
||||||
|
|
||||||
@@ -14,6 +11,9 @@ __all__ = ['AZUREVaultClient']
|
|||||||
class AZUREVaultClient(object):
|
class AZUREVaultClient(object):
|
||||||
|
|
||||||
def __init__(self, vault_url, tenant_id, client_id, client_secret):
|
def __init__(self, vault_url, tenant_id, client_id, client_secret):
|
||||||
|
from azure.identity import ClientSecretCredential
|
||||||
|
from azure.keyvault.secrets import SecretClient
|
||||||
|
|
||||||
authentication_endpoint = 'https://login.microsoftonline.com/' \
|
authentication_endpoint = 'https://login.microsoftonline.com/' \
|
||||||
if ('azure.net' in vault_url) else 'https://login.chinacloudapi.cn/'
|
if ('azure.net' in vault_url) else 'https://login.chinacloudapi.cn/'
|
||||||
|
|
||||||
@@ -23,6 +23,8 @@ class AZUREVaultClient(object):
|
|||||||
self.client = SecretClient(vault_url=vault_url, credential=credentials)
|
self.client = SecretClient(vault_url=vault_url, credential=credentials)
|
||||||
|
|
||||||
def is_active(self):
|
def is_active(self):
|
||||||
|
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError
|
||||||
|
|
||||||
try:
|
try:
|
||||||
self.client.set_secret('jumpserver', '666')
|
self.client.set_secret('jumpserver', '666')
|
||||||
except (ResourceNotFoundError, ClientAuthenticationError) as e:
|
except (ResourceNotFoundError, ClientAuthenticationError) as e:
|
||||||
@@ -32,6 +34,8 @@ class AZUREVaultClient(object):
|
|||||||
return True, ''
|
return True, ''
|
||||||
|
|
||||||
def get(self, name, version=None):
|
def get(self, name, version=None):
|
||||||
|
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError
|
||||||
|
|
||||||
try:
|
try:
|
||||||
secret = self.client.get_secret(name, version)
|
secret = self.client.get_secret(name, version)
|
||||||
return secret.value
|
return secret.value
|
||||||
|
|||||||
@@ -46,11 +46,16 @@ class Migration(migrations.Migration):
|
|||||||
],
|
],
|
||||||
options={
|
options={
|
||||||
'verbose_name': 'Account',
|
'verbose_name': 'Account',
|
||||||
'permissions': [('view_accountsecret', 'Can view asset account secret'),
|
'permissions': [
|
||||||
('view_historyaccount', 'Can view asset history account'),
|
('view_accountsecret', 'Can view asset account secret'),
|
||||||
('view_historyaccountsecret', 'Can view asset history account secret'),
|
('view_historyaccount', 'Can view asset history account'),
|
||||||
('verify_account', 'Can verify account'), ('push_account', 'Can push account'),
|
('view_historyaccountsecret', 'Can view asset history account secret'),
|
||||||
('remove_account', 'Can remove account')],
|
('verify_account', 'Can verify account'),
|
||||||
|
('push_account', 'Can push account'),
|
||||||
|
('remove_account', 'Can remove account'),
|
||||||
|
('view_accountsession', 'Can view session'),
|
||||||
|
('view_accountactivity', 'Can view activity')
|
||||||
|
],
|
||||||
},
|
},
|
||||||
),
|
),
|
||||||
migrations.CreateModel(
|
migrations.CreateModel(
|
||||||
|
|||||||
@@ -335,6 +335,7 @@ class Migration(migrations.Migration):
|
|||||||
],
|
],
|
||||||
options={
|
options={
|
||||||
"abstract": False,
|
"abstract": False,
|
||||||
|
"verbose_name": "Check engine",
|
||||||
},
|
},
|
||||||
),
|
),
|
||||||
migrations.CreateModel(
|
migrations.CreateModel(
|
||||||
|
|||||||
@@ -116,6 +116,8 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
|
|||||||
('verify_account', _('Can verify account')),
|
('verify_account', _('Can verify account')),
|
||||||
('push_account', _('Can push account')),
|
('push_account', _('Can push account')),
|
||||||
('remove_account', _('Can remove account')),
|
('remove_account', _('Can remove account')),
|
||||||
|
('view_accountsession', _('Can view session')),
|
||||||
|
('view_accountactivity', _('Can view activity')),
|
||||||
]
|
]
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
@@ -130,7 +132,7 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
|
|||||||
return self.asset.platform
|
return self.asset.platform
|
||||||
|
|
||||||
@lazyproperty
|
@lazyproperty
|
||||||
def alias(self):
|
def alias(self) -> str:
|
||||||
"""
|
"""
|
||||||
别称,因为有虚拟账号,@INPUT @MANUAL @USER, 否则为 id
|
别称,因为有虚拟账号,@INPUT @MANUAL @USER, 否则为 id
|
||||||
"""
|
"""
|
||||||
@@ -138,13 +140,13 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
|
|||||||
return self.username
|
return self.username
|
||||||
return str(self.id)
|
return str(self.id)
|
||||||
|
|
||||||
def is_virtual(self):
|
def is_virtual(self) -> bool:
|
||||||
"""
|
"""
|
||||||
不要用 username 去判断,因为可能是构造的 account 对象,设置了同名账号的用户名,
|
不要用 username 去判断,因为可能是构造的 account 对象,设置了同名账号的用户名,
|
||||||
"""
|
"""
|
||||||
return self.alias.startswith('@')
|
return self.alias.startswith('@')
|
||||||
|
|
||||||
def is_ds_account(self):
|
def is_ds_account(self) -> bool:
|
||||||
if self.is_virtual():
|
if self.is_virtual():
|
||||||
return ''
|
return ''
|
||||||
if not self.asset.is_directory_service:
|
if not self.asset.is_directory_service:
|
||||||
@@ -158,7 +160,7 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
|
|||||||
return self.asset.ds
|
return self.asset.ds
|
||||||
|
|
||||||
@lazyproperty
|
@lazyproperty
|
||||||
def ds_domain(self):
|
def ds_domain(self) -> str:
|
||||||
"""这个不能去掉,perm_account 会动态设置这个值,以更改 full_username"""
|
"""这个不能去掉,perm_account 会动态设置这个值,以更改 full_username"""
|
||||||
if self.is_virtual():
|
if self.is_virtual():
|
||||||
return ''
|
return ''
|
||||||
@@ -170,17 +172,17 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
|
|||||||
return '@' in self.username or '\\' in self.username
|
return '@' in self.username or '\\' in self.username
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def full_username(self):
|
def full_username(self) -> str:
|
||||||
if not self.username_has_domain() and self.ds_domain:
|
if not self.username_has_domain() and self.ds_domain:
|
||||||
return '{}@{}'.format(self.username, self.ds_domain)
|
return '{}@{}'.format(self.username, self.ds_domain)
|
||||||
return self.username
|
return self.username
|
||||||
|
|
||||||
@lazyproperty
|
@lazyproperty
|
||||||
def has_secret(self):
|
def has_secret(self) -> bool:
|
||||||
return bool(self.secret)
|
return bool(self.secret)
|
||||||
|
|
||||||
@lazyproperty
|
@lazyproperty
|
||||||
def versions(self):
|
def versions(self) -> int:
|
||||||
return self.history.count()
|
return self.history.count()
|
||||||
|
|
||||||
def get_su_from_accounts(self):
|
def get_su_from_accounts(self):
|
||||||
|
|||||||
@@ -33,7 +33,7 @@ class IntegrationApplication(JMSOrgBaseModel):
|
|||||||
return qs.filter(*query)
|
return qs.filter(*query)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def accounts_amount(self):
|
def accounts_amount(self) -> int:
|
||||||
return self.get_accounts().count()
|
return self.get_accounts().count()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
|
|||||||
@@ -68,8 +68,10 @@ class AccountRisk(JMSOrgBaseModel):
|
|||||||
related_name='risks', null=True
|
related_name='risks', null=True
|
||||||
)
|
)
|
||||||
risk = models.CharField(max_length=128, verbose_name=_('Risk'), choices=RiskChoice.choices)
|
risk = models.CharField(max_length=128, verbose_name=_('Risk'), choices=RiskChoice.choices)
|
||||||
status = models.CharField(max_length=32, choices=ConfirmOrIgnore.choices, default=ConfirmOrIgnore.pending,
|
status = models.CharField(
|
||||||
blank=True, verbose_name=_('Status'))
|
max_length=32, choices=ConfirmOrIgnore.choices, default=ConfirmOrIgnore.pending,
|
||||||
|
blank=True, verbose_name=_('Status')
|
||||||
|
)
|
||||||
details = models.JSONField(default=list, verbose_name=_('Detail'))
|
details = models.JSONField(default=list, verbose_name=_('Detail'))
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
@@ -119,6 +121,9 @@ class CheckAccountEngine(JMSBaseModel):
|
|||||||
def __str__(self):
|
def __str__(self):
|
||||||
return self.name
|
return self.name
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
verbose_name = _('Check engine')
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_default_engines():
|
def get_default_engines():
|
||||||
data = [
|
data = [
|
||||||
|
|||||||
@@ -75,11 +75,11 @@ class BaseAccount(VaultModelMixin, JMSOrgBaseModel):
|
|||||||
return bool(self.secret)
|
return bool(self.secret)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def has_username(self):
|
def has_username(self) -> bool:
|
||||||
return bool(self.username)
|
return bool(self.username)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def spec_info(self):
|
def spec_info(self) -> dict:
|
||||||
data = {}
|
data = {}
|
||||||
if self.secret_type != SecretType.SSH_KEY:
|
if self.secret_type != SecretType.SSH_KEY:
|
||||||
return data
|
return data
|
||||||
@@ -87,13 +87,13 @@ class BaseAccount(VaultModelMixin, JMSOrgBaseModel):
|
|||||||
return data
|
return data
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def password(self):
|
def password(self) -> str:
|
||||||
if self.secret_type == SecretType.PASSWORD:
|
if self.secret_type == SecretType.PASSWORD:
|
||||||
return self.secret
|
return self.secret
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def private_key(self):
|
def private_key(self) -> str:
|
||||||
if self.secret_type == SecretType.SSH_KEY:
|
if self.secret_type == SecretType.SSH_KEY:
|
||||||
return self.secret
|
return self.secret
|
||||||
return None
|
return None
|
||||||
@@ -110,7 +110,7 @@ class BaseAccount(VaultModelMixin, JMSOrgBaseModel):
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def ssh_key_fingerprint(self):
|
def ssh_key_fingerprint(self) -> str:
|
||||||
if self.public_key:
|
if self.public_key:
|
||||||
public_key = self.public_key
|
public_key = self.public_key
|
||||||
elif self.private_key:
|
elif self.private_key:
|
||||||
|
|||||||
@@ -56,7 +56,7 @@ class VaultModelMixin(models.Model):
|
|||||||
__secret = None
|
__secret = None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def secret(self):
|
def secret(self) -> str:
|
||||||
if self.__secret:
|
if self.__secret:
|
||||||
return self.__secret
|
return self.__secret
|
||||||
from accounts.backends import vault_client
|
from accounts.backends import vault_client
|
||||||
|
|||||||
@@ -18,11 +18,11 @@ class VirtualAccount(JMSOrgBaseModel):
|
|||||||
verbose_name = _('Virtual account')
|
verbose_name = _('Virtual account')
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def name(self):
|
def name(self) -> str:
|
||||||
return self.get_alias_display()
|
return self.get_alias_display()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def username(self):
|
def username(self) -> str:
|
||||||
usernames_map = {
|
usernames_map = {
|
||||||
AliasAccount.INPUT: _("Manual input"),
|
AliasAccount.INPUT: _("Manual input"),
|
||||||
AliasAccount.USER: _("Same with user"),
|
AliasAccount.USER: _("Same with user"),
|
||||||
@@ -32,7 +32,7 @@ class VirtualAccount(JMSOrgBaseModel):
|
|||||||
return usernames_map.get(self.alias, '')
|
return usernames_map.get(self.alias, '')
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def comment(self):
|
def comment(self) -> str:
|
||||||
comments_map = {
|
comments_map = {
|
||||||
AliasAccount.INPUT: _('Non-asset account, Input username/password on connect'),
|
AliasAccount.INPUT: _('Non-asset account, Input username/password on connect'),
|
||||||
AliasAccount.USER: _('The account username name same with user on connect'),
|
AliasAccount.USER: _('The account username name same with user on connect'),
|
||||||
|
|||||||
@@ -456,6 +456,8 @@ class AssetAccountBulkSerializer(
|
|||||||
|
|
||||||
|
|
||||||
class AccountSecretSerializer(SecretReadableMixin, AccountSerializer):
|
class AccountSecretSerializer(SecretReadableMixin, AccountSerializer):
|
||||||
|
spec_info = serializers.DictField(label=_('Spec info'), read_only=True)
|
||||||
|
|
||||||
class Meta(AccountSerializer.Meta):
|
class Meta(AccountSerializer.Meta):
|
||||||
fields = AccountSerializer.Meta.fields + ['spec_info']
|
fields = AccountSerializer.Meta.fields + ['spec_info']
|
||||||
extra_kwargs = {
|
extra_kwargs = {
|
||||||
@@ -470,6 +472,7 @@ class AccountSecretSerializer(SecretReadableMixin, AccountSerializer):
|
|||||||
|
|
||||||
class AccountHistorySerializer(serializers.ModelSerializer):
|
class AccountHistorySerializer(serializers.ModelSerializer):
|
||||||
secret_type = LabeledChoiceField(choices=SecretType.choices, label=_('Secret type'))
|
secret_type = LabeledChoiceField(choices=SecretType.choices, label=_('Secret type'))
|
||||||
|
secret = serializers.CharField(label=_('Secret'), read_only=True)
|
||||||
id = serializers.IntegerField(label=_('ID'), source='history_id', read_only=True)
|
id = serializers.IntegerField(label=_('ID'), source='history_id', read_only=True)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
|
|||||||
@@ -70,6 +70,8 @@ class AuthValidateMixin(serializers.Serializer):
|
|||||||
class BaseAccountSerializer(
|
class BaseAccountSerializer(
|
||||||
AuthValidateMixin, ResourceLabelsMixin, BulkOrgResourceModelSerializer
|
AuthValidateMixin, ResourceLabelsMixin, BulkOrgResourceModelSerializer
|
||||||
):
|
):
|
||||||
|
spec_info = serializers.DictField(label=_('Spec info'), read_only=True)
|
||||||
|
|
||||||
class Meta:
|
class Meta:
|
||||||
model = BaseAccount
|
model = BaseAccount
|
||||||
fields_mini = ["id", "name", "username"]
|
fields_mini = ["id", "name", "username"]
|
||||||
|
|||||||
@@ -1,8 +1,9 @@
 # -*- coding: utf-8 -*-
 #
+from django.conf import settings
 from django.utils.translation import gettext_lazy as _
 
-from accounts.const import AutomationTypes
+from accounts.const import AutomationTypes, AccountBackupType
 from accounts.models import BackupAccountAutomation
 from common.serializers.fields import EncryptedField
 from common.utils import get_logger
@@ -41,6 +42,17 @@ class BackupAccountSerializer(BaseAutomationSerializer):
             'types': {'label': _('Asset type')}
         }
 
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.set_backup_type_choices()
+
+    def set_backup_type_choices(self):
+        field_backup_type = self.fields.get("backup_type")
+        if not field_backup_type:
+            return
+        if not settings.XPACK_LICENSE_IS_VALID:
+            field_backup_type._choices.pop(AccountBackupType.object_storage, None)
+
     @property
     def model_type(self):
         return AutomationTypes.backup_account
@@ -130,7 +130,7 @@ class ChangeSecretRecordSerializer(serializers.ModelSerializer):
         read_only_fields = fields
 
     @staticmethod
-    def get_is_success(obj):
+    def get_is_success(obj) -> bool:
         return obj.status == ChangeSecretRecordStatusChoice.success
 
 
@@ -157,7 +157,7 @@ class ChangeSecretRecordBackUpSerializer(serializers.ModelSerializer):
         read_only_fields = fields
 
     @staticmethod
-    def get_asset(instance):
+    def get_asset(instance) -> str:
         return str(instance.asset)
 
     @staticmethod
@@ -165,7 +165,7 @@ class ChangeSecretRecordBackUpSerializer(serializers.ModelSerializer):
         return str(instance.account)
 
     @staticmethod
-    def get_is_success(obj):
+    def get_is_success(obj) -> str:
         if obj.status == ChangeSecretRecordStatusChoice.success.value:
             return _("Success")
         return _("Failed")
@@ -196,9 +196,9 @@ class ChangeSecretAccountSerializer(serializers.ModelSerializer):
         read_only_fields = fields
 
     @staticmethod
-    def get_meta(obj):
+    def get_meta(obj) -> dict:
         return account_secret_task_status.get(str(obj.id))
 
     @staticmethod
-    def get_ttl(obj):
+    def get_ttl(obj) -> int:
         return account_secret_task_status.get_ttl(str(obj.id))
@@ -69,7 +69,7 @@ class AssetRiskSerializer(serializers.Serializer):
     risk_summary = serializers.SerializerMethodField()
 
     @staticmethod
-    def get_risk_summary(obj):
+    def get_risk_summary(obj) -> dict:
         summary = {}
         for risk in RiskChoice.choices:
             summary[f"{risk[0]}_count"] = obj.get(f"{risk[0]}_count", 0)
@@ -28,7 +28,7 @@ class DiscoverAccountAutomationSerializer(BaseAutomationSerializer):
                   + read_only_fields)
         extra_kwargs = {
             'check_risk': {
-                'help_text': _('Whether to check the risk of the gathered accounts.'),
+                'help_text': _('Whether to check the risk of the discovered accounts.'),
             },
             **BaseAutomationSerializer.Meta.extra_kwargs
         }
@@ -1,4 +1,5 @@
 import datetime
+from collections import defaultdict
 
 from celery import shared_task
 from django.db.models import Q
@@ -72,24 +73,43 @@ def execute_automation_record_task(record_ids, tp):
     task_name = gettext_noop('Execute automation record')
 
     with tmp_to_root_org():
-        records = ChangeSecretRecord.objects.filter(id__in=record_ids)
+        records = ChangeSecretRecord.objects.filter(id__in=record_ids).order_by('-date_updated')
 
     if not records:
-        logger.error('No automation record found: {}'.format(record_ids))
+        logger.error(f'No automation record found: {record_ids}')
         return
 
-    record = records[0]
-    record_map = {f'{record.asset_id}-{record.account_id}': str(record.id) for record in records}
-    task_snapshot = {
-        'params': {},
-        'record_map': record_map,
-        'secret': record.new_secret,
-        'secret_type': record.execution.snapshot.get('secret_type'),
-        'assets': [str(instance.asset_id) for instance in records],
-        'accounts': [str(instance.account_id) for instance in records],
-    }
-    with tmp_to_org(record.execution.org_id):
-        quickstart_automation_by_snapshot(task_name, tp, task_snapshot)
+    seen_accounts = set()
+    unique_records = []
+    for rec in records:
+        acct = str(rec.account_id)
+        if acct not in seen_accounts:
+            seen_accounts.add(acct)
+            unique_records.append(rec)
+
+    exec_groups = defaultdict(list)
+    for rec in unique_records:
+        exec_groups[rec.execution_id].append(rec)
+
+    for __, group in exec_groups.items():
+        latest_rec = group[0]
+        snapshot = getattr(latest_rec.execution, 'snapshot', {}) or {}
+
+        record_map = {f"{r.asset_id}-{r.account_id}": str(r.id) for r in group}
+        assets = [str(r.asset_id) for r in group]
+        accounts = [str(r.account_id) for r in group]
+
+        task_snapshot = {
+            'params': {},
+            'record_map': record_map,
+            'secret': latest_rec.new_secret,
+            'secret_type': snapshot.get('secret_type'),
+            'assets': assets,
+            'accounts': accounts,
+        }
+
+        with tmp_to_org(latest_rec.execution.org_id):
+            quickstart_automation_by_snapshot(task_name, tp, task_snapshot)
 
 
 @shared_task(
@@ -11,4 +11,4 @@ class ActionChoices(models.TextChoices):
     notify_and_warn = 'notify_and_warn', _('Prompt and warn')
     face_verify = 'face_verify', _('Face verify')
     face_online = 'face_online', _('Face online')
-    change_secret = 'change_secret', _('Change secret')
+    change_secret = 'change_secret', _('Secret rotation')
@@ -5,7 +5,7 @@ from django.utils.translation import gettext_lazy as _
 from common.db.fields import JSONManyToManyField
 from common.db.models import JMSBaseModel
 from common.utils import contains_ip
-from common.utils.time_period import contains_time_period
+from common.utils.timezone import contains_time_period
 from orgs.mixins.models import OrgModelMixin, OrgManager
 from ..const import ActionChoices
 
@@ -34,16 +34,16 @@ class CommandGroup(JMSOrgBaseModel):
 
     @lazyproperty
     def pattern(self):
+        content = self.content.replace('\r\n', '\n')
         if self.type == 'command':
-            s = self.construct_command_regex(self.content)
+            s = self.construct_command_regex(content)
         else:
-            s = r'{0}'.format(self.content)
+            s = r'{0}'.format(r'{}'.format('|'.join(content.split('\n'))))
         return s
 
     @classmethod
     def construct_command_regex(cls, content):
         regex = []
-        content = content.replace('\r\n', '\n')
        for _cmd in content.split('\n'):
             cmd = re.sub(r'\s+', ' ', _cmd)
             cmd = re.escape(cmd)
@@ -1,4 +1,4 @@
-from orgs.mixins.serializers import BulkOrgResourceModelSerializer
+from common.serializers.mixin import CommonBulkModelSerializer
 from .base import BaseUserAssetAccountACLSerializer as BaseSerializer
 from ..const import ActionChoices
 from ..models import ConnectMethodACL
@@ -6,16 +6,15 @@ from ..models import ConnectMethodACL
 __all__ = ["ConnectMethodACLSerializer"]
 
 
-class ConnectMethodACLSerializer(BaseSerializer, BulkOrgResourceModelSerializer):
+class ConnectMethodACLSerializer(BaseSerializer, CommonBulkModelSerializer):
     class Meta(BaseSerializer.Meta):
         model = ConnectMethodACL
         fields = [
             i for i in BaseSerializer.Meta.fields + ['connect_methods']
-            if i not in ['assets', 'accounts']
+            if i not in ['assets', 'accounts', 'org_id']
         ]
         action_choices_exclude = BaseSerializer.Meta.action_choices_exclude + [
             ActionChoices.review,
-            ActionChoices.accept,
             ActionChoices.notice,
             ActionChoices.face_verify,
             ActionChoices.face_online,
@@ -1,7 +1,7 @@
 from django.utils.translation import gettext as _
 
+from common.serializers import CommonBulkModelSerializer
 from common.serializers import MethodSerializer
-from orgs.mixins.serializers import BulkOrgResourceModelSerializer
 from .base import BaseUserACLSerializer
 from .rules import RuleSerializer
 from ..const import ActionChoices
@@ -12,12 +12,12 @@ __all__ = ["LoginACLSerializer"]
 common_help_text = _("With * indicating a match all. ")
 
 
-class LoginACLSerializer(BaseUserACLSerializer, BulkOrgResourceModelSerializer):
+class LoginACLSerializer(BaseUserACLSerializer, CommonBulkModelSerializer):
     rules = MethodSerializer(label=_('Rule'))
 
     class Meta(BaseUserACLSerializer.Meta):
         model = LoginACL
-        fields = BaseUserACLSerializer.Meta.fields + ['rules', ]
+        fields = list((set(BaseUserACLSerializer.Meta.fields) | {'rules'}) - {'org_id'})
         action_choices_exclude = [
             ActionChoices.warning,
             ActionChoices.notify_and_warn,
@@ -1,5 +1,7 @@
 # coding: utf-8
 #
+from urllib.parse import urlparse
+
 from django.utils.translation import gettext_lazy as _
 from rest_framework import serializers
 
@@ -8,7 +10,7 @@ from common.utils.ip import is_ip_address, is_ip_network, is_ip_segment
 
 logger = get_logger(__file__)
 
-__all__ = ['RuleSerializer', 'ip_group_child_validator', 'ip_group_help_text']
+__all__ = ['RuleSerializer', 'ip_group_child_validator', 'ip_group_help_text', 'address_validator']
 
 
 def ip_group_child_validator(ip_group_child):
@@ -21,6 +23,19 @@ def ip_group_child_validator(ip_group_child):
         raise serializers.ValidationError(error)
 
 
+def address_validator(value):
+    parsed = urlparse(value)
+    is_basic_url = parsed.scheme in ('http', 'https') and parsed.netloc
+    is_valid = value == '*' \
+        or is_ip_address(value) \
+        or is_ip_network(value) \
+        or is_ip_segment(value) \
+        or is_basic_url
+    if not is_valid:
+        error = _('address invalid: `{}`').format(value)
+        raise serializers.ValidationError(error)
+
+
 ip_group_help_text = _(
     'With * indicating a match all. '
     'Such as: '
@@ -16,6 +16,7 @@ class CategoryViewSet(ListModelMixin, JMSGenericViewSet):
         'types': TypeSerializer,
     }
     permission_classes = (IsValidUser,)
+    default_limit = None
 
     def get_queryset(self):
         return AllTypes.categories()
@@ -14,6 +14,7 @@ class FavoriteAssetViewSet(BulkModelViewSet):
     serializer_class = FavoriteAssetSerializer
     permission_classes = (IsValidUser,)
     filterset_fields = ['asset']
+    default_limit = None
 
     def dispatch(self, request, *args, **kwargs):
         with tmp_to_root_org():
@@ -7,15 +7,18 @@ from rest_framework.decorators import action
 from rest_framework.response import Response
 
 from assets.const import AllTypes
-from assets.models import Platform, Node, Asset, PlatformProtocol
+from assets.models import Platform, Node, Asset, PlatformProtocol, PlatformAutomation
 from assets.serializers import PlatformSerializer, PlatformProtocolSerializer, PlatformListSerializer
 from common.api import JMSModelViewSet
 from common.permissions import IsValidUser
 from common.serializers import GroupedChoiceSerializer
+from rbac.models import RoleBinding
 
 __all__ = ['AssetPlatformViewSet', 'PlatformAutomationMethodsApi', 'PlatformProtocolViewSet']
 
 
+
+
 class PlatformFilter(filters.FilterSet):
     name__startswith = filters.CharFilter(field_name='name', lookup_expr='istartswith')
 
@@ -40,6 +43,7 @@ class AssetPlatformViewSet(JMSModelViewSet):
         'ops_methods': 'assets.view_platform',
         'filter_nodes_assets': 'assets.view_platform',
     }
+    default_limit = None
 
     def get_queryset(self):
         # Pagination logic is not used here, so prefetch is needed
@@ -63,6 +67,13 @@ class AssetPlatformViewSet(JMSModelViewSet):
             return super().get_object()
         return self.get_queryset().get(name=pk)
 
+
+    def check_permissions(self, request):
+        if self.action == 'list' and RoleBinding.is_org_admin(request.user):
+            return True
+        else:
+            return super().check_permissions(request)
+
     def check_object_permissions(self, request, obj):
         if request.method.lower() in ['delete', 'put', 'patch'] and obj.internal:
             self.permission_denied(
@@ -102,6 +113,7 @@ class PlatformProtocolViewSet(JMSModelViewSet):
 
 class PlatformAutomationMethodsApi(generics.ListAPIView):
     permission_classes = (IsValidUser,)
+    queryset = PlatformAutomation.objects.none()
 
     @staticmethod
     def automation_methods():
@@ -1,8 +1,8 @@
 from rest_framework.generics import ListAPIView
 
 from assets import serializers
-from assets.const import Protocol
 from common.permissions import IsValidUser
+from assets.models import Protocol
 
 __all__ = ['ProtocolListApi']
 
@@ -13,3 +13,13 @@ class ProtocolListApi(ListAPIView):
 
     def get_queryset(self):
         return list(Protocol.protocols())
+
+    def filter_queryset(self, queryset):
+        search = self.request.query_params.get("search", "").lower().strip()
+        if not search:
+            return queryset
+        queryset = [
+            p for p in queryset
+            if search in p['label'].lower() or search in p['value'].lower()
+        ]
+        return queryset
@@ -161,6 +161,7 @@ class CategoryTreeApi(SerializeToTreeNodeMixin, generics.ListAPIView):
         'GET': 'assets.view_asset',
         'list': 'assets.view_asset',
     }
+    queryset = Node.objects.none()
 
     def get_assets(self):
         key = self.request.query_params.get('key')
@@ -123,9 +123,7 @@ class BaseManager:
         self.execution.summary = self.summary
         self.execution.result = self.result
         self.execution.status = self.status
-
-        with safe_atomic_db_connection():
-            self.execution.save()
+        self.execution.save()
 
     def print_summary(self):
         content = "\nSummery: \n"
@@ -157,7 +155,7 @@ class BaseManager:
         report = self.gen_report()
         report = transform(report, cssutils_logging_level="CRITICAL")
         subject = self.get_report_subject()
-        emails = [r.email for r in recipients if r.email]
+        emails = [user.email]
         send_mail_async(subject, report, emails, html_message=report)
 
     def gen_report(self):
@@ -167,9 +165,10 @@ class BaseManager:
         return data
 
     def post_run(self):
-        self.update_execution()
-        self.print_summary()
-        self.send_report_if_need()
+        with safe_atomic_db_connection():
+            self.update_execution()
+            self.print_summary()
+            self.send_report_if_need()
 
     def run(self, *args, **kwargs):
         self.pre_run()
@@ -202,14 +201,17 @@ class PlaybookPrepareMixin:
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
         # example: {'gather_fact_windows': {'id': 'gather_fact_windows', 'name': '', 'method': 'gather_fact', ...} }
-        self.method_id_meta_mapper = {
+        self.method_id_meta_mapper = self.get_method_id_meta_mapper()
+        # Group by execution method; change-secret, push and similar operations on different assets may use different execution methods
+        # Then group by execution method and by bulk_size to generate different playbooks
+        self.playbooks = []
+
+    def get_method_id_meta_mapper(self):
+        return {
             method["id"]: method
             for method in self.platform_automation_methods
             if method["method"] == self.__class__.method_type()
         }
-        # Group by execution method; change-secret, push and similar operations on different assets may use different execution methods
-        # Then group by execution method and by bulk_size to generate different playbooks
-        self.playbooks = []
 
     @classmethod
     def method_type(cls):
@@ -548,7 +550,8 @@ class BasePlaybookManager(PlaybookPrepareMixin, BaseManager):
         try:
             kwargs.update({"clean_workspace": False})
             cb = runner.run(**kwargs)
-            self.on_runner_success(runner, cb)
+            with safe_atomic_db_connection():
+                self.on_runner_success(runner, cb)
         except Exception as e:
             self.on_runner_failed(runner, e, **info)
         finally:
@@ -11,15 +11,20 @@ class FormatAssetInfo:
     @staticmethod
     def get_cpu_model_count(cpus):
         try:
-            models = [cpus[i + 1] + " " + cpus[i + 2] for i in range(0, len(cpus), 3)]
-
+            if len(cpus) % 3 == 0:
+                step = 3
+                models = [cpus[i + 2] for i in range(0, len(cpus), step)]
+            elif len(cpus) % 2 == 0:
+                step = 2
+                models = [cpus[i + 1] for i in range(0, len(cpus), step)]
+            else:
+                raise ValueError("CPU list format not recognized")
             model_counts = Counter(models)
-
             result = ', '.join([f"{model} x{count}" for model, count in model_counts.items()])
         except Exception as e:
             print(f"Error processing CPU model list: {e}")
             result = ''
 
         return result
 
     @staticmethod
@@ -3,7 +3,8 @@
   vars:
     ansible_shell_type: sh
     ansible_connection: local
-    ansible_python_interpreter: /opt/py3/bin/python
+    ansible_python_interpreter: "{{ local_python_interpreter }}"
+    ansible_timeout: 30
 
   tasks:
     - name: Test asset connection (pyfreerdp)
@@ -4,7 +4,7 @@
     ansible_connection: local
     ansible_shell_type: sh
     ansible_become: false
-
+    ansible_timeout: 30
   tasks:
     - name: Test asset connection (paramiko)
       ssh_ping:
@@ -3,7 +3,7 @@
   vars:
     ansible_connection: local
     ansible_shell_type: sh
-
+    ansible_timeout: 30
   tasks:
     - name: Test asset connection (telnet)
      telnet_ping:
@@ -2,6 +2,7 @@
   gather_facts: no
   vars:
     ansible_python_interpreter: "{{ local_python_interpreter }}"
+    ansible_timeout: 30
 
   tasks:
     - name: Test MongoDB connection
@@ -16,3 +17,5 @@
         ssl_certfile: "{{ jms_asset.secret_info.client_key | default('') }}"
         connection_options:
           - tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
+      register: result
+      failed_when: not result.is_available
@@ -6,6 +6,7 @@
     ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
     ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
     ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"
+    ansible_timeout: 30
 
   tasks:
     - name: Test MySQL connection
@@ -2,6 +2,7 @@
   gather_facts: no
   vars:
     ansible_python_interpreter: "{{ local_python_interpreter }}"
+    ansible_timeout: 30
 
   tasks:
     - name: Test Oracle connection
@@ -6,7 +6,7 @@
     ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
     ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
     ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"
-
+    ansible_timeout: 30
   tasks:
     - name: Test PostgreSQL connection
       community.postgresql.postgresql_ping:
@@ -2,6 +2,7 @@
   gather_facts: no
   vars:
     ansible_python_interpreter: "{{ local_python_interpreter }}"
+    ansible_timeout: 30
 
   tasks:
     - name: Test SQLServer connection
apps/assets/automations/ping/web/website/main.yml (new file, 13 lines)
@@ -0,0 +1,13 @@
+- hosts: website
+  gather_facts: no
+  vars:
+    ansible_python_interpreter: "{{ local_python_interpreter }}"
+
+  tasks:
+    - name: Test Website connection
+      website_ping:
+        login_host: "{{ jms_asset.address }}"
+        login_user: "{{ jms_account.username }}"
+        login_password: "{{ jms_account.secret }}"
+        steps: "{{ params.steps }}"
+        load_state: "{{ params.load_state }}"
apps/assets/automations/ping/web/website/manifest.yml (new file, 50 lines)
@@ -0,0 +1,50 @@
+id: website_ping
+name: "{{ 'Website ping' | trans }}"
+method: ping
+category:
+  - web
+type:
+  - website
+params:
+  - name: load_state
+    type: choice
+    label: "{{ 'Load state' | trans }}"
+    choices:
+      - [ networkidle, "{{ 'Network idle' | trans }}" ]
+      - [ domcontentloaded, "{{ 'Dom content loaded' | trans }}" ]
+      - [ load, "{{ 'Load completed' | trans }}" ]
+    default: 'load'
+  - name: steps
+    type: list
+    default: []
+    label: "{{ 'Steps' | trans }}"
+    help_text: "{{ 'Params step help text' | trans }}"
+i18n:
+  Website ping:
+    zh: 使用 Playwright 模拟浏览器测试可连接性
+    en: Use Playwright to simulate a browser for connectivity testing
+    ja: Playwright を使用してブラウザをシミュレートし、接続性テストを実行する
+  Load state:
+    zh: 加载状态检测
+    en: Load state detection
+    ja: ロード状態の検出
+  Steps:
+    zh: 步骤
+    en: Steps
+    ja: 手順
+  Network idle:
+    zh: 网络空闲
+    en: Network idle
+    ja: ネットワークが空いた状態
+  Dom content loaded:
+    zh: 文档内容加载完成
+    en: Dom content loaded
+    ja: ドキュメントの内容がロードされた状態
+  Load completed:
+    zh: 全部加载完成
+    en: All load completed
+    ja: すべてのロードが完了した状態
+  Params step help text:
+    zh: 配置步骤,根据配置决定任务执行步骤
+    ja: パラメータを設定し、設定に基づいてタスクの実行手順を決定します
+    en: Configure steps, and determine the task execution steps based on the configuration.
@@ -14,6 +14,10 @@ class Connectivity(TextChoices):
     NTLM_ERR = 'ntlm_err', _('NTLM credentials rejected error')
     CREATE_TEMPORARY_ERR = 'create_temp_err', _('Create temporary error')
 
+    @classmethod
+    def as_dict(cls):
+        return {choice.value: choice.label for choice in cls}
+
 
 class AutomationTypes(TextChoices):
     ping = 'ping', _('Ping')
@@ -20,3 +20,7 @@ class Category(ChoicesMixin, models.TextChoices):
         _category = getattr(cls, category.upper(), None)
         choices = [(_category.value, _category.label)] if _category else cls.choices
         return choices
+
+    @classmethod
+    def as_dict(cls):
+        return {choice.value: choice.label for choice in cls}
@@ -194,6 +194,12 @@ class Protocol(ChoicesMixin, models.TextChoices):
                     'default': '>=2014',
                     'label': _('Version'),
                     'help_text': _('SQL Server version, Different versions have different connection drivers')
+                },
+                'encrypt': {
+                    'type': 'bool',
+                    'default': True,
+                    'label': _('Encrypt'),
+                    'help_text': _('Whether to use TLS encryption.')
                 }
             }
         },
@@ -343,7 +349,7 @@ class Protocol(ChoicesMixin, models.TextChoices):
         for protocol, config in cls.settings().items():
             if not xpack_enabled and config.get('xpack', False):
                 continue
-            protocols.append(protocol)
+            protocols.append({'label': protocol.label, 'value': protocol.value})
 
         from assets.models.platform import PlatformProtocol
         custom_protocols = (
@@ -20,13 +20,17 @@ class WebTypes(BaseType):
     def _get_automation_constrains(cls) -> dict:
         constrains = {
             '*': {
-                'ansible_enabled': False,
-                'ping_enabled': False,
+                'ansible_enabled': True,
+                'ansible_config': {
+                    'ansible_connection': 'local',
+                },
+                'ping_enabled': True,
                 'gather_facts_enabled': False,
-                'verify_account_enabled': False,
-                'change_secret_enabled': False,
+                'verify_account_enabled': True,
+                'change_secret_enabled': True,
                 'push_account_enabled': False,
                 'gather_accounts_enabled': False,
+                'remove_account_enabled': False,
             }
         }
         return constrains
@@ -112,7 +112,7 @@ class Protocol(models.Model):
         return protocols[0] if len(protocols) > 0 else {}
 
     @property
-    def setting(self):
+    def setting(self) -> dict:
         if self._setting is not None:
             return self._setting
         return self.asset_platform_protocol.get('setting', {})
@@ -122,7 +122,7 @@ class Protocol(models.Model):
         self._setting = value
 
     @property
-    def public(self):
+    def public(self) -> bool:
         return self.asset_platform_protocol.get('public', True)
 
 
@@ -210,7 +210,7 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
         return self.category == const.Category.DS and hasattr(self, 'ds')
 
     @lazyproperty
-    def spec_info(self):
+    def spec_info(self) -> dict:
         instance = getattr(self, self.category, None)
         if not instance:
             return {}
@@ -240,7 +240,7 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
         return info
 
     @lazyproperty
-    def auto_config(self):
+    def auto_config(self) -> dict:
         platform = self.platform
         auto_config = {
             'su_enabled': platform.su_enabled,
@@ -343,11 +343,11 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
         return names
 
     @lazyproperty
-    def type(self):
+    def type(self) -> str:
         return self.platform.type
 
     @lazyproperty
-    def category(self):
+    def category(self) -> str:
         return self.platform.category
 
     def is_category(self, category):
@@ -53,7 +53,7 @@ class BaseAutomation(PeriodTaskModelMixin, JMSOrgBaseModel):
         return name
 
     def get_all_assets(self):
-        nodes = self.nodes.all()
+        nodes = self.nodes.only("id", "key")
         node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list("id", flat=True)
         direct_asset_ids = self.assets.all().values_list("id", flat=True)
         asset_ids = set(list(direct_asset_ids) + list(node_asset_ids))
@@ -573,7 +573,7 @@ class Node(JMSOrgBaseModel, SomeNodesMixin, FamilyMixin, NodeAssetsMixin):
         return not self.__gt__(other)
 
     @property
-    def name(self):
+    def name(self) -> str:
         return self.value
 
     def computed_full_value(self):
@@ -25,7 +25,7 @@ class PlatformProtocol(models.Model):
         return '{}/{}'.format(self.name, self.port)
 
     @property
-    def secret_types(self):
+    def secret_types(self) -> list:
         return Protocol.settings().get(self.name, {}).get('secret_types', ['password'])
 
     @lazyproperty
@@ -147,6 +147,7 @@ class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, Writa
     protocols = AssetProtocolsSerializer(many=True, required=False, label=_('Protocols'), default=())
     accounts = AssetAccountSerializer(many=True, required=False, allow_null=True, write_only=True, label=_('Accounts'))
     nodes_display = NodeDisplaySerializer(read_only=False, required=False, label=_("Node path"))
+    auto_config = serializers.DictField(read_only=True, label=_('Auto info'))
     platform = ObjectRelatedField(queryset=Platform.objects, required=True, label=_('Platform'),
                                   attrs=('id', 'name', 'type'))
     accounts_amount = serializers.IntegerField(read_only=True, label=_('Accounts amount'))
@@ -425,6 +426,18 @@ class DetailMixin(serializers.Serializer):
     gathered_info = MethodSerializer(label=_('Gathered info'), read_only=True)
     auto_config = serializers.DictField(read_only=True, label=_('Auto info'))
 
+    @staticmethod
+    def get_auto_config(obj) -> dict:
+        return obj.auto_config
+
+    @staticmethod
+    def get_gathered_info(obj) -> dict:
+        return obj.gathered_info
+
+    @staticmethod
+    def get_spec_info(obj) -> dict:
+        return obj.spec_info
+
     def get_instance(self):
         request = self.context.get('request')
         if not self.instance and UUID_PATTERN.findall(request.path):
@@ -1,10 +1,11 @@
 from django.db.models import QuerySet
-from django.utils.translation import gettext_lazy as _
+from django.utils.translation import gettext_lazy as _, get_language
 
 from assets.models import Custom, Platform, Asset
 from common.const import UUID_PATTERN
 from common.serializers import create_serializer_class
 from common.serializers.common import DictSerializer, MethodSerializer
+from terminal.models import Applet
 from .common import AssetSerializer
 
 __all__ = ['CustomSerializer']
@@ -47,8 +48,38 @@ class CustomSerializer(AssetSerializer):
 
         if not platform:
             return default_field
+
         custom_fields = platform.custom_fields
+
         if not custom_fields:
             return default_field
         name = platform.name.title() + 'CustomSerializer'
+
+        applet = Applet.objects.filter(
+            name=platform.created_by.replace('Applet:', '')
+        ).first()
+
+        if not applet:
+            return create_serializer_class(name, custom_fields)()
+
+        i18n = applet.manifest.get('i18n', {})
+
+        lang = get_language()
+        lang_short = lang[:2]
+
+        def translate_text(key):
+            return (
+                i18n.get(key, {}).get(lang)
+                or i18n.get(key, {}).get(lang_short)
+                or key
+            )
+
+        for field in custom_fields:
+            label = field.get('label')
+            help_text = field.get('help_text')
+            if label:
+                field['label'] = translate_text(label)
+            if help_text:
+                field['help_text'] = translate_text(help_text)
+
         return create_serializer_class(name, custom_fields)()
@@ -19,11 +19,13 @@ __all__ = [
 class BaseAutomationSerializer(PeriodTaskSerializerMixin, BulkOrgResourceModelSerializer):
     assets = ObjectRelatedField(many=True, required=False, queryset=Asset.objects, label=_('Assets'))
     nodes = ObjectRelatedField(many=True, required=False, queryset=Node.objects, label=_('Nodes'))
+    executed_amount = serializers.IntegerField(read_only=True, label=_('Executed amount'))
 
     class Meta:
         read_only_fields = [
             'date_created', 'date_updated', 'created_by',
-            'periodic_display', 'executed_amount', 'type', 'last_execution_date'
+            'periodic_display', 'executed_amount', 'type',
+            'last_execution_date',
         ]
         mini_fields = [
             'id', 'name', 'type', 'is_periodic', 'interval',
@@ -32,7 +32,7 @@ from rbac.permissions import RBACPermission
 from terminal.models import default_storage
 from users.models import User
 from .backends import TYPE_ENGINE_MAPPING
-from .const import ActivityChoices
+from .const import ActivityChoices, ActionChoices
 from .filters import UserSessionFilterSet, OperateLogFilterSet
 from .models import (
     FTPLog, UserLoginLog, OperateLog, PasswordChangeLog,
@@ -45,7 +45,7 @@ from .serializers import (
     FileSerializer, UserSessionSerializer, JobsAuditSerializer,
     ServiceAccessLogSerializer
 )
-from .utils import construct_userlogin_usernames
+from .utils import construct_userlogin_usernames, record_operate_log_and_activity_log
 
 logger = get_logger(__name__)
 
@@ -126,6 +126,11 @@ class FTPLogViewSet(OrgModelViewSet):
         response['Content-Type'] = 'application/octet-stream'
         filename = escape_uri_path(ftp_log.filename)
         response["Content-Disposition"] = "attachment; filename*=UTF-8''{}".format(filename)
+
+        record_operate_log_and_activity_log(
+            [ftp_log.id], ActionChoices.download, '', self.model,
+            resource_display=f'{ftp_log.asset}: {ftp_log.filename}',
+        )
         return response
 
     @action(methods=[POST], detail=True, permission_classes=[IsServiceAccount, ], serializer_class=FileSerializer)
@@ -167,10 +172,7 @@ class UserLoginLogViewSet(UserLoginCommonMixin, OrgReadonlyModelViewSet):
 
     def get_queryset(self):
         queryset = super().get_queryset()
-        if current_org.is_root() or not settings.XPACK_ENABLED:
-            return queryset
-        users = self.get_org_member_usernames()
-        queryset = queryset.filter(username__in=users)
+        queryset = queryset.model.filter_queryset_by_org(queryset)
         return queryset
 
 
@@ -292,12 +294,7 @@ class PasswordChangeLogViewSet(OrgReadonlyModelViewSet):
 
     def get_queryset(self):
         queryset = super().get_queryset()
-        if not current_org.is_root():
-            users = current_org.get_members()
-            queryset = queryset.filter(
-                user__in=[str(user) for user in users]
-            )
-        return queryset
+        return self.model.filter_queryset_by_org(queryset)
 
 
 class UserSessionViewSet(CommonApiMixin, viewsets.ModelViewSet):
@@ -35,6 +35,7 @@ class OperateLogStore(ES, metaclass=Singleton):
             }
         }
         exact_fields = {}
+        fuzzy_fields = {}
         match_fields = {
             'id', 'user', 'action', 'resource_type',
             'resource', 'remote_addr', 'org_id'
@@ -44,7 +45,7 @@ class OperateLogStore(ES, metaclass=Singleton):
         }
         if not config.get('INDEX'):
             config['INDEX'] = 'jumpserver_operate_log'
-        super().__init__(config, properties, keyword_fields, exact_fields, match_fields)
+        super().__init__(config, properties, keyword_fields, exact_fields, fuzzy_fields, match_fields)
         self.pre_use_check()
 
     @staticmethod
@@ -1,6 +1,6 @@
 import os
 import uuid
-from datetime import timedelta
+from datetime import timedelta, datetime
 from importlib import import_module
 
 from django.conf import settings
@@ -40,7 +40,7 @@ __all__ = [
 
 class JobLog(JobExecution):
     @property
-    def creator_name(self):
+    def creator_name(self) -> str:
         return self.creator.name
 
     class Meta:
@@ -73,6 +73,9 @@ class FTPLog(OrgModelMixin):
             models.Index(fields=['date_start', 'org_id'], name='idx_date_start_org'),
         ]
 
+    def __str__(self):
+        return "{0.id} of {0.user} to {0.asset}".format(self)
+
     @property
     def filepath(self):
         return os.path.join(self.upload_to, self.date_start.strftime('%Y-%m-%d'), str(self.id))
@@ -186,6 +189,15 @@ class PasswordChangeLog(models.Model):
     class Meta:
         verbose_name = _("Password change log")
 
+    @staticmethod
+    def filter_queryset_by_org(queryset):
+        if not current_org.is_root():
+            users = current_org.get_members()
+            queryset = queryset.filter(
+                user__in=[str(user) for user in users]
+            )
+        return queryset
+
 
 class UserLoginLog(models.Model):
     id = models.UUIDField(default=uuid.uuid4, primary_key=True)
@@ -220,7 +232,7 @@ class UserLoginLog(models.Model):
         return '%s(%s)' % (self.username, self.city)
 
     @property
-    def backend_display(self):
+    def backend_display(self) -> str:
         return gettext(self.backend)
 
     @classmethod
@@ -246,7 +258,7 @@ class UserLoginLog(models.Model):
         return login_logs
 
     @property
-    def reason_display(self):
+    def reason_display(self) -> str:
         from authentication.errors import reason_choices, old_reason_choices
 
         reason = reason_choices.get(self.reason)
@@ -255,6 +267,15 @@ class UserLoginLog(models.Model):
             reason = old_reason_choices.get(self.reason, self.reason)
         return reason
 
+    @staticmethod
+    def filter_queryset_by_org(queryset):
+        from audits.utils import construct_userlogin_usernames
+        if current_org.is_root() or not settings.XPACK_ENABLED:
+            return queryset
+        user_queryset = current_org.get_members()
+        users = construct_userlogin_usernames(user_queryset)
+        return queryset.filter(username__in=users)
+
     class Meta:
         ordering = ["-datetime", "username"]
         verbose_name = _("User login log")
@@ -279,15 +300,15 @@ class UserSession(models.Model):
         return '%s(%s)' % (self.user, self.ip)
 
     @property
-    def backend_display(self):
+    def backend_display(self) -> str:
         return gettext(self.backend)
 
     @property
-    def is_active(self):
+    def is_active(self) -> bool:
         return user_session_manager.check_active(self.key)
 
     @property
-    def date_expired(self):
+    def date_expired(self) -> datetime:
         session_store_cls = import_module(settings.SESSION_ENGINE).SessionStore
         session_store = session_store_cls(session_key=self.key)
         cache_key = session_store.cache_key
@@ -119,11 +119,11 @@ class OperateLogSerializer(BulkOrgResourceModelSerializer):
         fields = fields_small
 
     @staticmethod
-    def get_resource_type(instance):
+    def get_resource_type(instance) -> str:
         return _(instance.resource_type)
 
     @staticmethod
-    def get_resource(instance):
+    def get_resource(instance) -> str:
         return i18n_trans(instance.resource)
 
 
@@ -147,11 +147,11 @@ class ActivityUnionLogSerializer(serializers.Serializer):
     r_type = serializers.CharField(read_only=True)
 
     @staticmethod
-    def get_timestamp(obj):
+    def get_timestamp(obj) -> str:
         return as_current_tz(obj['datetime']).strftime('%Y-%m-%d %H:%M:%S')
 
     @staticmethod
-    def get_content(obj):
+    def get_content(obj) -> str:
         if not obj['r_detail']:
             action = obj['r_action'].replace('_', ' ').capitalize()
             ctn = _('%s %s this resource') % (obj['r_user'], _(action).lower())
@@ -160,7 +160,7 @@ class ActivityUnionLogSerializer(serializers.Serializer):
         return ctn
 
     @staticmethod
-    def get_detail_url(obj):
+    def get_detail_url(obj) -> str:
         detail_url = ''
         detail_id, obj_type = obj['r_detail_id'], obj['r_type']
         if not detail_id:
@@ -210,7 +210,7 @@ class UserSessionSerializer(serializers.ModelSerializer):
         "backend_display": {"label": _("Auth backend display")},
     }
 
-    def get_is_current_user_session(self, obj):
+    def get_is_current_user_session(self, obj) -> bool:
         request = self.context.get('request')
         if not request:
             return False
@@ -89,6 +89,8 @@ def create_activities(resource_ids, detail, detail_id, action, org_id):
     for activity in activities:
         create_activity(activity)
 
+    create_activity.finish()
+
 
 @signals.after_task_publish.connect
 def after_task_publish_for_activity_log(headers=None, body=None, **kwargs):
@@ -180,7 +180,7 @@ def on_django_start_set_operate_log_monitor_models(sender, **kwargs):
         'PlatformAutomation', 'PlatformProtocol', 'Protocol',
         'HistoricalAccount', 'GatheredUser', 'ApprovalRule',
         'BaseAutomation', 'CeleryTask', 'Command', 'JobLog',
-        'ConnectionToken', 'SessionJoinRecord',
+        'ConnectionToken', 'SessionJoinRecord', 'SessionSharing',
         'HistoricalJob', 'Status', 'TicketStep', 'Ticket',
         'UserAssetGrantedTreeNodeRelation', 'TicketAssignee',
         'SuperTicket', 'SuperConnectionToken', 'AdminConnectionToken', 'PermNode',
@@ -2,18 +2,19 @@
 #
 import datetime
 import os
-import subprocess

 from celery import shared_task
 from django.conf import settings
 from django.core.files.storage import default_storage
 from django.db import transaction
 from django.utils import timezone
+from django.utils._os import safe_join
 from django.utils.translation import gettext_lazy as _

 from common.const.crontab import CRONTAB_AT_AM_TWO
 from common.storage.ftp_file import FTPFileStorageHandler
 from common.utils import get_log_keep_day, get_logger
+from common.utils.safe import safe_run_cmd
 from ops.celery.decorator import register_as_period_task
 from ops.models import CeleryTaskExecution
 from orgs.utils import tmp_to_root_org
@@ -57,14 +58,12 @@ def clean_ftp_log_period():
     now = timezone.now()
     days = get_log_keep_day('FTP_LOG_KEEP_DAYS')
     expired_day = now - datetime.timedelta(days=days)
-    file_store_dir = os.path.join(default_storage.base_location, FTPLog.upload_to)
+    file_store_dir = safe_join(default_storage.base_location, FTPLog.upload_to)
     FTPLog.objects.filter(date_start__lt=expired_day).delete()
-    command = "find %s -mtime +%s -type f -exec rm -f {} \\;" % (
-        file_store_dir, days
-    )
-    subprocess.call(command, shell=True)
-    command = "find %s -type d -empty -delete;" % file_store_dir
-    subprocess.call(command, shell=True)
+    command = "find %s -mtime +%s -type f -exec rm -f {} \\;"
+    safe_run_cmd(command, (file_store_dir, days))
+    command = "find %s -type d -empty -delete;"
+    safe_run_cmd(command, (file_store_dir,))
     logger.info("Clean FTP file done")


@@ -76,12 +75,11 @@ def clean_celery_tasks_period():
     tasks.delete()
     tasks = CeleryTaskExecution.objects.filter(date_start__isnull=True)
     tasks.delete()
-    command = "find %s -mtime +%s -name '*.log' -type f -exec rm -f {} \\;" % (
-        settings.CELERY_LOG_DIR, expire_days
-    )
-    subprocess.call(command, shell=True)
-    command = "echo > {}".format(os.path.join(settings.LOG_DIR, 'celery.log'))
-    subprocess.call(command, shell=True)
+    command = "find %s -mtime +%s -name '*.log' -type f -exec rm -f {} \\;"
+    safe_run_cmd(command, (settings.CELERY_LOG_DIR, expire_days))
+    celery_log_path = safe_join(settings.LOG_DIR, 'celery.log')
+    command = "echo > %s"
+    safe_run_cmd(command, (celery_log_path,))


 def batch_delete(queryset, batch_size=3000):
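The cleanup tasks above stop building shell commands by string interpolation: `os.path.join` becomes `safe_join`, and `subprocess.call(cmd, shell=True)` becomes `safe_run_cmd(template, args)` from `common.utils.safe`. The helper's implementation is not shown in this diff; a rough sketch of what such a wrapper might do, with the details below being assumptions rather than the actual JumpServer code:

import shlex
import subprocess


def safe_run_cmd(template, args=()):
    # Hypothetical sketch: shell-quote every argument before it is substituted
    # into the template, so a crafted path or filename cannot inject extra
    # shell syntax into the find/echo commands used by the cleanup tasks.
    quoted = tuple(shlex.quote(str(arg)) for arg in args)
    command = template % quoted if args else template
    return subprocess.call(command, shell=True)

`safe_join` from `django.utils._os` adds a second guard: unlike `os.path.join`, it raises `SuspiciousFileOperation` when the joined path would fall outside the base location.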
@@ -119,15 +117,15 @@ def clean_expired_session_period():
     expired_sessions = Session.objects.filter(date_start__lt=expire_date)
     timestamp = expire_date.timestamp()
     expired_commands = Command.objects.filter(timestamp__lt=timestamp)
-    replay_dir = os.path.join(default_storage.base_location, 'replay')
+    replay_dir = safe_join(default_storage.base_location, 'replay')

     batch_delete(expired_sessions)
     logger.info("Clean session item done")
     batch_delete(expired_commands)
     logger.info("Clean session command done")
     remove_files_by_days(replay_dir, days)
-    command = "find %s -type d -empty -delete;" % replay_dir
-    subprocess.call(command, shell=True)
+    command = "find %s -type d -empty -delete;"
+    safe_run_cmd(command, (replay_dir,))
     logger.info("Clean session replay done")


@@ -1,5 +1,6 @@
 import copy
 from datetime import datetime
+from itertools import chain

 from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
 from django.core.exceptions import ObjectDoesNotExist
@@ -7,7 +8,6 @@ from django.db import models
 from django.db.models import F, Value, CharField
 from django.db.models.functions import Concat
 from django.utils import translation
-from itertools import chain

 from common.db.fields import RelatedManager
 from common.utils import validate_ip, get_ip_city, get_logger
@@ -16,7 +16,6 @@ from .const import DEFAULT_CITY, ActivityChoices as LogChoice
 from .handler import create_or_update_operate_log
 from .models import ActivityLog
-

 logger = get_logger(__name__)


@@ -151,7 +150,7 @@ def record_operate_log_and_activity_log(ids, action, detail, model, **kwargs):

     org_id = current_org.id
     with translation.override('en'):
-        resource_type = model._meta.verbose_name
+        resource_type = kwargs.pop('resource_type', None) or model._meta.verbose_name
         create_or_update_operate_log(action, resource_type, force=True, **kwargs)
     base_data = {'type': LogChoice.operate_log, 'detail': detail, 'org_id': org_id}
     activities = [ActivityLog(resource_id=r_id, **base_data) for r_id in ids]
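The `kwargs.pop('resource_type', None) or model._meta.verbose_name` change above lets callers pass an explicit resource type while keeping the model's verbose_name as the default; note that because of `or`, an empty string also falls back to the verbose_name. A tiny self-contained illustration of that precedence, using toy classes rather than JumpServer models:

class _Meta:
    verbose_name = 'asset'


class ToyModel:
    _meta = _Meta()


def resolve_resource_type(model, **kwargs):
    # Mirrors the changed line: an explicit resource_type wins over verbose_name.
    return kwargs.pop('resource_type', None) or model._meta.verbose_name


assert resolve_resource_type(ToyModel) == 'asset'
assert resolve_resource_type(ToyModel, resource_type='Custom label') == 'Custom label'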
@@ -37,6 +37,7 @@ class UserConfirmationViewSet(JMSGenericViewSet):
         backend_classes = ConfirmType.get_prop_backends(confirm_type)
         if not backend_classes:
             return
+
         for backend_cls in backend_classes:
             backend = backend_cls(self.request.user, self.request)
             if not backend.check():
@@ -69,6 +70,7 @@ class UserConfirmationViewSet(JMSGenericViewSet):
         ok, msg = backend.authenticate(secret_key, mfa_type)
         if ok:
             request.session['CONFIRM_LEVEL'] = ConfirmType.values.index(confirm_type) + 1
+            request.session['CONFIRM_TYPE'] = confirm_type
             request.session['CONFIRM_TIME'] = int(time.time())
             return Response('ok')
         return Response({'error': msg}, status=400)
@@ -618,6 +618,8 @@ class SuperConnectionTokenViewSet(ConnectionTokenViewSet):

         token_id = request.data.get('id') or ''
         token = ConnectionToken.get_typed_connection_token(token_id)
+        if not token:
+            raise PermissionDenied('Token {} is not valid'.format(token))
         token.is_valid()
         serializer = self.get_serializer(instance=token)

@@ -14,7 +14,6 @@ from rest_framework.response import Response
 from authentication.errors import ACLError
 from common.api import JMSGenericViewSet
 from common.const.http import POST, GET
-from common.permissions import OnlySuperUser
 from common.serializers import EmptySerializer
 from common.utils import reverse, safe_next_url
 from common.utils.timezone import utc_now
||||||
@@ -38,8 +37,11 @@ class SSOViewSet(AuthMixin, JMSGenericViewSet):
|
|||||||
'login_url': SSOTokenSerializer,
|
'login_url': SSOTokenSerializer,
|
||||||
'login': EmptySerializer
|
'login': EmptySerializer
|
||||||
}
|
}
|
||||||
|
rbac_perms = {
|
||||||
|
'login_url': 'authentication.add_ssotoken',
|
||||||
|
}
|
||||||
|
|
||||||
@action(methods=[POST], detail=False, permission_classes=[OnlySuperUser], url_path='login-url')
|
@action(methods=[POST], detail=False, url_path='login-url')
|
||||||
def login_url(self, request, *args, **kwargs):
|
def login_url(self, request, *args, **kwargs):
|
||||||
if not settings.AUTH_SSO:
|
if not settings.AUTH_SSO:
|
||||||
raise SSOAuthClosed()
|
raise SSOAuthClosed()
|
||||||
|
|||||||
@@ -1,9 +1,9 @@
-from django.contrib.auth.backends import ModelBackend
 from django.contrib.auth import get_user_model
+from django.contrib.auth.backends import ModelBackend
+from django.views import View

-from users.models import User
 from common.utils import get_logger
+from users.models import User

 UserModel = get_user_model()
 logger = get_logger(__file__)
@@ -61,4 +61,13 @@ class JMSBaseAuthBackend:


 class JMSModelBackend(JMSBaseAuthBackend, ModelBackend):
-    pass
+    def user_can_authenticate(self, user):
+        return True
+
+
+class BaseAuthCallbackClientView(View):
+    http_method_names = ['get']
+
+    def get(self, request):
+        from authentication.views.utils import redirect_to_guard_view
+        return redirect_to_guard_view(query_string='next=client')
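The new `BaseAuthCallbackClientView` above centralizes the redirect that hands the browser back to the desktop client (`next=client`), which each SSO backend previously implemented by hand; the CAS and OAuth2 hunks later in this diff shrink their callback client views to a bare subclass. A hypothetical additional backend would only need something like the following; the SAML class name is illustrative and not part of this diff:

from authentication.backends.base import BaseAuthCallbackClientView


class SAMLAuthCallbackClientView(BaseAuthCallbackClientView):
    # Inherits http_method_names = ['get'] and the redirect to the guard view
    # with ?next=client, so no body is needed.
    pass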
@@ -1,14 +1,51 @@
 # -*- coding: utf-8 -*-
 #
-from django_cas_ng.backends import CASBackend as _CASBackend
-from django.conf import settings
+import threading

+from django.conf import settings
+from django.contrib.auth import get_user_model
+from django_cas_ng.backends import CASBackend as _CASBackend
+
+from common.utils import get_logger
 from ..base import JMSBaseAuthBackend

-__all__ = ['CASBackend']
+__all__ = ['CASBackend', 'CASUserDoesNotExist']
+
+logger = get_logger(__name__)
+
+
+class CASUserDoesNotExist(Exception):
+    """Exception raised when a CAS user does not exist."""
+    pass


 class CASBackend(JMSBaseAuthBackend, _CASBackend):
     @staticmethod
     def is_enabled():
         return settings.AUTH_CAS
+
+    def authenticate(self, request, ticket, service):
+        UserModel = get_user_model()
+        manager = UserModel._default_manager
+        original_get_by_natural_key = manager.get_by_natural_key
+        thread_local = threading.local()
+        thread_local.thread_id = threading.get_ident()
+        logger.debug(f"CASBackend.authenticate: thread_id={thread_local.thread_id}")
+
+        def get_by_natural_key(self, username):
+            logger.debug(f"CASBackend.get_by_natural_key: thread_id={threading.get_ident()}, username={username}")
+            if threading.get_ident() != thread_local.thread_id:
+                return original_get_by_natural_key(username)
+
+            try:
+                user = original_get_by_natural_key(username)
+            except UserModel.DoesNotExist:
+                raise CASUserDoesNotExist(username)
+            return user
+
+        try:
+            manager.get_by_natural_key = get_by_natural_key.__get__(manager, type(manager))
+            user = super().authenticate(request, ticket=ticket, service=service)
+        finally:
+            manager.get_by_natural_key = original_get_by_natural_key
+        return user
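The least obvious line in the CASBackend change above is `get_by_natural_key.__get__(manager, type(manager))`: calling `__get__` on a plain function binds it to the manager instance, so it can temporarily shadow the original lookup, raise `CASUserDoesNotExist` for the authenticating thread, and be put back in the `finally` block. A minimal standalone illustration of that bind, patch and restore pattern, using a toy class unrelated to the CAS code:

class Greeter:
    def hello(self):
        return 'hi'


greeter = Greeter()
original_hello = greeter.hello  # bound method, still works after the patch


def hello(self):
    # __get__ below binds this function to `greeter`, so `self` is the instance.
    return 'patched ' + original_hello()


try:
    greeter.hello = hello.__get__(greeter, Greeter)
    assert greeter.hello() == 'patched hi'
finally:
    # Put the original back, mirroring the finally block in CASBackend.
    greeter.hello = original_hello

assert greeter.hello() == 'hi'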
@@ -1,23 +1,33 @@
 from django.core.exceptions import PermissionDenied
 from django.http import HttpResponseRedirect
-from django.views.generic import View
+from django.utils.translation import gettext_lazy as _
 from django_cas_ng.views import LoginView

-__all__ = ['LoginView']
+from authentication.backends.base import BaseAuthCallbackClientView
+from common.utils import FlashMessageUtil
+from .backends import CASUserDoesNotExist

-from authentication.views.utils import redirect_to_guard_view
+__all__ = ['LoginView']


 class CASLoginView(LoginView):
     def get(self, request):
         try:
-            return super().get(request)
+            resp = super().get(request)
+            return resp
         except PermissionDenied:
             return HttpResponseRedirect('/')
+        except CASUserDoesNotExist as e:
+            message_data = {
+                'title': _('User does not exist: {}').format(e),
+                'error': _(
+                    'CAS login was successful, but no corresponding local user was found in the system, and automatic '
+                    'user creation is disabled in the CAS authentication configuration. Login failed.'),
+                'interval': 10,
+                'redirect_url': '/',
+            }
+            return FlashMessageUtil.gen_and_redirect_to(message_data)


-class CASCallbackClientView(View):
-    http_method_names = ['get', ]
-
-    def get(self, request):
-        return redirect_to_guard_view(query_string='next=client')
+class CASCallbackClientView(BaseAuthCallbackClientView):
+    pass
@@ -5,10 +5,10 @@ from django.urls import reverse
 from django.utils.http import urlencode
 from django.views import View

+from authentication.backends.base import BaseAuthCallbackClientView
 from authentication.mixins import authenticate
 from authentication.utils import build_absolute_uri
 from authentication.views.mixins import FlashMessageMixin
-from authentication.views.utils import redirect_to_guard_view
 from common.utils import get_logger

 logger = get_logger(__file__)
@@ -67,11 +67,8 @@ class OAuth2AuthCallbackView(View, FlashMessageMixin):
         return HttpResponseRedirect(redirect_url)


-class OAuth2AuthCallbackClientView(View):
-    http_method_names = ['get', ]
-
-    def get(self, request):
-        return redirect_to_guard_view(query_string='next=client')
+class OAuth2AuthCallbackClientView(BaseAuthCallbackClientView):
+    pass


 class OAuth2EndSessionView(View):
@@ -224,7 +224,6 @@ class OIDCAuthCodeBackend(OIDCBaseBackend):
             user_auth_failed.send(
                 sender=self.__class__, request=request, username=user.username,
                 reason="User is invalid", backend=settings.AUTH_BACKEND_OIDC_CODE
-
             )
             return None

@@ -10,16 +10,15 @@ import datetime as dt
 from calendar import timegm
 from urllib.parse import urlparse

+from django.conf import settings
 from django.core.exceptions import SuspiciousOperation
 from django.utils.encoding import force_bytes, smart_bytes
 from jwkest import JWKESTException
 from jwkest.jwk import KEYS
 from jwkest.jws import JWS
-from django.conf import settings

 from common.utils import get_logger
-

 logger = get_logger(__file__)


@@ -99,7 +98,8 @@ def _validate_claims(id_token, nonce=None, validate_nonce=True):
         raise SuspiciousOperation('Incorrect id_token: nbf')

     # Verifies that the token was issued in the allowed timeframe.
-    if utc_timestamp > id_token['iat'] + settings.AUTH_OPENID_ID_TOKEN_MAX_AGE:
+    max_age = settings.AUTH_OPENID_ID_TOKEN_MAX_AGE
+    if utc_timestamp > id_token['iat'] + max_age:
         logger.debug(log_prompt.format('Incorrect id_token: iat'))
         raise SuspiciousOperation('Incorrect id_token: iat')

Some files were not shown because too many files have changed in this diff.