Mirror of https://github.com/jumpserver/jumpserver.git (synced 2025-12-16 17:12:53 +00:00)

Compare commits: origin/dev ... v3.0.1 (113 commits)
| SHA1 |
|---|
| dd50a1faff |
| 86dab4fc6e |
| a85a80a945 |
| 349edc10aa |
| 44918e3cb5 |
| 9a2f6c0d70 |
| 934969a8f1 |
| 57162c1628 |
| 32fb36867f |
| 158b589028 |
| d64277353c |
| bff6f397ce |
| 0ad461a804 |
| a1dcef0ba0 |
| dbb1ee3a75 |
| d6bd207a17 |
| e69ba27ff4 |
| adbe7c07c6 |
| d1eacf53d4 |
| 19276e6bd4 |
| 8757cc97ed |
| aac805f5e4 |
| 6febc104de |
| 733b95ee99 |
| b179264127 |
| c18388e27a |
| 52830db500 |
| 2324cdc14e |
| bab4562820 |
| 613a7d63b5 |
| 129c0e1bf4 |
| 384873b4cb |
| 9e410bb389 |
| 9337463471 |
| e6d50cc8b4 |
| fa08517bea |
| d808256e6a |
| 061b60ef59 |
| c008115888 |
| 8d1fb84aaf |
| 43d61b5348 |
| c26a786287 |
| cb2bd0cf2c |
| 3048e6311b |
| 31de9375e7 |
| 188c04c9a6 |
| a82ed3e924 |
| 831b67eae4 |
| 4642804077 |
| 09160fed5d |
| 8409523fee |
| f52a0ce960 |
| d34c4fb7ec |
| c12efffcc9 |
| 6319be0ea3 |
| 4d7f8ffc71 |
| c665b0dbae |
| a770a19252 |
| 717f97cd88 |
| d3355ab0ec |
| 7ac385d64c |
| 2898c35970 |
| 62f5662bd0 |
| 0fe221019a |
| d745314aa1 |
| 153fad9ac7 |
| 0792c7ec49 |
| e617697553 |
| 9dc7da3595 |
| f7f4d3a42e |
| 70fcbfe883 |
| 68aad56bad |
| 85b2ec2e6a |
| be75edcb41 |
| c41fc54380 |
| c2fbe5c75a |
| 33090c4cdf |
| b5ac5c5670 |
| d672122c79 |
| 514fa9cf0a |
| 7f52675bd3 |
| a4be0ff2f3 |
| e83d676712 |
| 015ff4b119 |
| c04ab1aab9 |
| 714b6b1233 |
| 6f49d240af |
| afcbe60531 |
| f98c170b8c |
| 21c41a6334 |
| 005dd27701 |
| 8080d36d90 |
| 91a34d1a88 |
| 166745baf6 |
| c77f02b295 |
| cfed849175 |
| 5996cedcd6 |
| a64ec8a1d2 |
| 45331dc9e8 |
| 18c388f3a5 |
| 7be76feeb0 |
| ff6dbe67a6 |
| c10436de47 |
| 37a3566b0e |
| 2b364c1476 |
| 2036037675 |
| 6bd597eadd |
| fbd0b44d4f |
| 35722a8466 |
| d27947919b |
| 151d897746 |
| d6aad41d05 |
| 5f7fa7e02f |
```diff
@@ -6,7 +6,7 @@ from rest_framework.response import Response
 from accounts import serializers
 from accounts.filters import AccountFilterSet
 from accounts.models import Account
-from assets.models import Asset
+from assets.models import Asset, Node
 from common.permissions import UserConfirmation, ConfirmType
 from common.views.mixins import RecordViewLogMixin
 from orgs.mixins.api import OrgBulkModelViewSet
@@ -28,6 +28,7 @@ class AccountViewSet(OrgBulkModelViewSet):
     rbac_perms = {
         'partial_update': ['accounts.change_account'],
         'su_from_accounts': 'accounts.view_account',
+        'username_suggestions': 'accounts.view_account',
     }

     @action(methods=['get'], detail=False, url_path='su-from-accounts')
@@ -47,6 +48,29 @@ class AccountViewSet(OrgBulkModelViewSet):
         serializer = serializers.AccountSerializer(accounts, many=True)
         return Response(data=serializer.data)

+    @action(methods=['get'], detail=False, url_path='username-suggestions')
+    def username_suggestions(self, request, *args, **kwargs):
+        asset_ids = request.query_params.get('assets')
+        node_keys = request.query_params.get('keys')
+        username = request.query_params.get('username')
+
+        assets = Asset.objects.all()
+        if asset_ids:
+            assets = assets.filter(id__in=asset_ids.split(','))
+        if node_keys:
+            patten = Node.get_node_all_children_key_pattern(node_keys.split(','))
+            assets = assets.filter(nodes__key__regex=patten)
+
+        accounts = Account.objects.filter(asset__in=assets)
+        if username:
+            accounts = accounts.filter(username__icontains=username)
+        usernames = list(accounts.values_list('username', flat=True).distinct()[:10])
+        usernames.sort()
+        common = [i for i in usernames if i in usernames if i.lower() in ['root', 'admin', 'administrator']]
+        others = [i for i in usernames if i not in common]
+        usernames = common + others
+        return Response(data=usernames)
+

 class AccountSecretsViewSet(RecordViewLogMixin, AccountViewSet):
     """
```
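For context, a hedged sketch of how a client might call the new `username-suggestions` action. The URL prefix (`/api/v1/accounts/accounts/`) and the token header are assumptions about a typical JumpServer v3 deployment; only the `assets`, `keys`, and `username` query parameters and the sorted, common-first response come from the change above.

```python
import requests

# Assumed base URL and credentials; adjust to your deployment.
JMS_URL = "https://jumpserver.example.com"
HEADERS = {"Authorization": "Token <api-token>", "X-JMS-ORG": "<org-id>"}

# Ask for username suggestions for two assets, filtered by a prefix.
resp = requests.get(
    f"{JMS_URL}/api/v1/accounts/accounts/username-suggestions/",
    params={"assets": "<asset-id-1>,<asset-id-2>", "username": "ro"},
    headers=HEADERS,
)
# Expected: a JSON list of at most 10 usernames, with the common ones
# ('root', 'admin', 'administrator') ordered first, e.g. ["root", "rocky"]
print(resp.json())
```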
```diff
@@ -206,7 +206,7 @@ class ChangeSecretManager(AccountBasePlaybookManager):
         serializer = serializer_cls(recorders, many=True)

         header = [str(v.label) for v in serializer.child.fields.values()]
-        rows = [list(row.values()) for row in serializer.data]
+        rows = [[str(i) for i in row.values()] for row in serializer.data]
         if not rows:
             return False

```
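A minimal illustration (not from the repository) of why the export rows are coerced to `str`: serializer output can contain UUIDs, datetimes, or `None`, which downstream spreadsheet and CSV writers tend to render inconsistently unless stringified up front.

```python
import uuid
import datetime

row = {'id': uuid.uuid4(), 'date_finished': datetime.datetime.now(), 'error': None}

# Before: raw objects leak into the export rows.
raw = list(row.values())
# After: everything is stringified first, matching the change above.
flat = [str(i) for i in row.values()]

print(raw)    # [UUID('...'), datetime.datetime(...), None]
print(flat)   # ['...', '2023-...', 'None']
```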
```diff
@@ -8,7 +8,7 @@ from orgs.utils import tmp_to_org, tmp_to_root_org
 logger = get_logger(__file__)


-def task_activity_callback(self, pid, trigger, tp):
+def task_activity_callback(self, pid, trigger, tp, *args, **kwargs):
     model = AutomationTypes.get_type_model(tp)
     with tmp_to_root_org():
         instance = get_object_or_none(model, pk=pid)
```
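The same `*args, **kwargs` widening appears in several callbacks below. A toy, self-contained sketch of the failure mode it avoids when a caller starts passing extra positional or keyword arguments to the callback:

```python
def strict_callback(self, pid, trigger, tp):
    return pid, tp

def tolerant_callback(self, pid, trigger, tp, *args, **kwargs):
    # Extra arguments are accepted and simply ignored.
    return pid, tp

call_args = (None, '123', 'manual', 'change_secret', 'extra-positional')

try:
    strict_callback(*call_args)
except TypeError as e:
    print('strict callback breaks:', e)

print('tolerant callback still works:', tolerant_callback(*call_args, queue='ansible'))
```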
```diff
@@ -9,7 +9,7 @@ from orgs.utils import tmp_to_org, tmp_to_root_org
 logger = get_logger(__file__)


-def task_activity_callback(self, pid, trigger):
+def task_activity_callback(self, pid, trigger, *args, **kwargs):
     from accounts.models import AccountBackupAutomation
     with tmp_to_root_org():
         plan = get_object_or_none(AccountBackupAutomation, pk=pid)
```
```diff
@@ -27,7 +27,7 @@ def gather_asset_accounts_util(nodes, task_name):

 @shared_task(
     queue="ansible", verbose_name=_('Gather asset accounts'),
-    activity_callback=lambda self, node_ids, task_name=None: (node_ids, None)
+    activity_callback=lambda self, node_ids, task_name=None, *args, **kwargs: (node_ids, None)
 )
 def gather_asset_accounts_task(node_ids, task_name=None):
     if task_name is None:
```
```diff
@@ -13,7 +13,7 @@ __all__ = [

 @shared_task(
     queue="ansible", verbose_name=_('Push accounts to assets'),
-    activity_callback=lambda self, account_ids, asset_ids: (account_ids, None)
+    activity_callback=lambda self, account_ids, *args, **kwargs: (account_ids, None)
 )
 def push_accounts_to_assets_task(account_ids):
     from accounts.models import PushAccountAutomation
```
```diff
@@ -99,13 +99,14 @@ class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):
         ("platform", serializers.PlatformSerializer),
         ("suggestion", serializers.MiniAssetSerializer),
         ("gateways", serializers.GatewaySerializer),
-        ("spec_info", serializers.SpecSerializer)
+        ("spec_info", serializers.SpecSerializer),
     )
     rbac_perms = (
         ("match", "assets.match_asset"),
         ("platform", "assets.view_platform"),
         ("gateways", "assets.view_gateway"),
         ("spec_info", "assets.view_asset"),
+        ("info", "assets.view_asset"),
     )
     extra_filter_backends = [LabelFilterBackend, IpInFilterBackend, NodeFilterBackend]

```
```diff
@@ -21,4 +21,10 @@ class HostViewSet(AssetViewSet):
     @action(methods=["GET"], detail=True, url_path="info")
    def info(self, *args, **kwargs):
         asset = super().get_object()
-        return Response(asset.info)
+        serializer = self.get_serializer(asset.info)
+        data = serializer.data
+        data['asset'] = {
+            'id': asset.id, 'name': asset.name,
+            'address': asset.address
+        }
+        return Response(data)
```
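A rough sketch of how the `info` payload changes shape after this edit: the gathered facts are run through the configured serializer and an `asset` summary block is attached. The `spec_info` field names used here are illustrative, not the project's actual schema.

```python
# Before: Response(asset.info) returned the raw gathered facts, e.g.
before = {"cpu_count": 4, "memory": "8G"}

# After: the serialized facts are returned together with a small asset summary.
after = {
    "cpu_count": 4,            # illustrative spec_info fields
    "memory": "8G",
    "asset": {
        "id": "f3c9...",       # asset.id
        "name": "web-01",      # asset.name
        "address": "10.0.0.8", # asset.address
    },
}
print(after["asset"]["address"])
```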
```diff
@@ -93,7 +93,7 @@ class Migration(migrations.Migration):
         migrations.AlterField(
             model_name='asset',
             name='address',
-            field=models.CharField(db_index=True, max_length=1024, verbose_name='Address'),
+            field=models.CharField(db_index=True, max_length=767, verbose_name='Address'),
         ),
         migrations.AddField(
             model_name='asset',
```
```diff
@@ -1,12 +1,15 @@
 # Generated by Django 3.2.12 on 2022-07-11 06:13

 import time
+from django.utils import timezone
+from itertools import groupby
 from django.db import migrations


 def migrate_asset_accounts(apps, schema_editor):
     auth_book_model = apps.get_model('assets', 'AuthBook')
     account_model = apps.get_model('accounts', 'Account')
+    account_history_model = apps.get_model('accounts', 'HistoricalAccount')

     count = 0
     bulk_size = 1000
@@ -20,34 +23,35 @@ def migrate_asset_accounts(apps, schema_editor):
             break

         count += len(auth_books)
-        accounts = []
         # Attributes shared between auth book and account
         same_attrs = [
             'id', 'username', 'comment', 'date_created', 'date_updated',
             'created_by', 'asset_id', 'org_id',
         ]
-        # Auth attributes: may come from the authbook or the systemuser
+        # Auth attributes: may come from the auth_book or the system_user
         auth_attrs = ['password', 'private_key', 'token']
         all_attrs = same_attrs + auth_attrs

+        accounts = []
         for auth_book in auth_books:
-            values = {'version': 1}
+            account_values = {'version': 1}

             system_user = auth_book.systemuser
             if system_user:
                 # Update the system user's auth attributes once
-                values.update({attr: getattr(system_user, attr, '') for attr in all_attrs})
-                values['created_by'] = str(system_user.id)
-                values['privileged'] = system_user.type == 'admin'
+                account_values.update({attr: getattr(system_user, attr, '') for attr in all_attrs})
+                account_values['created_by'] = str(system_user.id)
+                account_values['privileged'] = system_user.type == 'admin' \
+                    or system_user.username in ['root', 'Administrator']

             auth_book_auth = {attr: getattr(auth_book, attr, '') for attr in all_attrs if getattr(auth_book, attr, '')}
-            # Finally use the authbook's auth attributes
-            values.update(auth_book_auth)
+            # Finally prefer the auth_book's auth attributes
+            account_values.update(auth_book_auth)

             auth_infos = []
-            username = values['username']
+            username = account_values['username']
             for attr in auth_attrs:
-                secret = values.pop(attr, None)
+                secret = account_values.pop(attr, None)
                 if not secret:
                     continue

@@ -66,13 +70,48 @@ def migrate_asset_accounts(apps, schema_editor):
                 auth_infos.append((username, 'password', ''))

             for name, secret_type, secret in auth_infos:
-                account = account_model(**values, name=name, secret=secret, secret_type=secret_type)
+                if not name:
+                    continue
+                account = account_model(**account_values, name=name, secret=secret, secret_type=secret_type)
                 accounts.append(account)

-        account_model.objects.bulk_create(accounts, ignore_conflicts=True)
+        accounts.sort(key=lambda x: (x.name, x.asset_id, x.date_updated))
+        grouped_accounts = groupby(accounts, lambda x: (x.name, x.asset_id))
+
+        accounts_to_add = []
+        accounts_to_history = []
+        for key, _accounts in grouped_accounts:
+            _accounts = list(_accounts)
+            if not _accounts:
+                continue
+            _account = _accounts[-1]
+            accounts_to_add.append(_account)
+            _account_history = []
+
+            for ac in _accounts:
+                if not ac.secret:
+                    continue
+                if ac.id != _account.id and ac.secret == _account.secret:
+                    continue
+                history_data = {
+                    'id': _account.id,
+                    'secret': ac.secret,
+                    'secret_type': ac.secret_type,
+                    'history_date': ac.date_updated,
+                    'history_type': '~',
+                    'history_change_reason': 'from account {}'.format(_account.name),
+                }
+                _account_history.append(account_history_model(**history_data))
+            _account.version = len(_account_history)
+            accounts_to_history.extend(_account_history)
+
+        account_model.objects.bulk_create(accounts_to_add, ignore_conflicts=True)
+        account_history_model.objects.bulk_create(accounts_to_history, ignore_conflicts=True)
         print("\t - Create asset accounts: {}-{} using: {:.2f}s".format(
             count - len(auth_books), count, time.time() - start
         ))
+        print("\t - accounts: {}".format(len(accounts_to_add)))
+        print("\t - histories: {}".format(len(accounts_to_history)))


 def migrate_db_accounts(apps, schema_editor):
@@ -130,6 +169,9 @@ def migrate_db_accounts(apps, schema_editor):
         values['secret_type'] = secret_type
         values['secret'] = secret

+        if not name:
+            continue
+
         for app in apps:
             values['asset_id'] = str(app.id)
             account = account_model(**values)
```
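The core of the new migration logic is: sort, group by `(name, asset_id)`, keep the newest row, and turn older secrets into history records. A simplified, self-contained sketch of that pattern with a dataclass standing in for the model classes (the real migration's history rules differ slightly):

```python
from dataclasses import dataclass
from datetime import datetime
from itertools import groupby

@dataclass
class Acc:
    name: str
    asset_id: str
    secret: str
    date_updated: datetime

accounts = [
    Acc('root', 'a1', 'old-pass', datetime(2022, 1, 1)),
    Acc('root', 'a1', 'new-pass', datetime(2022, 6, 1)),
    Acc('web', 'a2', 's3cret', datetime(2022, 3, 1)),
]

# groupby only groups adjacent items, so sort on the same key first.
accounts.sort(key=lambda x: (x.name, x.asset_id, x.date_updated))
to_add, to_history = [], []
for _, group in groupby(accounts, key=lambda x: (x.name, x.asset_id)):
    group = list(group)
    latest = group[-1]              # newest wins, like _accounts[-1] above
    to_add.append(latest)
    to_history.extend(a for a in group[:-1] if a.secret and a.secret != latest.secret)

print([a.secret for a in to_add])      # ['new-pass', 's3cret']
print([a.secret for a in to_history])  # ['old-pass']
```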
```diff
@@ -100,7 +100,7 @@ class Asset(NodesRelationMixin, AbsConnectivity, JMSOrgBaseModel):
     Type = const.AllTypes

     name = models.CharField(max_length=128, verbose_name=_('Name'))
-    address = models.CharField(max_length=1024, verbose_name=_('Address'), db_index=True)
+    address = models.CharField(max_length=767, verbose_name=_('Address'), db_index=True)
     platform = models.ForeignKey(Platform, on_delete=models.PROTECT, verbose_name=_("Platform"), related_name='assets')
     domain = models.ForeignKey("assets.Domain", null=True, blank=True, related_name='assets',
                                verbose_name=_("Domain"), on_delete=models.SET_NULL)
```
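A plausible reading of why 767 was chosen, assuming a MySQL/InnoDB backend with `utf8mb4` (4 bytes per character) and the common 3072-byte index key limit: the old `max_length=1024` cannot be fully covered by the `db_index=True` index, while 767 characters just fits. A quick arithmetic check of that assumption:

```python
BYTES_PER_CHAR = 4        # utf8mb4 worst case
INDEX_KEY_LIMIT = 3072    # InnoDB with DYNAMIC/COMPRESSED row format

for max_length in (1024, 767):
    needed = max_length * BYTES_PER_CHAR
    print(max_length, needed, 'fits' if needed <= INDEX_KEY_LIMIT else 'too large')

# 1024 -> 4096 bytes: too large to index as a whole
#  767 -> 3068 bytes: fits under the limit
```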
```diff
@@ -489,7 +489,7 @@ class SomeNodesMixin:
             return cls.default_node()

         if ori_org and ori_org.is_root():
-            return None
+            return cls.default_node()

         org_roots = cls.org_root_nodes()
         org_roots_length = len(org_roots)
```
```diff
@@ -8,7 +8,7 @@ from orgs.utils import tmp_to_root_org, tmp_to_org
 logger = get_logger(__file__)


-def task_activity_callback(self, pid, trigger, tp):
+def task_activity_callback(self, pid, trigger, tp, *args, **kwargs):
     model = AutomationTypes.get_type_model(tp)
     with tmp_to_root_org():
         instance = get_object_or_none(model, pk=pid)
```
```diff
@@ -225,6 +225,7 @@ class MFAMixin:
         self.request.session['auth_mfa_time'] = time.time()
         self.request.session['auth_mfa_required'] = 0
         self.request.session['auth_mfa_type'] = mfa_type
+        MFABlockUtils(self.request.user.username, self.get_request_ip()).clean_failed_count()

     def clean_mfa_mark(self):
         keys = ['auth_mfa', 'auth_mfa_time', 'auth_mfa_required', 'auth_mfa_type']
```
```diff
@@ -32,11 +32,14 @@ class UserLoginMFAView(mixins.AuthMixin, FormView):
         return super().get(*args, **kwargs)

     def form_valid(self, form):
+        from users.utils import MFABlockUtils
         code = form.cleaned_data.get('code')
         mfa_type = form.cleaned_data.get('mfa_type')

         try:
             self._do_check_user_mfa(code, mfa_type)
+            user, ip = self.get_user_from_session(), self.get_request_ip()
+            MFABlockUtils(user.username, ip).clean_failed_count()
             return redirect_to_guard_view('mfa_ok')
         except (errors.MFAFailedError, errors.BlockMFAError) as e:
             form.add_error('code', e.msg)
```
```diff
@@ -10,7 +10,7 @@ from .utils import get_logger
 logger = get_logger(__file__)


-def task_activity_callback(self, subject, message, recipient_list, **kwargs):
+def task_activity_callback(self, subject, message, recipient_list, *args, **kwargs):
     from users.models import User
     email_list = recipient_list
     resource_ids = list(User.objects.filter(email__in=email_list).values_list('id', flat=True))
```
```diff
@@ -3,8 +3,10 @@
 from celery import shared_task
 from celery.exceptions import SoftTimeLimitExceeded
 from django.utils.translation import ugettext_lazy as _
+from django_celery_beat.models import PeriodicTask

 from common.utils import get_logger, get_object_or_none
+from ops.celery import app
 from orgs.utils import tmp_to_org, tmp_to_root_org
 from .celery.decorator import (
     register_as_period_task, after_app_ready_start
@@ -19,7 +21,7 @@ from .notifications import ServerPerformanceCheckUtil
 logger = get_logger(__file__)


-def job_task_activity_callback(self, job_id, trigger):
+def job_task_activity_callback(self, job_id, *args, **kwargs):
     job = get_object_or_none(Job, id=job_id)
     if not job:
         return
@@ -48,7 +50,7 @@ def run_ops_job(job_id):
         logger.error("Start adhoc execution error: {}".format(e))


-def job_execution_task_activity_callback(self, execution_id, trigger):
+def job_execution_task_activity_callback(self, execution_id, *args, **kwargs):
     execution = get_object_or_none(JobExecution, id=execution_id)
     if not execution:
         return
@@ -78,16 +80,14 @@ def run_ops_job_execution(execution_id, **kwargs):
 @after_app_ready_start
 def clean_celery_periodic_tasks():
     """Clean celery periodic tasks"""
-    need_cleaned_tasks = [
-        'handle_be_interrupted_change_auth_task_periodic',
-    ]
-    logger.info('Start clean celery periodic tasks: {}'.format(need_cleaned_tasks))
-    for task_name in need_cleaned_tasks:
-        logger.info('Start clean task: {}'.format(task_name))
-        task = get_celery_periodic_task(task_name)
-        if task is None:
-            logger.info('Task does not exist: {}'.format(task_name))
+    logger.info('Start clean celery periodic tasks.')
+    register_tasks = PeriodicTask.objects.all()
+    for task in register_tasks:
+        if task.task in app.tasks:
             continue
+
+        task_name = task.name
+        logger.info('Start clean task: {}'.format(task_name))
         disable_celery_periodic_task(task_name)
         delete_celery_periodic_task(task_name)
         task = get_celery_periodic_task(task_name)
```
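The cleanup now walks every stored `PeriodicTask` and removes the ones whose task path is no longer registered with the Celery app, instead of checking a hard-coded name list. A minimal standalone sketch of that selection logic, with plain dicts standing in for the ORM and for `app.tasks` (the task paths here are illustrative only):

```python
registered_tasks = {          # stand-in for app.tasks
    'ops.tasks.clean_celery_periodic_tasks': object(),
    'accounts.tasks.push_accounts_to_assets_task': object(),
}
stored_periodic_tasks = [     # stand-in for PeriodicTask.objects.all()
    {'name': 'push-accounts-nightly', 'task': 'accounts.tasks.push_accounts_to_assets_task'},
    {'name': 'legacy-change-auth', 'task': 'assets.tasks.handle_be_interrupted_change_auth_task'},
]

# Keep tasks the app still knows about; anything else is stale and gets removed.
stale = [t['name'] for t in stored_periodic_tasks if t['task'] not in registered_tasks]
print(stale)  # ['legacy-change-auth'] -> would be disabled and deleted
```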
```diff
@@ -114,9 +114,7 @@ class OrgResourceStatisticsCache(OrgRelatedCache):
     @staticmethod
     def compute_total_count_today_active_assets():
         t = local_zero_hour()
-        return Session.objects.filter(
-            date_start__gte=t, is_success=False
-        ).values('asset_id').distinct().count()
+        return Session.objects.filter(date_start__gte=t).values('asset_id').distinct().count()

     @staticmethod
     def compute_total_count_today_failed_sessions():
```
```diff
@@ -102,7 +102,10 @@ def on_post_delete_refresh_org_resource_statistics_cache(sender, instance, **kwargs):


 def _refresh_session_org_resource_statistics_cache(instance: Session):
-    cache_field_name = ['total_count_online_users', 'total_count_online_sessions', 'total_count_today_failed_sessions']
+    cache_field_name = [
+        'total_count_online_users', 'total_count_online_sessions',
+        'total_count_today_active_assets', 'total_count_today_failed_sessions'
+    ]

     org_cache = OrgResourceStatisticsCache(instance.org)
     org_cache.expire(*cache_field_name)
```
```diff
@@ -30,6 +30,12 @@ class BaseUserPermedAssetsApi(SelfOrPKUserMixin, ListAPIView):
     filterset_class = AssetFilterSet
     serializer_class = serializers.AssetPermedSerializer

+    def get_serializer_class(self):
+        serializer_class = super().get_serializer_class()
+        if self.request.query_params.get('id'):
+            serializer_class = serializers.AssetPermedDetailSerializer
+        return serializer_class
+
     def get_queryset(self):
         if getattr(self, 'swagger_fake_view', False):
             return Asset.objects.none()
```
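Switching the serializer on a query parameter is a common DRF pattern: list responses stay slim by default, and the heavier detail fields (`spec_info` here) are opt-in. A self-contained sketch of the same decision logic outside the view (class names here are illustrative, not the project's):

```python
class ListSerializer: ...
class DetailSerializer(ListSerializer): ...

def pick_serializer_class(query_params, default=ListSerializer):
    # Mirror of the view logic above: use the detail serializer
    # only when the request pins a specific object with ?id=<pk>.
    if query_params.get('id'):
        return DetailSerializer
    return default

print(pick_serializer_class({'id': 'f3c9'}))   # <class 'DetailSerializer'>
print(pick_serializer_class({}))               # <class 'ListSerializer'>
```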
```diff
@@ -15,7 +15,7 @@ from perms.serializers.permission import ActionChoicesField

 __all__ = [
     'NodePermedSerializer', 'AssetPermedSerializer',
-    'AccountsPermedSerializer'
+    'AssetPermedDetailSerializer', 'AccountsPermedSerializer'
 ]


@@ -46,6 +46,12 @@ class AssetPermedSerializer(OrgResourceModelSerializerMixin):
         return queryset


+class AssetPermedDetailSerializer(AssetPermedSerializer):
+    class Meta(AssetPermedSerializer.Meta):
+        fields = AssetPermedSerializer.Meta.fields + ['spec_info']
+        read_only_fields = fields
+
+
 class NodePermedSerializer(serializers.ModelSerializer):
     class Meta:
         model = Node
```
```diff
@@ -1,5 +1,6 @@
 from collections import defaultdict

+from orgs.utils import tmp_to_org
 from accounts.models import Account
 from accounts.const import AliasAccount
 from .permission import AssetPermissionUtil
@@ -16,6 +17,7 @@ class PermAccountUtil(AssetPermissionUtil):
         :param asset: Asset
         :param account_name: may be the string @USER or @INPUT
         """
-        permed_accounts = self.get_permed_accounts_for_user(user, asset)
-        accounts_mapper = {account.alias: account for account in permed_accounts}
-        account = accounts_mapper.get(account_name)
+        with tmp_to_org(asset.org):
+            permed_accounts = self.get_permed_accounts_for_user(user, asset)
+            accounts_mapper = {account.alias: account for account in permed_accounts}
+            account = accounts_mapper.get(account_name)
```
```diff
@@ -18,6 +18,10 @@ user_perms = (
     ('assets', 'asset', 'match', 'asset'),
     ('assets', 'systemuser', 'match', 'systemuser'),
     ('assets', 'node', 'match', 'node'),
+    ("ops", "adhoc", "*", "*"),
+    ("ops", "playbook", "*", "*"),
+    ("ops", "job", "*", "*"),
+    ("ops", "jobexecution", "*", "*"),
 )

 system_user_perms = (
@@ -41,7 +45,6 @@ auditor_perms = user_perms + _auditor_perms

 system_auditor_perms = system_user_perms + _auditor_perms + _view_root_perms

-
 app_exclude_perms = [
     ('users', 'user', 'add,delete', 'user'),
     ('orgs', 'org', 'add,delete,change', 'org'),
```
```diff
@@ -97,13 +97,13 @@ class RBACPermission(permissions.DjangoModelPermissions):
             else:
                 model_cls = queryset.model
         except AssertionError as e:
-            logger.error(f'Error get model cls: {e}')
+            # logger.error(f'Error get model cls: {e}')
             model_cls = None
         except AttributeError as e:
-            logger.error(f'Error get model cls: {e}')
+            # logger.error(f'Error get model cls: {e}')
             model_cls = None
         except Exception as e:
-            logger.error('Error get model class: {} of {}'.format(e, view))
+            # logger.error('Error get model class: {} of {}'.format(e, view))
             raise e
         return model_cls

```
```diff
@@ -17,17 +17,17 @@ class WebMethod(TextChoices):

     @classmethod
     def get_methods(cls):
-        return {
+        methods = {
             Protocol.ssh: [cls.web_cli, cls.web_sftp],
             Protocol.telnet: [cls.web_cli],
             Protocol.rdp: [cls.web_gui],
             Protocol.vnc: [cls.web_gui],

-            Protocol.mysql: [cls.web_cli, cls.web_gui],
-            Protocol.mariadb: [cls.web_cli, cls.web_gui],
-            Protocol.oracle: [cls.web_cli, cls.web_gui],
-            Protocol.postgresql: [cls.web_cli, cls.web_gui],
-            Protocol.sqlserver: [cls.web_cli, cls.web_gui],
+            Protocol.mysql: [cls.web_cli],
+            Protocol.mariadb: [cls.web_cli],
+            Protocol.oracle: [cls.web_cli],
+            Protocol.postgresql: [cls.web_cli],
+            Protocol.sqlserver: [cls.web_cli],
             Protocol.redis: [cls.web_cli],
             Protocol.mongodb: [cls.web_cli],
             Protocol.clickhouse: [cls.web_cli],
@@ -35,6 +35,13 @@ class WebMethod(TextChoices):
             Protocol.k8s: [cls.web_cli],
             Protocol.http: []
         }
+        if not settings.XPACK_ENABLED:
+            return methods
+
+        web_gui_dbs = [Protocol.mysql, Protocol.mariadb, Protocol.oracle, Protocol.postgresql]
+        for db in web_gui_dbs:
+            methods[db].append(cls.web_gui)
+        return methods


 class NativeClient(TextChoices):
@@ -130,8 +137,6 @@ class AppletMethod:
         from .models import Applet, AppletHost

         methods = defaultdict(list)
-        if not settings.XPACK_ENABLED:
-            return methods

         has_applet_hosts = AppletHost.objects.all().exists()
         applets = Applet.objects.filter(is_active=True)
```
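The net effect of the `get_methods` change: web GUI access for the relational databases is now granted only when `settings.XPACK_ENABLED` is true, while the per-call XPACK check in `AppletMethod` is dropped. A small sketch of the gating logic with a plain boolean and strings in place of the Django setting and `Protocol` choices:

```python
def get_methods(xpack_enabled: bool):
    methods = {
        'ssh': ['web_cli', 'web_sftp'],
        'mysql': ['web_cli'],
        'postgresql': ['web_cli'],
    }
    if not xpack_enabled:
        return methods
    for db in ('mysql', 'postgresql'):   # web GUI for databases is an XPACK-only extra
        methods[db].append('web_gui')
    return methods

print(get_methods(False)['mysql'])  # ['web_cli']
print(get_methods(True)['mysql'])   # ['web_cli', 'web_gui']
```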
```diff
@@ -80,14 +80,20 @@ def upload_session_replay_to_external_storage(session_id):
         return


-@shared_task(verbose_name=_('Run applet host deployment'), activity_callback=lambda did: ([did], ))
+@shared_task(
+    verbose_name=_('Run applet host deployment'),
+    activity_callback=lambda self, did, *args, **kwargs: ([did], )
+)
 def run_applet_host_deployment(did):
     with tmp_to_builtin_org(system=1):
         deployment = AppletHostDeployment.objects.get(id=did)
         deployment.start()


-@shared_task(verbose_name=_('Install applet'), activity_callback=lambda did, applet_id: ([did],))
+@shared_task(
+    verbose_name=_('Install applet'),
+    activity_callback=lambda self, did, applet_id, *args, **kwargs: ([did],)
+)
 def run_applet_host_deployment_install_applet(did, applet_id):
     with tmp_to_builtin_org(system=1):
         deployment = AppletHostDeployment.objects.get(id=did)
```