Compare commits


24 Commits

Author SHA1 Message Date
fit2bot
e4cf81d96c feat: Update v3.0.2 2023-02-28 18:55:15 +08:00
fit2bot
a098bc6c06 perf: Push account: disable periodic tasks in the Community Edition (#9805)
Co-authored-by: feng <1304903146@qq.com>
2023-02-28 13:43:38 +08:00
老广
86870ad985 Merge pull request #9798 from jumpserver/pr@v3.0@fix_protocol_init_error
perf: Adjust some default values when creating protocols
2023-02-28 09:45:15 +08:00
ibuler
c602bf7224 perf: Adjust some default values when creating protocols 2023-02-27 11:49:04 +00:00
fit2bot
86dab4fc6e perf: Today's active assets (#9797)
Co-authored-by: feng <1304903146@qq.com>
2023-02-27 18:10:11 +08:00
Aaron3S
a85a80a945 fix: Grant regular users Job Center permissions by default 2023-02-27 17:28:04 +08:00
老广
349edc10aa Merge pull request #9791 from jumpserver/pr@v3.0@add_accounts_suggestions
perf: Add account username suggestions
2023-02-27 15:19:26 +08:00
ibuler
44918e3cb5 perf: Add account username suggestions
perf: Adjust account suggestions
2023-02-27 07:14:55 +00:00
ibuler
9a2f6c0d70 perf: Change asset address length to support utf8mb4
perf: Adjust length
2023-02-27 14:08:15 +08:00
ibuler
934969a8f1 perf: Skip migrating entries without a Name 2023-02-27 14:02:09 +08:00
老广
57162c1628 Merge pull request #9776 from jumpserver/pr@v3.0@perf_account_migrate2
perf: Optimize accounts migration
2023-02-27 10:22:59 +08:00
ibuler
32fb36867f perf: Optimize accounts migration
perf: Optimize account migration: move same-named accounts into history
2023-02-26 01:49:25 +00:00
老广
158b589028 Merge pull request #9761 from jumpserver/pr@v3@fix_activity_save_error
fix: Fix Activity save failure caused by bad arguments
2023-02-24 18:18:03 +08:00
jiangweidong
d64277353c Merge branch 'v3.0' of http://github.com/jumpserver/jumpserver into pr@v3@fix_activity_save_error 2023-02-24 18:10:47 +08:00
jiangweidong
bff6f397ce fix: Fix Activity save failure caused by bad arguments 2023-02-24 18:10:42 +08:00
fit2bot
0ad461a804 perf: Update the host info API, open applets to the Community Edition, fix the change-secret email bug (#9760)
Co-authored-by: feng <1304903146@qq.com>
2023-02-24 18:08:40 +08:00
Bai
a1dcef0ba0 fix: Fix the databases supported by the web GUI 2023-02-24 15:12:08 +08:00
Bai
dbb1ee3a75 fix: Fix clearing of the MFA failure count after successful authentication 2023-02-24 14:43:51 +08:00
fit2bot
d6bd207a17 fix: Fix the filter logic for counting today's active assets (#9744)
Co-authored-by: Bai <baijiangjie@gmail.com>
2023-02-24 12:17:10 +08:00
Bai
e69ba27ff4 fix: Return the spec_info field in permed asset detail, fixing the Magnus connection issue 2023-02-24 11:41:47 +08:00
ibuler
adbe7c07c6 perf: Fix potential issues in the Community Edition 2023-02-24 00:31:10 +08:00
老广
d1eacf53d4 Merge pull request #9736 from jumpserver/dev
fix: Fix loong64 grpc build failure
2023-02-23 21:50:11 +08:00
老广
472c14fd27 Merge pull request #9734 from jumpserver/pr@dev@fix_build_grpc
fix: Fix loong64 grpc build failure
2023-02-23 21:46:26 +08:00
吴小白
9d4854b8c3 fix: Fix loong64 grpc build failure 2023-02-23 21:34:21 +08:00
30 changed files with 178 additions and 66 deletions

View File

@@ -24,6 +24,7 @@ ARG DEPENDENCIES=" \
libjpeg-dev \
libldap2-dev \
libsasl2-dev \
libssl-dev \
libxml2-dev \
libxmlsec1-dev \
libxmlsec1-openssl \

1
GITSHA Normal file
View File

@@ -0,0 +1 @@
a098bc6c06c9762b7d596d28865d5e5f1afe033e

View File

@@ -6,7 +6,7 @@ from rest_framework.response import Response
from accounts import serializers
from accounts.filters import AccountFilterSet
from accounts.models import Account
from assets.models import Asset
from assets.models import Asset, Node
from common.permissions import UserConfirmation, ConfirmType
from common.views.mixins import RecordViewLogMixin
from orgs.mixins.api import OrgBulkModelViewSet
@@ -28,6 +28,7 @@ class AccountViewSet(OrgBulkModelViewSet):
rbac_perms = {
'partial_update': ['accounts.change_account'],
'su_from_accounts': 'accounts.view_account',
'username_suggestions': 'accounts.view_account',
}
@action(methods=['get'], detail=False, url_path='su-from-accounts')
@@ -47,6 +48,29 @@ class AccountViewSet(OrgBulkModelViewSet):
serializer = serializers.AccountSerializer(accounts, many=True)
return Response(data=serializer.data)
@action(methods=['get'], detail=False, url_path='username-suggestions')
def username_suggestions(self, request, *args, **kwargs):
asset_ids = request.query_params.get('assets')
node_keys = request.query_params.get('keys')
username = request.query_params.get('username')
assets = Asset.objects.all()
if asset_ids:
assets = assets.filter(id__in=asset_ids.split(','))
if node_keys:
patten = Node.get_node_all_children_key_pattern(node_keys.split(','))
assets = assets.filter(nodes__key__regex=patten)
accounts = Account.objects.filter(asset__in=assets)
if username:
accounts = accounts.filter(username__icontains=username)
usernames = list(accounts.values_list('username', flat=True).distinct()[:10])
usernames.sort()
common = [i for i in usernames if i in usernames if i.lower() in ['root', 'admin', 'administrator']]
others = [i for i in usernames if i not in common]
usernames = common + others
return Response(data=usernames)
class AccountSecretsViewSet(RecordViewLogMixin, AccountViewSet):
"""

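The username-suggestions action above collects up to ten distinct usernames matching the asset/node/username filters and floats well-known privileged names to the front. A minimal standalone sketch of that ordering step (plain Python, sample data made up):

def order_suggestions(usernames):
    # Sort alphabetically first, then move common privileged names to the front.
    usernames = sorted(set(usernames))
    common = [u for u in usernames if u.lower() in ('root', 'admin', 'administrator')]
    others = [u for u in usernames if u not in common]
    return common + others

print(order_suggestions(['web', 'root', 'deploy', 'Administrator']))
# -> ['Administrator', 'root', 'deploy', 'web']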
View File

@@ -206,7 +206,7 @@ class ChangeSecretManager(AccountBasePlaybookManager):
serializer = serializer_cls(recorders, many=True)
header = [str(v.label) for v in serializer.child.fields.values()]
rows = [list(row.values()) for row in serializer.data]
rows = [[str(i) for i in row.values()] for row in serializer.data]
if not rows:
return False

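The one-line change above stringifies every cell before the change-secret report rows are built, presumably because serializer output can contain non-string values (UUIDs, dates, lazy translation objects). A tiny illustration with made-up data:

import uuid
import datetime

# A hypothetical serialized row with mixed value types.
row = {'id': uuid.uuid4(), 'asset': 'db-01', 'date_finished': datetime.date(2023, 2, 28)}

raw_cells = list(row.values())               # UUID / date objects leak into the report
text_cells = [str(i) for i in row.values()]  # every cell becomes a plain string
print(text_cells)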
View File

@@ -2,6 +2,7 @@ from django.db import models
from django.utils.translation import ugettext_lazy as _
from accounts.const import AutomationTypes
from jumpserver.utils import has_valid_xpack_license
from .base import AccountBaseAutomation
from .change_secret import ChangeSecretMixin
@@ -27,6 +28,8 @@ class PushAccountAutomation(ChangeSecretMixin, AccountBaseAutomation):
def save(self, *args, **kwargs):
self.type = AutomationTypes.push_account
if not has_valid_xpack_license():
self.is_periodic = False
super().save(*args, **kwargs)
def to_attr_json(self):

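The save() override above forces is_periodic off when there is no valid XPack license, which is what the "disable periodic tasks in the Community Edition" commit refers to. A reduced sketch with the license check stubbed (the real check is jumpserver.utils.has_valid_xpack_license):

def has_valid_xpack_license():
    # Stub: pretend we are running the community edition.
    return False

class PushAccountAutomationSketch:
    def __init__(self, is_periodic=True):
        self.is_periodic = is_periodic

    def save(self):
        # Mirror of the guard added above: no license, no periodic execution.
        if not has_valid_xpack_license():
            self.is_periodic = False
        # ... persist the instance here

task = PushAccountAutomationSketch(is_periodic=True)
task.save()
print(task.is_periodic)  # False under the community edition stub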
View File

@@ -8,7 +8,7 @@ from orgs.utils import tmp_to_org, tmp_to_root_org
logger = get_logger(__file__)
def task_activity_callback(self, pid, trigger, tp):
def task_activity_callback(self, pid, trigger, tp, *args, **kwargs):
model = AutomationTypes.get_type_model(tp)
with tmp_to_root_org():
instance = get_object_or_none(model, pk=pid)

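Several activity callbacks in this changeset gain trailing *args, **kwargs (here and in the assets, ops, and common task modules). The pattern keeps a callback working when the task is later invoked with extra positional or keyword arguments, instead of raising TypeError. A minimal illustration:

def strict_callback(self, pid, trigger, tp):
    # Fixed signature: any extra argument raises TypeError.
    return pid, tp

def tolerant_callback(self, pid, trigger, tp, *args, **kwargs):
    # Trailing *args / **kwargs absorb whatever else the caller passes.
    return pid, tp

try:
    strict_callback(None, 1, 'manual', 'push_account', 'extra-arg')
except TypeError as e:
    print('strict:', e)

print('tolerant:', tolerant_callback(None, 1, 'manual', 'push_account', 'extra-arg', run_id=42))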
View File

@@ -9,7 +9,7 @@ from orgs.utils import tmp_to_org, tmp_to_root_org
logger = get_logger(__file__)
def task_activity_callback(self, pid, trigger):
def task_activity_callback(self, pid, trigger, *args, **kwargs):
from accounts.models import AccountBackupAutomation
with tmp_to_root_org():
plan = get_object_or_none(AccountBackupAutomation, pk=pid)

View File

@@ -27,7 +27,7 @@ def gather_asset_accounts_util(nodes, task_name):
@shared_task(
queue="ansible", verbose_name=_('Gather asset accounts'),
activity_callback=lambda self, node_ids, task_name=None: (node_ids, None)
activity_callback=lambda self, node_ids, task_name=None, *args, **kwargs: (node_ids, None)
)
def gather_asset_accounts_task(node_ids, task_name=None):
if task_name is None:

View File

@@ -13,7 +13,7 @@ __all__ = [
@shared_task(
queue="ansible", verbose_name=_('Push accounts to assets'),
activity_callback=lambda self, account_ids, asset_ids: (account_ids, None)
activity_callback=lambda self, account_ids, *args, **kwargs: (account_ids, None)
)
def push_accounts_to_assets_task(account_ids):
from accounts.models import PushAccountAutomation

View File

@@ -99,13 +99,14 @@ class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):
("platform", serializers.PlatformSerializer),
("suggestion", serializers.MiniAssetSerializer),
("gateways", serializers.GatewaySerializer),
("spec_info", serializers.SpecSerializer)
("spec_info", serializers.SpecSerializer),
)
rbac_perms = (
("match", "assets.match_asset"),
("platform", "assets.view_platform"),
("gateways", "assets.view_gateway"),
("spec_info", "assets.view_asset"),
("info", "assets.view_asset"),
)
extra_filter_backends = [LabelFilterBackend, IpInFilterBackend, NodeFilterBackend]

View File

@@ -21,4 +21,10 @@ class HostViewSet(AssetViewSet):
@action(methods=["GET"], detail=True, url_path="info")
def info(self, *args, **kwargs):
asset = super().get_object()
return Response(asset.info)
serializer = self.get_serializer(asset.info)
data = serializer.data
data['asset'] = {
'id': asset.id, 'name': asset.name,
'address': asset.address
}
return Response(data)

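After this change the host info action serializes asset.info and attaches a small summary of the asset itself. An illustrative response body (the spec_info keys shown are hypothetical; only the nested asset block mirrors the keys added above):

# Illustrative only: shape of the "info" response after the change.
example_response = {
    'cpu_count': 4,               # hypothetical spec_info fields
    'memory': '8 GB',
    'os': 'Ubuntu 22.04',
    'asset': {                    # summary added by the view above
        'id': '9c3a…', 'name': 'web-server-01', 'address': '10.0.0.12',
    },
}
print(example_response['asset']['address'])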
View File

@@ -306,10 +306,11 @@ class AllTypes(ChoicesMixin):
protocols_data = deepcopy(default_protocols)
if _protocols:
protocols_data = [p for p in protocols_data if p['name'] in _protocols]
for p in protocols_data:
setting = _protocols_setting.get(p['name'], {})
p['required'] = p.pop('required', False)
p['default'] = p.pop('default', False)
p['required'] = setting.pop('required', False)
p['default'] = setting.pop('default', False)
p['setting'] = {**p.get('setting', {}), **setting}
platform_data = {

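The hunk above fixes where required and default are read from: previously they were popped off the protocol entry itself (where they are normally absent), now they come from the per-protocol settings passed in. A condensed sketch of the corrected merge (the sample setting keys are made up):

default_protocols = [{'name': 'ssh', 'port': 22, 'setting': {'sftp_enabled': True}}]
protocols_setting = {'ssh': {'required': True, 'default': True, 'sftp_home': '/tmp'}}

for p in default_protocols:
    setting = dict(protocols_setting.get(p['name'], {}))
    # required/default belong to the protocol entry; the rest stays in 'setting'
    p['required'] = setting.pop('required', False)
    p['default'] = setting.pop('default', False)
    p['setting'] = {**p.get('setting', {}), **setting}

print(default_protocols[0])
# {'name': 'ssh', 'port': 22, 'setting': {'sftp_enabled': True, 'sftp_home': '/tmp'},
#  'required': True, 'default': True}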
View File

@@ -93,7 +93,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='asset',
name='address',
field=models.CharField(db_index=True, max_length=1024, verbose_name='Address'),
field=models.CharField(db_index=True, max_length=767, verbose_name='Address'),
),
migrations.AddField(
model_name='asset',

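Shrinking address from 1024 to 767 characters matches the "support utf8mb4" commit message: utf8mb4 reserves up to 4 bytes per character, and an indexed VARCHAR has to fit InnoDB's index key limit (3072 bytes with the DYNAMIC row format). That limit is MySQL's, not something stated in this diff; the arithmetic as a quick check:

MAX_INDEX_BYTES = 3072   # InnoDB index key limit, DYNAMIC/COMPRESSED row format
BYTES_PER_CHAR = 4       # utf8mb4 worst case

print(1024 * BYTES_PER_CHAR)  # 4096 -> cannot be fully indexed
print(767 * BYTES_PER_CHAR)   # 3068 -> fits under the limit
assert 767 * BYTES_PER_CHAR <= MAX_INDEX_BYTES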
View File

@@ -1,12 +1,15 @@
# Generated by Django 3.2.12 on 2022-07-11 06:13
import time
from django.utils import timezone
from itertools import groupby
from django.db import migrations
def migrate_asset_accounts(apps, schema_editor):
auth_book_model = apps.get_model('assets', 'AuthBook')
account_model = apps.get_model('accounts', 'Account')
account_history_model = apps.get_model('accounts', 'HistoricalAccount')
count = 0
bulk_size = 1000
@@ -20,34 +23,35 @@ def migrate_asset_accounts(apps, schema_editor):
break
count += len(auth_books)
accounts = []
# Attributes shared by auth book and account
same_attrs = [
'id', 'username', 'comment', 'date_created', 'date_updated',
'created_by', 'asset_id', 'org_id',
]
# Authentication attributes, may come from the authbook or the systemuser
# Authentication attributes, may come from the auth_book or the system_user
auth_attrs = ['password', 'private_key', 'token']
all_attrs = same_attrs + auth_attrs
accounts = []
for auth_book in auth_books:
values = {'version': 1}
account_values = {'version': 1}
system_user = auth_book.systemuser
if system_user:
# Apply the system user's authentication attributes first
values.update({attr: getattr(system_user, attr, '') for attr in all_attrs})
values['created_by'] = str(system_user.id)
values['privileged'] = system_user.type == 'admin'
account_values.update({attr: getattr(system_user, attr, '') for attr in all_attrs})
account_values['created_by'] = str(system_user.id)
account_values['privileged'] = system_user.type == 'admin' \
or system_user.username in ['root', 'Administrator']
auth_book_auth = {attr: getattr(auth_book, attr, '') for attr in all_attrs if getattr(auth_book, attr, '')}
# Finally use the authbook's authentication attributes
values.update(auth_book_auth)
# Finally prefer the auth_book's authentication attributes
account_values.update(auth_book_auth)
auth_infos = []
username = values['username']
username = account_values['username']
for attr in auth_attrs:
secret = values.pop(attr, None)
secret = account_values.pop(attr, None)
if not secret:
continue
@@ -66,13 +70,48 @@ def migrate_asset_accounts(apps, schema_editor):
auth_infos.append((username, 'password', ''))
for name, secret_type, secret in auth_infos:
account = account_model(**values, name=name, secret=secret, secret_type=secret_type)
if not name:
continue
account = account_model(**account_values, name=name, secret=secret, secret_type=secret_type)
accounts.append(account)
account_model.objects.bulk_create(accounts, ignore_conflicts=True)
accounts.sort(key=lambda x: (x.name, x.asset_id, x.date_updated))
grouped_accounts = groupby(accounts, lambda x: (x.name, x.asset_id))
accounts_to_add = []
accounts_to_history = []
for key, _accounts in grouped_accounts:
_accounts = list(_accounts)
if not _accounts:
continue
_account = _accounts[-1]
accounts_to_add.append(_account)
_account_history = []
for ac in _accounts:
if not ac.secret:
continue
if ac.id != _account.id and ac.secret == _account.secret:
continue
history_data = {
'id': _account.id,
'secret': ac.secret,
'secret_type': ac.secret_type,
'history_date': ac.date_updated,
'history_type': '~',
'history_change_reason': 'from account {}'.format(_account.name),
}
_account_history.append(account_history_model(**history_data))
_account.version = len(_account_history)
accounts_to_history.extend(_account_history)
account_model.objects.bulk_create(accounts_to_add, ignore_conflicts=True)
account_history_model.objects.bulk_create(accounts_to_history, ignore_conflicts=True)
print("\t - Create asset accounts: {}-{} using: {:.2f}s".format(
count - len(auth_books), count, time.time() - start
))
print("\t - accounts: {}".format(len(accounts_to_add)))
print("\t - histories: {}".format(len(accounts_to_history)))
def migrate_db_accounts(apps, schema_editor):
@@ -130,6 +169,9 @@ def migrate_db_accounts(apps, schema_editor):
values['secret_type'] = secret_type
values['secret'] = secret
if not name:
continue
for app in apps:
values['asset_id'] = str(app.id)
account = account_model(**values)

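The reworked migration above no longer bulk-creates every converted row directly: it sorts by (name, asset_id, date_updated), keeps only the newest account per (name, asset_id) pair, and turns the remaining distinct secrets into history records, which is the "move same-named accounts into history" behaviour from the commit message. A stripped-down sketch of that grouping step with plain objects instead of Django models (sample data made up):

from itertools import groupby
from collections import namedtuple

Row = namedtuple('Row', 'id name asset_id secret date_updated')
rows = [
    Row(1, 'root', 'a1', 'old-secret', '2022-01-01'),
    Row(2, 'root', 'a1', 'new-secret', '2022-06-01'),
    Row(3, 'deploy', 'a2', 's3cr3t', '2022-03-01'),
]

rows.sort(key=lambda x: (x.name, x.asset_id, x.date_updated))
to_add, to_history = [], []
for _, group in groupby(rows, key=lambda x: (x.name, x.asset_id)):
    group = list(group)
    latest = group[-1]                      # newest row per (name, asset) wins
    to_add.append(latest)
    for r in group:
        if not r.secret:
            continue
        if r.id != latest.id and r.secret == latest.secret:
            continue                        # older duplicate of the surviving secret
        to_history.append({'id': latest.id, 'secret': r.secret,
                           'history_date': r.date_updated})

print([r.id for r in to_add])   # [3, 2] -> one surviving account per (name, asset)
print(len(to_history))          # 3      -> kept secrets, including the older 'root' one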
View File

@@ -100,7 +100,7 @@ class Asset(NodesRelationMixin, AbsConnectivity, JMSOrgBaseModel):
Type = const.AllTypes
name = models.CharField(max_length=128, verbose_name=_('Name'))
address = models.CharField(max_length=1024, verbose_name=_('Address'), db_index=True)
address = models.CharField(max_length=767, verbose_name=_('Address'), db_index=True)
platform = models.ForeignKey(Platform, on_delete=models.PROTECT, verbose_name=_("Platform"), related_name='assets')
domain = models.ForeignKey("assets.Domain", null=True, blank=True, related_name='assets',
verbose_name=_("Domain"), on_delete=models.SET_NULL)

View File

@@ -489,7 +489,7 @@ class SomeNodesMixin:
return cls.default_node()
if ori_org and ori_org.is_root():
return None
return cls.default_node()
org_roots = cls.org_root_nodes()
org_roots_length = len(org_roots)

View File

@@ -8,7 +8,7 @@ from orgs.utils import tmp_to_root_org, tmp_to_org
logger = get_logger(__file__)
def task_activity_callback(self, pid, trigger, tp):
def task_activity_callback(self, pid, trigger, tp, *args, **kwargs):
model = AutomationTypes.get_type_model(tp)
with tmp_to_root_org():
instance = get_object_or_none(model, pk=pid)

View File

@@ -225,6 +225,7 @@ class MFAMixin:
self.request.session['auth_mfa_time'] = time.time()
self.request.session['auth_mfa_required'] = 0
self.request.session['auth_mfa_type'] = mfa_type
MFABlockUtils(self.request.user.username, self.get_request_ip()).clean_failed_count()
def clean_mfa_mark(self):
keys = ['auth_mfa', 'auth_mfa_time', 'auth_mfa_required', 'auth_mfa_type']

View File

@@ -32,11 +32,14 @@ class UserLoginMFAView(mixins.AuthMixin, FormView):
return super().get(*args, **kwargs)
def form_valid(self, form):
from users.utils import MFABlockUtils
code = form.cleaned_data.get('code')
mfa_type = form.cleaned_data.get('mfa_type')
try:
self._do_check_user_mfa(code, mfa_type)
user, ip = self.get_user_from_session(), self.get_request_ip()
MFABlockUtils(user.username, ip).clean_failed_count()
return redirect_to_guard_view('mfa_ok')
except (errors.MFAFailedError, errors.BlockMFAError) as e:
form.add_error('code', e.msg)

View File

@@ -10,7 +10,7 @@ from .utils import get_logger
logger = get_logger(__file__)
def task_activity_callback(self, subject, message, recipient_list, **kwargs):
def task_activity_callback(self, subject, message, recipient_list, *args, **kwargs):
from users.models import User
email_list = recipient_list
resource_ids = list(User.objects.filter(email__in=email_list).values_list('id', flat=True))

View File

@@ -3,8 +3,10 @@
from celery import shared_task
from celery.exceptions import SoftTimeLimitExceeded
from django.utils.translation import ugettext_lazy as _
from django_celery_beat.models import PeriodicTask
from common.utils import get_logger, get_object_or_none
from ops.celery import app
from orgs.utils import tmp_to_org, tmp_to_root_org
from .celery.decorator import (
register_as_period_task, after_app_ready_start
@@ -19,7 +21,7 @@ from .notifications import ServerPerformanceCheckUtil
logger = get_logger(__file__)
def job_task_activity_callback(self, job_id, trigger):
def job_task_activity_callback(self, job_id, *args, **kwargs):
job = get_object_or_none(Job, id=job_id)
if not job:
return
@@ -48,7 +50,7 @@ def run_ops_job(job_id):
logger.error("Start adhoc execution error: {}".format(e))
def job_execution_task_activity_callback(self, execution_id, trigger):
def job_execution_task_activity_callback(self, execution_id, *args, **kwargs):
execution = get_object_or_none(JobExecution, id=execution_id)
if not execution:
return
@@ -78,16 +80,14 @@ def run_ops_job_execution(execution_id, **kwargs):
@after_app_ready_start
def clean_celery_periodic_tasks():
"""清除celery定时任务"""
need_cleaned_tasks = [
'handle_be_interrupted_change_auth_task_periodic',
]
logger.info('Start clean celery periodic tasks: {}'.format(need_cleaned_tasks))
for task_name in need_cleaned_tasks:
logger.info('Start clean task: {}'.format(task_name))
task = get_celery_periodic_task(task_name)
if task is None:
logger.info('Task does not exist: {}'.format(task_name))
logger.info('Start clean celery periodic tasks.')
register_tasks = PeriodicTask.objects.all()
for task in register_tasks:
if task.task in app.tasks:
continue
task_name = task.name
logger.info('Start clean task: {}'.format(task_name))
disable_celery_periodic_task(task_name)
delete_celery_periodic_task(task_name)
task = get_celery_periodic_task(task_name)

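The rewritten cleanup above drops the hardcoded task list and instead walks every django-celery-beat PeriodicTask, removing the ones whose dotted task path is no longer registered with the Celery app. The core filter, reduced to plain Python (the task names here are invented):

registered = {'ops.tasks.clean_celery_log', 'accounts.tasks.push_accounts_to_assets_task'}
periodic_tasks = [
    {'name': 'clean-log', 'task': 'ops.tasks.clean_celery_log'},
    {'name': 'old-change-auth', 'task': 'assets.tasks.change_auth_task'},  # stale entry
]

for task in periodic_tasks:
    if task['task'] in registered:
        continue  # still a known Celery task, keep it
    # real code: disable_celery_periodic_task(name) then delete_celery_periodic_task(name)
    print('would remove stale periodic task:', task['name'])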
View File

@@ -114,9 +114,7 @@ class OrgResourceStatisticsCache(OrgRelatedCache):
@staticmethod
def compute_total_count_today_active_assets():
t = local_zero_hour()
return Session.objects.filter(
date_start__gte=t, is_success=False
).values('asset_id').distinct().count()
return Session.objects.filter(date_start__gte=t).values('asset_id').distinct().count()
@staticmethod
def compute_total_count_today_failed_sessions():

View File

@@ -102,7 +102,10 @@ def on_post_delete_refresh_org_resource_statistics_cache(sender, instance, **kwa
def _refresh_session_org_resource_statistics_cache(instance: Session):
cache_field_name = ['total_count_online_users', 'total_count_online_sessions', 'total_count_today_failed_sessions']
cache_field_name = [
'total_count_online_users', 'total_count_online_sessions',
'total_count_today_active_assets','total_count_today_failed_sessions'
]
org_cache = OrgResourceStatisticsCache(instance.org)
org_cache.expire(*cache_field_name)

View File

@@ -30,6 +30,12 @@ class BaseUserPermedAssetsApi(SelfOrPKUserMixin, ListAPIView):
filterset_class = AssetFilterSet
serializer_class = serializers.AssetPermedSerializer
def get_serializer_class(self):
serializer_class = super().get_serializer_class()
if self.request.query_params.get('id'):
serializer_class = serializers.AssetPermedDetailSerializer
return serializer_class
def get_queryset(self):
if getattr(self, 'swagger_fake_view', False):
return Asset.objects.none()

View File

@@ -15,7 +15,7 @@ from perms.serializers.permission import ActionChoicesField
__all__ = [
'NodePermedSerializer', 'AssetPermedSerializer',
'AccountsPermedSerializer'
'AssetPermedDetailSerializer', 'AccountsPermedSerializer'
]
@@ -46,6 +46,12 @@ class AssetPermedSerializer(OrgResourceModelSerializerMixin):
return queryset
class AssetPermedDetailSerializer(AssetPermedSerializer):
class Meta(AssetPermedSerializer.Meta):
fields = AssetPermedSerializer.Meta.fields + ['spec_info']
read_only_fields = fields
class NodePermedSerializer(serializers.ModelSerializer):
class Meta:
model = Node

View File

@@ -1,5 +1,6 @@
from collections import defaultdict
from orgs.utils import tmp_to_org
from accounts.models import Account
from accounts.const import AliasAccount
from .permission import AssetPermissionUtil
@@ -16,10 +17,11 @@ class PermAccountUtil(AssetPermissionUtil):
:param asset: Asset
:param account_name: may be a string such as @USER or @INPUT
"""
permed_accounts = self.get_permed_accounts_for_user(user, asset)
accounts_mapper = {account.alias: account for account in permed_accounts}
account = accounts_mapper.get(account_name)
return account
with tmp_to_org(asset.org):
permed_accounts = self.get_permed_accounts_for_user(user, asset)
accounts_mapper = {account.alias: account for account in permed_accounts}
account = accounts_mapper.get(account_name)
return account
def get_permed_accounts_for_user(self, user, asset):
""" 获取授权给用户某个资产的账号 """

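The fix above wraps the permission lookup in tmp_to_org(asset.org) so the account query runs in the asset's own organization instead of whatever org is currently active. In shape this is a context manager that switches the current org and restores it on exit; a generic stand-in (not the project's implementation) looks like this:

from contextlib import contextmanager

_current_org = 'Default'

@contextmanager
def tmp_to_org(org):
    """Temporarily switch the active organization, restoring it afterwards."""
    global _current_org
    previous, _current_org = _current_org, org
    try:
        yield
    finally:
        _current_org = previous

with tmp_to_org('DevOps'):
    print(_current_org)   # 'DevOps' while the permission lookup runs
print(_current_org)       # back to 'Default'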
View File

@@ -18,14 +18,18 @@ user_perms = (
('assets', 'asset', 'match', 'asset'),
('assets', 'systemuser', 'match', 'systemuser'),
('assets', 'node', 'match', 'node'),
("ops", "adhoc", "*", "*"),
("ops", "playbook", "*", "*"),
("ops", "job", "*", "*"),
("ops", "jobexecution", "*", "*"),
)
system_user_perms = (
('authentication', 'connectiontoken', 'add,change,view', 'connectiontoken'),
('authentication', 'temptoken', 'add,change,view', 'temptoken'),
('authentication', 'accesskey', '*', '*'),
('tickets', 'ticket', 'view', 'ticket'),
) + user_perms + _view_all_joined_org_perms
('authentication', 'connectiontoken', 'add,change,view', 'connectiontoken'),
('authentication', 'temptoken', 'add,change,view', 'temptoken'),
('authentication', 'accesskey', '*', '*'),
('tickets', 'ticket', 'view', 'ticket'),
) + user_perms + _view_all_joined_org_perms
_auditor_perms = (
('rbac', 'menupermission', 'view', 'audit'),
@@ -41,7 +45,6 @@ auditor_perms = user_perms + _auditor_perms
system_auditor_perms = system_user_perms + _auditor_perms + _view_root_perms
app_exclude_perms = [
('users', 'user', 'add,delete', 'user'),
('orgs', 'org', 'add,delete,change', 'org'),

View File

@@ -97,13 +97,13 @@ class RBACPermission(permissions.DjangoModelPermissions):
else:
model_cls = queryset.model
except AssertionError as e:
logger.error(f'Error get model cls: {e}')
# logger.error(f'Error get model cls: {e}')
model_cls = None
except AttributeError as e:
logger.error(f'Error get model cls: {e}')
# logger.error(f'Error get model cls: {e}')
model_cls = None
except Exception as e:
logger.error('Error get model class: {} of {}'.format(e, view))
# logger.error('Error get model class: {} of {}'.format(e, view))
raise e
return model_cls

View File

@@ -17,17 +17,17 @@ class WebMethod(TextChoices):
@classmethod
def get_methods(cls):
return {
methods = {
Protocol.ssh: [cls.web_cli, cls.web_sftp],
Protocol.telnet: [cls.web_cli],
Protocol.rdp: [cls.web_gui],
Protocol.vnc: [cls.web_gui],
Protocol.mysql: [cls.web_cli, cls.web_gui],
Protocol.mariadb: [cls.web_cli, cls.web_gui],
Protocol.oracle: [cls.web_cli, cls.web_gui],
Protocol.postgresql: [cls.web_cli, cls.web_gui],
Protocol.sqlserver: [cls.web_cli, cls.web_gui],
Protocol.mysql: [cls.web_cli],
Protocol.mariadb: [cls.web_cli],
Protocol.oracle: [cls.web_cli],
Protocol.postgresql: [cls.web_cli],
Protocol.sqlserver: [cls.web_cli],
Protocol.redis: [cls.web_cli],
Protocol.mongodb: [cls.web_cli],
Protocol.clickhouse: [cls.web_cli],
@@ -35,6 +35,13 @@ class WebMethod(TextChoices):
Protocol.k8s: [cls.web_cli],
Protocol.http: []
}
if not settings.XPACK_ENABLED:
return methods
web_gui_dbs = [Protocol.mysql, Protocol.mariadb, Protocol.oracle, Protocol.postgresql]
for db in web_gui_dbs:
methods[db].append(cls.web_gui)
return methods
class NativeClient(TextChoices):
@@ -130,8 +137,6 @@ class AppletMethod:
from .models import Applet, AppletHost
methods = defaultdict(list)
if not settings.XPACK_ENABLED:
return methods
has_applet_hosts = AppletHost.objects.all().exists()
applets = Applet.objects.filter(is_active=True)

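With this change the web GUI connect method for the relational databases is only advertised when XPACK is enabled, so the community edition keeps the web CLI only (which lines up with the "fix the databases supported by the web GUI" commit). A compact sketch of the gating, with settings and protocol names stubbed:

XPACK_ENABLED = False  # community edition stub

methods = {
    'ssh': ['web_cli', 'web_sftp'],
    'mysql': ['web_cli'],
    'postgresql': ['web_cli'],
}
if XPACK_ENABLED:
    # Only the commercial edition adds the database web GUI method.
    for db in ('mysql', 'postgresql'):
        methods[db].append('web_gui')

print(methods['mysql'])  # ['web_cli'] without XPACK; ['web_cli', 'web_gui'] with it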
View File

@@ -80,14 +80,20 @@ def upload_session_replay_to_external_storage(session_id):
return
@shared_task(verbose_name=_('Run applet host deployment'), activity_callback=lambda did: ([did], ))
@shared_task(
verbose_name=_('Run applet host deployment'),
activity_callback=lambda self, did, *args, **kwargs: ([did], )
)
def run_applet_host_deployment(did):
with tmp_to_builtin_org(system=1):
deployment = AppletHostDeployment.objects.get(id=did)
deployment.start()
@shared_task(verbose_name=_('Install applet'), activity_callback=lambda did, applet_id: ([did],))
@shared_task(
verbose_name=_('Install applet'),
activity_callback=lambda self, did, applet_id, *args, **kwargs: ([did],)
)
def run_applet_host_deployment_install_applet(did, applet_id):
with tmp_to_builtin_org(system=1):
deployment = AppletHostDeployment.objects.get(id=did)