Compare commits

..

39 Commits

Author SHA1 Message Date
feng
3b24dd38b2 perf: Translate 2025-06-27 11:39:53 +08:00
feng
1a78eb8bb7 fix: ES search session count 2025-06-27 10:32:02 +08:00
ibuler
ede5fd906c fix: bitwarden request data encode 2025-06-26 15:23:46 +08:00
ibuler
0e15ba2197 perf: change some i18n 2025-06-26 14:53:32 +08:00
Bryan
a9399dd709 Merge pull request #15608 from jumpserver/dev
v4.10.2
2025-06-19 20:14:21 +08:00
Bryan
d0cb9e5432 Merge pull request #15412 from jumpserver/dev
v4.10.0
2025-05-15 17:11:43 +08:00
老广
558188da90 merge: dev to master
Ready to release
2025-04-17 20:24:45 +08:00
Bryan
ad5460dab8 Merge pull request #15086 from jumpserver/dev
v4.8.0
2025-03-20 18:44:44 +08:00
Bryan
4d37dca0de Merge pull request #14901 from jumpserver/dev
v4.7.0
2025-02-20 10:21:16 +08:00
Bryan
2ca4002624 Merge pull request #14813 from jumpserver/dev
v4.6.0
2025-01-15 14:38:17 +08:00
Bryan
053d640e4c Merge pull request #14699 from jumpserver/dev
v4.5.0
2024-12-19 16:04:45 +08:00
Bryan
f3acc28ded Merge pull request #14697 from jumpserver/dev
v4.5.0
2024-12-19 15:57:11 +08:00
Bryan
25987545db Merge pull request #14511 from jumpserver/dev
v4.4.0
2024-11-21 19:00:35 +08:00
Bryan
6720ecc6e0 Merge pull request #14319 from jumpserver/dev
v4.3.0
2024-10-17 14:55:38 +08:00
老广
0b3a7bb020 Merge pull request #14203 from jumpserver/dev
merge: from dev to master
2024-09-19 19:37:19 +08:00
Bryan
56373e362b Merge pull request #13988 from jumpserver/dev
v4.1.0
2024-08-16 18:40:35 +08:00
Bryan
02fc045370 Merge pull request #13600 from jumpserver/dev
v4.0.0
2024-07-03 19:04:35 +08:00
Bryan
e4ac73896f Merge pull request #13452 from jumpserver/dev
v3.10.11-lts
2024-06-19 16:01:26 +08:00
Bryan
1518f792d6 Merge pull request #13236 from jumpserver/dev
v3.10.10-lts
2024-05-16 16:04:07 +08:00
Bai
67277dd622 fix: Fix dashboard session ranking counts all showing 1 2024-04-22 19:42:33 +08:00
Bryan
82e7f020ea Merge pull request #13094 from jumpserver/dev
v3.10.9 (dev to master)
2024-04-22 19:39:53 +08:00
Bryan
f20b9e01ab Merge pull request #13062 from jumpserver/dev
v3.10.8 dev to master
2024-04-18 18:01:20 +08:00
Bryan
8cf8a3701b Merge pull request #13059 from jumpserver/dev
v3.10.8
2024-04-18 17:16:37 +08:00
Bryan
7ba24293d1 Merge pull request #12736 from jumpserver/pr@dev@master_fix
fix: Resolve conflicts
2024-02-29 16:38:43 +08:00
Bai
f10114c9ed fix: Resolve conflicts 2024-02-29 16:37:10 +08:00
Bryan
cf31cbfb07 Merge pull request #12729 from jumpserver/dev
v3.10.4
2024-02-29 16:19:59 +08:00
wangruidong
0edad24d5d fix: Asset expiration notification failed to send 2024-02-04 11:41:48 +08:00
ibuler
1f1c1a9157 fix: Fix the scheduled user-activity check task failing to run 2024-01-23 09:28:38 +00:00
feng
6c9d271ae1 fix: celery beat fails to start when the redis password contains special characters 2024-01-22 06:18:34 +00:00
Bai
6ff852e225 perf: Fix missing deduplication when using Count 2024-01-22 06:16:25 +00:00
Bryan
baa75dc735 Merge pull request #12566 from jumpserver/master
v3.10.2
2024-01-17 07:34:28 -04:00
Bryan
8a9f0436b8 Merge pull request #12565 from jumpserver/dev
v3.10.2
2024-01-17 07:23:30 -04:00
Bryan
a9620a3cbe Merge pull request #12461 from jumpserver/master
v3.10.1
2023-12-29 11:33:05 +05:00
Bryan
769e7dc8a0 Merge pull request #12460 from jumpserver/dev
v3.10.1
2023-12-29 11:20:36 +05:00
Bryan
2a70449411 Merge pull request #12458 from jumpserver/dev
v3.10.1
2023-12-29 11:01:13 +05:00
Bryan
8df720f19e Merge pull request #12401 from jumpserver/dev
v3.10
2023-12-21 15:14:19 +05:00
老广
dabbb45f6e Merge pull request #12144 from jumpserver/dev
v3.9.0
2023-11-16 18:23:05 +08:00
Bryan
ce24c1c3fd Merge pull request #11914 from jumpserver/dev
v3.8.0
2023-10-19 03:37:39 -05:00
Bryan
3c54c82ce9 Merge pull request #11636 from jumpserver/dev
v3.7.0
2023-09-21 17:02:48 +08:00
331 changed files with 6596 additions and 17254 deletions

View File

@@ -1,4 +1,4 @@
FROM jumpserver/core-base:20250827_025554 AS stage-build
FROM jumpserver/core-base:20250509_094529 AS stage-build
ARG VERSION
@@ -33,7 +33,6 @@ ARG TOOLS=" \
default-libmysqlclient-dev \
openssh-client \
sshpass \
nmap \
bubblewrap"
ARG APT_MIRROR=http://deb.debian.org

View File

@@ -13,9 +13,7 @@ ARG TOOLS=" \
nmap \
telnet \
vim \
postgresql-client-13 \
wget \
poppler-utils"
wget"
RUN set -ex \
&& apt-get update \
@@ -28,5 +26,5 @@ WORKDIR /opt/jumpserver
ARG PIP_MIRROR=https://pypi.org/simple
RUN set -ex \
&& uv pip install -i${PIP_MIRROR} --group xpack \
&& playwright install chromium --with-deps --only-shell
&& uv pip install -i${PIP_MIRROR} --group xpack

View File

@@ -2,7 +2,7 @@
<a name="readme-top"></a>
<a href="https://jumpserver.com" target="_blank"><img src="https://download.jumpserver.org/images/jumpserver-logo.svg" alt="JumpServer" width="300" /></a>
## An open-source PAM platform (Bastion Host)
## An open-source PAM tool (Bastion Host)
[![][license-shield]][license-link]
[![][docs-shield]][docs-link]
@@ -19,7 +19,7 @@
## What is JumpServer?
JumpServer is an open-source Privileged Access Management (PAM) platform that provides DevOps and IT teams with on-demand and secure access to SSH, RDP, Kubernetes, Database and RemoteApp endpoints through a web browser.
JumpServer is an open-source Privileged Access Management (PAM) tool that provides DevOps and IT teams with on-demand and secure access to SSH, RDP, Kubernetes, Database and RemoteApp endpoints through a web browser.
<picture>
@@ -85,8 +85,6 @@ JumpServer consists of multiple key components, which collectively form the func
| [Nec](https://github.com/jumpserver/nec) | <img alt="Nec" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE VNC Proxy Connector |
| [Facelive](https://github.com/jumpserver/facelive) | <img alt="Facelive" src="https://img.shields.io/badge/release-private-red" /> | JumpServer EE Facial Recognition |
## Third-party projects
- [jumpserver-grafana-dashboard](https://github.com/acerrah/jumpserver-grafana-dashboard) JumpServer with grafana dashboard
## Contributing

View File

@@ -41,8 +41,8 @@ class AccountViewSet(OrgBulkModelViewSet):
'partial_update': ['accounts.change_account'],
'su_from_accounts': 'accounts.view_account',
'clear_secret': 'accounts.change_account',
'move_to_assets': 'accounts.delete_account',
'copy_to_assets': 'accounts.add_account',
'move_to_assets': 'accounts.create_account',
'copy_to_assets': 'accounts.create_account',
}
export_as_zip = True
@@ -190,7 +190,6 @@ class AccountHistoriesSecretAPI(ExtraFilterFieldsMixin, AccountRecordViewLogMixi
rbac_perms = {
'GET': 'accounts.view_accountsecret',
}
queryset = Account.history.model.objects.none()
@lazyproperty
def account(self) -> Account:

View File

@@ -20,7 +20,7 @@ __all__ = ['PamDashboardApi']
class PamDashboardApi(APIView):
http_method_names = ['get']
rbac_perms = {
'GET': 'rbac.view_pam',
'GET': 'accounts.view_account',
}
@staticmethod

View File

@@ -12,8 +12,6 @@ class VirtualAccountViewSet(OrgBulkModelViewSet):
filterset_fields = ('alias',)
def get_queryset(self):
if getattr(self, "swagger_fake_view", False):
return VirtualAccount.objects.none()
return VirtualAccount.get_or_init_queryset()
def get_object(self, ):

View File

@@ -41,7 +41,6 @@ class AutomationAssetsListApi(generics.ListAPIView):
class AutomationRemoveAssetApi(generics.UpdateAPIView):
model = BaseAutomation
queryset = BaseAutomation.objects.all()
serializer_class = serializers.UpdateAssetSerializer
http_method_names = ['patch']
@@ -60,7 +59,6 @@ class AutomationRemoveAssetApi(generics.UpdateAPIView):
class AutomationAddAssetApi(generics.UpdateAPIView):
model = BaseAutomation
queryset = BaseAutomation.objects.all()
serializer_class = serializers.UpdateAssetSerializer
http_method_names = ['patch']

View File

@@ -97,13 +97,12 @@ class ChangeSecretRecordViewSet(mixins.ListModelMixin, OrgGenericViewSet):
def execute(self, request, *args, **kwargs):
record_ids = request.data.get('record_ids')
records = self.get_queryset().filter(id__in=record_ids)
if not records.exists():
execution_count = records.values_list('execution_id', flat=True).distinct().count()
if execution_count != 1:
return Response(
{'detail': 'No valid records found'},
{'detail': 'Only one execution is allowed to execute'},
status=status.HTTP_400_BAD_REQUEST
)
record_ids = [str(_id) for _id in records.values_list('id', flat=True)]
task = execute_automation_record_task.delay(record_ids, self.tp)
return Response({'task': task.id}, status=status.HTTP_200_OK)
@@ -154,10 +153,12 @@ class ChangSecretAddAssetApi(AutomationAddAssetApi):
model = ChangeSecretAutomation
serializer_class = serializers.ChangeSecretUpdateAssetSerializer
class ChangSecretNodeAddRemoveApi(AutomationNodeAddRemoveApi):
model = ChangeSecretAutomation
serializer_class = serializers.ChangeSecretUpdateNodeSerializer
class ChangeSecretStatusViewSet(OrgBulkModelViewSet):
perm_model = ChangeSecretAutomation
filterset_class = ChangeSecretStatusFilterSet
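
Note on the `execute` hunk above: one side only checks that some records exist, while the other requires that every selected record belongs to a single execution. Pulled out of context, that stricter guard is just a distinct count over `execution_id` (a minimal sketch using the field names from the hunk, surrounding view code omitted):

    def records_share_one_execution(records) -> bool:
        # True only when all selected change-secret records point at the same execution
        return records.values_list('execution_id', flat=True).distinct().count() == 1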

View File

@@ -62,8 +62,7 @@ class ChangeSecretDashboardApi(APIView):
status_counts = defaultdict(lambda: defaultdict(int))
for date_finished, status in results:
dt_local = timezone.localtime(date_finished)
date_str = str(dt_local.date())
date_str = str(date_finished.date())
if status == ChangeSecretRecordStatusChoice.failed:
status_counts[date_str]['failed'] += 1
elif status == ChangeSecretRecordStatusChoice.success:
@@ -91,10 +90,10 @@ class ChangeSecretDashboardApi(APIView):
def get_change_secret_asset_queryset(self):
qs = self.change_secrets_queryset
node_ids = qs.values_list('nodes', flat=True).distinct()
nodes = Node.objects.filter(id__in=node_ids).only('id', 'key')
node_ids = qs.filter(nodes__isnull=False).values_list('nodes', flat=True).distinct()
nodes = Node.objects.filter(id__in=node_ids)
node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list('id', flat=True)
direct_asset_ids = qs.values_list('assets', flat=True).distinct()
direct_asset_ids = qs.filter(assets__isnull=False).values_list('assets', flat=True).distinct()
asset_ids = set(list(direct_asset_ids) + list(node_asset_ids))
return Asset.objects.filter(id__in=asset_ids)

View File

@@ -45,10 +45,10 @@ class CheckAccountAutomationViewSet(OrgBulkModelViewSet):
class CheckAccountExecutionViewSet(AutomationExecutionViewSet):
rbac_perms = (
("list", "accounts.view_checkaccountexecution"),
("retrieve", "accounts.view_checkaccountexecution"),
("retrieve", "accounts.view_checkaccountsexecution"),
("create", "accounts.add_checkaccountexecution"),
("adhoc", "accounts.add_checkaccountexecution"),
("report", "accounts.view_checkaccountexecution"),
("report", "accounts.view_checkaccountsexecution"),
)
ordering = ("-date_created",)
tp = AutomationTypes.check_account
@@ -150,9 +150,6 @@ class CheckAccountEngineViewSet(JMSModelViewSet):
http_method_names = ['get', 'options']
def get_queryset(self):
if getattr(self, "swagger_fake_view", False):
return CheckAccountEngine.objects.none()
return CheckAccountEngine.get_default_engines()
def filter_queryset(self, queryset: list):

View File

@@ -63,10 +63,12 @@ class PushAccountRemoveAssetApi(AutomationRemoveAssetApi):
model = PushAccountAutomation
serializer_class = serializers.PushAccountUpdateAssetSerializer
class PushAccountAddAssetApi(AutomationAddAssetApi):
model = PushAccountAutomation
serializer_class = serializers.PushAccountUpdateAssetSerializer
class PushAccountNodeAddRemoveApi(AutomationNodeAddRemoveApi):
model = PushAccountAutomation
serializer_class = serializers.PushAccountUpdateNodeSerializer
serializer_class = serializers.PushAccountUpdateNodeSerializer

View File

@@ -105,10 +105,6 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
h['account']['mode'] = 'sysdba' if account.privileged else None
return h
def add_extra_params(self, host, **kwargs):
host['ssh_params'] = {}
return host
def host_callback(self, host, asset=None, account=None, automation=None, path_dir=None, **kwargs):
host = super().host_callback(
host, asset=asset, account=account, automation=automation,
@@ -117,7 +113,8 @@ class BaseChangeSecretPushManager(AccountBasePlaybookManager):
if host.get('error'):
return host
host = self.add_extra_params(host, automation=automation)
host['ssh_params'] = {}
accounts = self.get_accounts(account)
existing_ids = set(map(str, accounts.values_list('id', flat=True)))
missing_ids = set(map(str, self.account_ids)) - existing_ids

View File

@@ -53,6 +53,4 @@
ssl_certfile: "{{ jms_asset.secret_info.client_key | default('') }}"
connection_options:
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
when: check_conn_after_change
register: result
failed_when: not result.is_available
when: check_conn_after_change

View File

@@ -39,8 +39,7 @@
name: "{{ account.username }}"
password: "{{ account.secret }}"
host: "%"
priv: "{{ omit if db_name == '' else db_name + '.*:ALL' }}"
append_privs: "{{ db_name != '' | bool }}"
priv: "{{ account.username + '.*:USAGE' if db_name == '' else db_name + '.*:ALL' }}"
ignore_errors: true
when: db_info is succeeded

View File

@@ -56,5 +56,3 @@
ssl_key: "{{ ssl_key if check_ssl and ssl_key | length > 0 else omit }}"
ssl_mode: "{{ jms_asset.spec_info.pg_ssl_mode }}"
when: check_conn_after_change
register: result
failed_when: not result.is_available

View File

@@ -8,7 +8,7 @@ type:
params:
- name: groups
type: str
label: "{{ 'Params groups label' | trans }}"
label: '用户组'
default: 'Users,Remote Desktop Users'
help_text: "{{ 'Params groups help text' | trans }}"
@@ -24,7 +24,3 @@ i18n:
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
Params groups label:
zh: '用户组'
ja: 'グループ'
en: 'Groups'

View File

@@ -9,7 +9,7 @@ type:
params:
- name: groups
type: str
label: "{{ 'Params groups label' | trans }}"
label: '用户组'
default: 'Users,Remote Desktop Users'
help_text: "{{ 'Params groups help text' | trans }}"
@@ -25,8 +25,3 @@ i18n:
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
Params groups label:
zh: '用户组'
ja: 'グループ'
en: 'Groups'

View File

@@ -9,24 +9,19 @@ priority: 49
params:
- name: groups
type: str
label: "{{ 'Params groups label' | trans }}"
label: '用户组'
default: 'Users,Remote Desktop Users'
help_text: "{{ 'Params groups help text' | trans }}"
i18n:
Windows account change secret rdp verify:
zh: '使用 Ansible 模块 win_user 执行 Windows 账号改密(最后使用 Python 模块 pyfreerdp 验证账号的可连接性'
ja: 'Ansible モジュール win_user を使用して Windows アカウントのパスワードを変更します (最後に Python モジュール pyfreerdp を使用してアカウントの接続を確認します)'
en: 'Use the Ansible module win_user to change the Windows account password (finally use the Python module pyfreerdp to verify the account connectivity)'
zh: '使用 Ansible 模块 win_user 执行 Windows 账号改密 RDP 协议测试最后的可连接性'
ja: 'Ansibleモジュールwin_userWindowsアカウントの改密RDPプロトコルテストの最後の接続性を実行する'
en: 'Using the Ansible module win_user performs Windows account encryption RDP protocol testing for final connectivity'
Params groups help text:
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
Params groups label:
zh: '用户组'
ja: 'グループ'
en: 'Groups'

View File

@@ -5,9 +5,6 @@ from django.conf import settings
from django.utils.translation import gettext_lazy as _
from xlsxwriter import Workbook
from assets.automations.methods import platform_automation_methods as asset_methods
from assets.const import AutomationTypes as AssetAutomationTypes
from accounts.automations.methods import platform_automation_methods as account_methods
from accounts.const import (
AutomationTypes, SecretStrategy, ChangeSecretRecordStatusChoice
)
@@ -25,22 +22,6 @@ logger = get_logger(__name__)
class ChangeSecretManager(BaseChangeSecretPushManager):
ansible_account_prefer = ''
def get_method_id_meta_mapper(self):
return {
method["id"]: method for method in self.platform_automation_methods
}
@property
def platform_automation_methods(self):
return asset_methods + account_methods
def add_extra_params(self, host, **kwargs):
host = super().add_extra_params(host, **kwargs)
automation = kwargs.get('automation')
for extra_type in [AssetAutomationTypes.ping, AutomationTypes.verify_account]:
host[f"{extra_type}_params"] = self.get_params(automation, extra_type)
return host
@classmethod
def method_type(cls):
return AutomationTypes.change_secret

View File

@@ -1,36 +0,0 @@
- hosts: website
gather_facts: no
vars:
ansible_python_interpreter: "{{ local_python_interpreter }}"
tasks:
- name: Test privileged account
website_ping:
login_host: "{{ jms_asset.address }}"
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
steps: "{{ ping_params.steps }}"
load_state: "{{ ping_params.load_state }}"
- name: "Change {{ account.username }} password"
website_user:
login_host: "{{ jms_asset.address }}"
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
steps: "{{ params.steps }}"
load_state: "{{ params.load_state }}"
name: "{{ account.username }}"
password: "{{ account.secret }}"
ignore_errors: true
register: change_secret_result
- name: "Verify {{ account.username }} password"
website_ping:
login_host: "{{ jms_asset.address }}"
login_user: "{{ account.username }}"
login_password: "{{ account.secret }}"
steps: "{{ verify_account_params.steps }}"
load_state: "{{ verify_account_params.load_state }}"
when:
- check_conn_after_change or change_secret_result.failed | default(false)
delegate_to: localhost

View File

@@ -1,51 +0,0 @@
id: change_account_website
name: "{{ 'Website account change secret' | trans }}"
category: web
type:
- website
method: change_secret
priority: 50
params:
- name: load_state
type: choice
label: "{{ 'Load state' | trans }}"
choices:
- [ networkidle, "{{ 'Network idle' | trans }}" ]
- [ domcontentloaded, "{{ 'Dom content loaded' | trans }}" ]
- [ load, "{{ 'Load completed' | trans }}" ]
default: 'load'
- name: steps
type: list
default: [ ]
label: "{{ 'Steps' | trans }}"
help_text: "{{ 'Params step help text' | trans }}"
i18n:
Website account change secret:
zh: 使用 Playwright 模拟浏览器变更账号密码
ja: Playwright を使用してブラウザをシミュレートし、アカウントのパスワードを変更します
en: Use Playwright to simulate a browser for account password change.
Load state:
zh: 加载状态检测
en: Load state detection
ja: ロード状態の検出
Steps:
zh: 步骤
en: Steps
ja: 手順
Network idle:
zh: 网络空闲
en: Network idle
ja: ネットワークが空いた状態
Dom content loaded:
zh: 文档内容加载完成
en: Dom content loaded
ja: ドキュメントの内容がロードされた状態
Load completed:
zh: 全部加载完成
en: All load completed
ja: すべてのロードが完了した状態
Params step help text:
zh: 根据配置决定任务执行步骤
ja: 設定に基づいてタスクの実行ステップを決定する
en: Determine task execution steps based on configuration

View File

@@ -15,13 +15,11 @@ from common.decorators import bulk_create_decorator, bulk_update_decorator
from settings.models import LeakPasswords
# 已设置手动 finish
@bulk_create_decorator(AccountRisk)
def create_risk(data):
return AccountRisk(**data)
# 已设置手动 finish
@bulk_update_decorator(AccountRisk, update_fields=["details", "status"])
def update_risk(risk):
return risk
@@ -219,9 +217,6 @@ class CheckAccountManager(BaseManager):
"details": [{"datetime": now, 'type': 'init'}],
})
create_risk.finish()
update_risk.finish()
def pre_run(self):
super().pre_run()
self.assets = self.execution.get_all_assets()
@@ -240,11 +235,6 @@ class CheckAccountManager(BaseManager):
print("Check: {} => {}".format(account, msg))
if not error:
AccountRisk.objects.filter(
asset=account.asset,
username=account.username,
risk=handler.risk
).delete()
continue
self.add_risk(handler.risk, account)
self.commit_risks(_assets)

View File

@@ -30,16 +30,6 @@ common_risk_items = [
diff_items = risk_items + common_risk_items
@bulk_create_decorator(AccountRisk)
def _create_risk(data):
return AccountRisk(**data)
@bulk_update_decorator(AccountRisk, update_fields=["details"])
def _update_risk(account):
return account
def format_datetime(value):
if isinstance(value, timezone.datetime):
return value.strftime("%Y-%m-%d %H:%M:%S")
@@ -151,17 +141,25 @@ class AnalyseAccountRisk:
found = assets_risks.get(key)
if not found:
_create_risk(dict(**d, details=[detail]))
self._create_risk(dict(**d, details=[detail]))
continue
found.details.append(detail)
_update_risk(found)
self._update_risk(found)
@bulk_create_decorator(AccountRisk)
def _create_risk(self, data):
return AccountRisk(**data)
@bulk_update_decorator(AccountRisk, update_fields=["details"])
def _update_risk(self, account):
return account
def lost_accounts(self, asset, lost_users):
if not self.check_risk:
return
for user in lost_users:
_create_risk(
self._create_risk(
dict(
asset_id=str(asset.id),
username=user,
@@ -178,7 +176,7 @@ class AnalyseAccountRisk:
self._analyse_item_changed(ga, d)
if not sys_found:
basic = {"asset": asset, "username": d["username"], 'gathered_account': ga}
_create_risk(
self._create_risk(
dict(
**basic,
risk=RiskChoice.new_found,
@@ -390,7 +388,6 @@ class GatherAccountsManager(AccountBasePlaybookManager):
self.update_gathered_account(ori_account, d)
ori_found = username in ori_users
need_analyser_gather_account.append((asset, ga, d, ori_found))
# 这里顺序不能调整risk 外键关联了 gathered_account 主键 id所以在创建 risk 需要保证 gathered_account 已经创建完成
self.create_gathered_account.finish()
self.update_gathered_account.finish()
for analysis_data in need_analyser_gather_account:
@@ -406,9 +403,6 @@ class GatherAccountsManager(AccountBasePlaybookManager):
present=True
)
# 因为有 bulk create, bulk update, 所以这里需要 sleep 一下,等待数据同步
_update_risk.finish()
_create_risk.finish()
time.sleep(0.5)
def get_report_template(self):
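
The two risk-handling hunks above differ in where `_create_risk`/`_update_risk` live: one side defines them as module-level functions wrapped in `bulk_create_decorator(AccountRisk)` / `bulk_update_decorator(...)` and flushes them with explicit `.finish()` calls, the other makes them instance methods. The decorators themselves are not shown in this diff; purely to illustrate the buffering idea (names, batch size, and behavior are assumptions, not JumpServer's actual implementation), such a decorator could be sketched as:

    from functools import wraps

    def bulk_create_decorator(model, batch_size=100):
        # Hypothetical sketch: buffer the objects returned by the wrapped
        # factory and flush them with Model.objects.bulk_create().
        def decorator(factory):
            buffer = []

            @wraps(factory)
            def wrapper(*args, **kwargs):
                obj = factory(*args, **kwargs)
                buffer.append(obj)
                if len(buffer) >= batch_size:
                    model.objects.bulk_create(buffer)
                    buffer.clear()
                return obj

            def finish():
                # Callers invoke this once they stop producing objects,
                # which is what the explicit .finish() calls in the hunks do.
                if buffer:
                    model.objects.bulk_create(buffer)
                    buffer.clear()

            wrapper.finish = finish
            return wrapper
        return decorator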

View File

@@ -54,5 +54,3 @@
connection_options:
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
when: check_conn_after_change
register: result
failed_when: not result.is_available

View File

@@ -39,8 +39,7 @@
name: "{{ account.username }}"
password: "{{ account.secret }}"
host: "%"
priv: "{{ omit if db_name == '' else db_name + '.*:ALL' }}"
append_privs: "{{ db_name != '' | bool }}"
priv: "{{ account.username + '.*:USAGE' if db_name == '' else db_name + '.*:ALL' }}"
ignore_errors: true
when: db_info is succeeded

View File

@@ -8,7 +8,7 @@ type:
params:
- name: groups
type: str
label: "{{ 'Params groups label' | trans }}"
label: '用户组'
default: 'Users,Remote Desktop Users'
help_text: "{{ 'Params groups help text' | trans }}"
@@ -22,8 +22,3 @@ i18n:
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
Params groups label:
zh: '用户组'
ja: 'グループ'
en: 'Groups'

View File

@@ -9,7 +9,7 @@ type:
params:
- name: groups
type: str
label: "{{ 'Params groups label' | trans }}"
label: '用户组'
default: 'Users,Remote Desktop Users'
help_text: "{{ 'Params groups help text' | trans }}"
@@ -23,8 +23,3 @@ i18n:
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
Params groups label:
zh: '用户组'
ja: 'グループ'
en: 'Groups'

View File

@@ -9,7 +9,7 @@ priority: 49
params:
- name: groups
type: str
label: "{{ 'Params groups label' | trans }}"
label: '用户组'
default: 'Users,Remote Desktop Users'
help_text: "{{ 'Params groups help text' | trans }}"
@@ -23,8 +23,3 @@ i18n:
zh: '请输入用户组,多个用户组使用逗号分隔(需填写已存在的用户组)'
ja: 'グループを入力してください。複数のグループはコンマで区切ってください(既存のグループを入力してください)'
en: 'Please enter the group. Multiple groups are separated by commas (please enter the existing group)'
Params groups label:
zh: '用户组'
ja: 'グループ'
en: 'Groups'

View File

@@ -3,7 +3,7 @@
vars:
ansible_shell_type: sh
ansible_connection: local
ansible_python_interpreter: "{{ local_python_interpreter }}"
ansible_python_interpreter: /opt/py3/bin/python
tasks:
- name: Verify account (pyfreerdp)

View File

@@ -16,5 +16,3 @@
ssl_certfile: "{{ jms_asset.secret_info.client_key | default('') }}"
connection_options:
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert }}"
register: result
failed_when: not result.is_available

View File

@@ -1,13 +0,0 @@
- hosts: website
gather_facts: no
vars:
ansible_python_interpreter: "{{ local_python_interpreter }}"
tasks:
- name: Verify account
website_ping:
login_host: "{{ jms_asset.address }}"
login_user: "{{ account.username }}"
login_password: "{{ account.secret }}"
steps: "{{ params.steps }}"
load_state: "{{ params.load_state }}"

View File

@@ -1,50 +0,0 @@
id: verify_account_website
name: "{{ 'Website account verify' | trans }}"
category: web
type:
- website
method: verify_account
priority: 50
params:
- name: load_state
type: choice
label: "{{ 'Load state' | trans }}"
choices:
- [ networkidle, "{{ 'Network idle' | trans }}" ]
- [ domcontentloaded, "{{ 'Dom content loaded' | trans }}" ]
- [ load, "{{ 'Load completed' | trans }}" ]
default: 'load'
- name: steps
type: list
label: "{{ 'Steps' | trans }}"
help_text: "{{ 'Params step help text' | trans }}"
default: []
i18n:
Website account verify:
zh: 使用 Playwright 模拟浏览器验证账号
ja: Playwright を使用してブラウザをシミュレートし、アカウントの検証を行います
en: Use Playwright to simulate a browser for account verification.
Load state:
zh: 加载状态检测
en: Load state detection
ja: ロード状態の検出
Steps:
zh: 步骤
en: Steps
ja: 手順
Network idle:
zh: 网络空闲
en: Network idle
ja: ネットワークが空いた状態
Dom content loaded:
zh: 文档内容加载完成
en: Dom content loaded
ja: ドキュメントの内容がロードされた状態
Load completed:
zh: 全部加载完成
en: All load completed
ja: すべてのロードが完了した状態
Params step help text:
zh: 配置步骤,根据配置决定任务执行步骤
ja: パラメータを設定し、設定に基づいてタスクの実行手順を決定します
en: Configure steps, and determine the task execution steps based on the configuration.

View File

@@ -1,5 +1,8 @@
# -*- coding: utf-8 -*-
#
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError
from azure.identity import ClientSecretCredential
from azure.keyvault.secrets import SecretClient
from common.utils import get_logger
@@ -11,9 +14,6 @@ __all__ = ['AZUREVaultClient']
class AZUREVaultClient(object):
def __init__(self, vault_url, tenant_id, client_id, client_secret):
from azure.identity import ClientSecretCredential
from azure.keyvault.secrets import SecretClient
authentication_endpoint = 'https://login.microsoftonline.com/' \
if ('azure.net' in vault_url) else 'https://login.chinacloudapi.cn/'
@@ -23,8 +23,6 @@ class AZUREVaultClient(object):
self.client = SecretClient(vault_url=vault_url, credential=credentials)
def is_active(self):
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError
try:
self.client.set_secret('jumpserver', '666')
except (ResourceNotFoundError, ClientAuthenticationError) as e:
@@ -34,8 +32,6 @@ class AZUREVaultClient(object):
return True, ''
def get(self, name, version=None):
from azure.core.exceptions import ResourceNotFoundError, ClientAuthenticationError
try:
secret = self.client.get_secret(name, version)
return secret.value

View File

@@ -46,16 +46,11 @@ class Migration(migrations.Migration):
],
options={
'verbose_name': 'Account',
'permissions': [
('view_accountsecret', 'Can view asset account secret'),
('view_historyaccount', 'Can view asset history account'),
('view_historyaccountsecret', 'Can view asset history account secret'),
('verify_account', 'Can verify account'),
('push_account', 'Can push account'),
('remove_account', 'Can remove account'),
('view_accountsession', 'Can view session'),
('view_accountactivity', 'Can view activity')
],
'permissions': [('view_accountsecret', 'Can view asset account secret'),
('view_historyaccount', 'Can view asset history account'),
('view_historyaccountsecret', 'Can view asset history account secret'),
('verify_account', 'Can verify account'), ('push_account', 'Can push account'),
('remove_account', 'Can remove account')],
},
),
migrations.CreateModel(

View File

@@ -335,7 +335,6 @@ class Migration(migrations.Migration):
],
options={
"abstract": False,
"verbose_name": "Check engine",
},
),
migrations.CreateModel(

View File

@@ -116,8 +116,6 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
('verify_account', _('Can verify account')),
('push_account', _('Can push account')),
('remove_account', _('Can remove account')),
('view_accountsession', _('Can view session')),
('view_accountactivity', _('Can view activity')),
]
def __str__(self):
@@ -132,7 +130,7 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
return self.asset.platform
@lazyproperty
def alias(self) -> str:
def alias(self):
"""
别称,因为有虚拟账号,@INPUT @MANUAL @USER, 否则为 id
"""
@@ -140,13 +138,13 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
return self.username
return str(self.id)
def is_virtual(self) -> bool:
def is_virtual(self):
"""
不要用 username 去判断,因为可能是构造的 account 对象,设置了同名账号的用户名,
"""
return self.alias.startswith('@')
def is_ds_account(self) -> bool:
def is_ds_account(self):
if self.is_virtual():
return ''
if not self.asset.is_directory_service:
@@ -160,7 +158,7 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
return self.asset.ds
@lazyproperty
def ds_domain(self) -> str:
def ds_domain(self):
"""这个不能去掉perm_account 会动态设置这个值,以更改 full_username"""
if self.is_virtual():
return ''
@@ -172,17 +170,17 @@ class Account(AbsConnectivity, LabeledMixin, BaseAccount, JSONFilterMixin):
return '@' in self.username or '\\' in self.username
@property
def full_username(self) -> str:
def full_username(self):
if not self.username_has_domain() and self.ds_domain:
return '{}@{}'.format(self.username, self.ds_domain)
return self.username
@lazyproperty
def has_secret(self) -> bool:
def has_secret(self):
return bool(self.secret)
@lazyproperty
def versions(self) -> int:
def versions(self):
return self.history.count()
def get_su_from_accounts(self):

View File

@@ -33,7 +33,7 @@ class IntegrationApplication(JMSOrgBaseModel):
return qs.filter(*query)
@property
def accounts_amount(self) -> int:
def accounts_amount(self):
return self.get_accounts().count()
@property

View File

@@ -68,10 +68,8 @@ class AccountRisk(JMSOrgBaseModel):
related_name='risks', null=True
)
risk = models.CharField(max_length=128, verbose_name=_('Risk'), choices=RiskChoice.choices)
status = models.CharField(
max_length=32, choices=ConfirmOrIgnore.choices, default=ConfirmOrIgnore.pending,
blank=True, verbose_name=_('Status')
)
status = models.CharField(max_length=32, choices=ConfirmOrIgnore.choices, default=ConfirmOrIgnore.pending,
blank=True, verbose_name=_('Status'))
details = models.JSONField(default=list, verbose_name=_('Detail'))
class Meta:
@@ -121,9 +119,6 @@ class CheckAccountEngine(JMSBaseModel):
def __str__(self):
return self.name
class Meta:
verbose_name = _('Check engine')
@staticmethod
def get_default_engines():
data = [

View File

@@ -75,11 +75,11 @@ class BaseAccount(VaultModelMixin, JMSOrgBaseModel):
return bool(self.secret)
@property
def has_username(self) -> bool:
def has_username(self):
return bool(self.username)
@property
def spec_info(self) -> dict:
def spec_info(self):
data = {}
if self.secret_type != SecretType.SSH_KEY:
return data
@@ -87,13 +87,13 @@ class BaseAccount(VaultModelMixin, JMSOrgBaseModel):
return data
@property
def password(self) -> str:
def password(self):
if self.secret_type == SecretType.PASSWORD:
return self.secret
return None
@property
def private_key(self) -> str:
def private_key(self):
if self.secret_type == SecretType.SSH_KEY:
return self.secret
return None
@@ -110,7 +110,7 @@ class BaseAccount(VaultModelMixin, JMSOrgBaseModel):
return None
@property
def ssh_key_fingerprint(self) -> str:
def ssh_key_fingerprint(self):
if self.public_key:
public_key = self.public_key
elif self.private_key:

View File

@@ -56,7 +56,7 @@ class VaultModelMixin(models.Model):
__secret = None
@property
def secret(self) -> str:
def secret(self):
if self.__secret:
return self.__secret
from accounts.backends import vault_client

View File

@@ -18,11 +18,11 @@ class VirtualAccount(JMSOrgBaseModel):
verbose_name = _('Virtual account')
@property
def name(self) -> str:
def name(self):
return self.get_alias_display()
@property
def username(self) -> str:
def username(self):
usernames_map = {
AliasAccount.INPUT: _("Manual input"),
AliasAccount.USER: _("Same with user"),
@@ -32,7 +32,7 @@ class VirtualAccount(JMSOrgBaseModel):
return usernames_map.get(self.alias, '')
@property
def comment(self) -> str:
def comment(self):
comments_map = {
AliasAccount.INPUT: _('Non-asset account, Input username/password on connect'),
AliasAccount.USER: _('The account username name same with user on connect'),

View File

@@ -456,8 +456,6 @@ class AssetAccountBulkSerializer(
class AccountSecretSerializer(SecretReadableMixin, AccountSerializer):
spec_info = serializers.DictField(label=_('Spec info'), read_only=True)
class Meta(AccountSerializer.Meta):
fields = AccountSerializer.Meta.fields + ['spec_info']
extra_kwargs = {
@@ -472,7 +470,6 @@ class AccountSecretSerializer(SecretReadableMixin, AccountSerializer):
class AccountHistorySerializer(serializers.ModelSerializer):
secret_type = LabeledChoiceField(choices=SecretType.choices, label=_('Secret type'))
secret = serializers.CharField(label=_('Secret'), read_only=True)
id = serializers.IntegerField(label=_('ID'), source='history_id', read_only=True)
class Meta:

View File

@@ -70,8 +70,6 @@ class AuthValidateMixin(serializers.Serializer):
class BaseAccountSerializer(
AuthValidateMixin, ResourceLabelsMixin, BulkOrgResourceModelSerializer
):
spec_info = serializers.DictField(label=_('Spec info'), read_only=True)
class Meta:
model = BaseAccount
fields_mini = ["id", "name", "username"]

View File

@@ -1,9 +1,8 @@
# -*- coding: utf-8 -*-
#
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from accounts.const import AutomationTypes, AccountBackupType
from accounts.const import AutomationTypes
from accounts.models import BackupAccountAutomation
from common.serializers.fields import EncryptedField
from common.utils import get_logger
@@ -42,17 +41,6 @@ class BackupAccountSerializer(BaseAutomationSerializer):
'types': {'label': _('Asset type')}
}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.set_backup_type_choices()
def set_backup_type_choices(self):
field_backup_type = self.fields.get("backup_type")
if not field_backup_type:
return
if not settings.XPACK_LICENSE_IS_VALID:
field_backup_type._choices.pop(AccountBackupType.object_storage, None)
@property
def model_type(self):
return AutomationTypes.backup_account

View File

@@ -130,7 +130,7 @@ class ChangeSecretRecordSerializer(serializers.ModelSerializer):
read_only_fields = fields
@staticmethod
def get_is_success(obj) -> bool:
def get_is_success(obj):
return obj.status == ChangeSecretRecordStatusChoice.success
@@ -157,7 +157,7 @@ class ChangeSecretRecordBackUpSerializer(serializers.ModelSerializer):
read_only_fields = fields
@staticmethod
def get_asset(instance) -> str:
def get_asset(instance):
return str(instance.asset)
@staticmethod
@@ -165,7 +165,7 @@ class ChangeSecretRecordBackUpSerializer(serializers.ModelSerializer):
return str(instance.account)
@staticmethod
def get_is_success(obj) -> str:
def get_is_success(obj):
if obj.status == ChangeSecretRecordStatusChoice.success.value:
return _("Success")
return _("Failed")
@@ -196,9 +196,9 @@ class ChangeSecretAccountSerializer(serializers.ModelSerializer):
read_only_fields = fields
@staticmethod
def get_meta(obj) -> dict:
def get_meta(obj):
return account_secret_task_status.get(str(obj.id))
@staticmethod
def get_ttl(obj) -> int:
def get_ttl(obj):
return account_secret_task_status.get_ttl(str(obj.id))

View File

@@ -69,7 +69,7 @@ class AssetRiskSerializer(serializers.Serializer):
risk_summary = serializers.SerializerMethodField()
@staticmethod
def get_risk_summary(obj) -> dict:
def get_risk_summary(obj):
summary = {}
for risk in RiskChoice.choices:
summary[f"{risk[0]}_count"] = obj.get(f"{risk[0]}_count", 0)

View File

@@ -28,7 +28,7 @@ class DiscoverAccountAutomationSerializer(BaseAutomationSerializer):
+ read_only_fields)
extra_kwargs = {
'check_risk': {
'help_text': _('Whether to check the risk of the discovered accounts.'),
'help_text': _('Whether to check the risk of the gathered accounts.'),
},
**BaseAutomationSerializer.Meta.extra_kwargs
}

View File

@@ -1,5 +1,4 @@
import datetime
from collections import defaultdict
from celery import shared_task
from django.db.models import Q
@@ -73,43 +72,24 @@ def execute_automation_record_task(record_ids, tp):
task_name = gettext_noop('Execute automation record')
with tmp_to_root_org():
records = ChangeSecretRecord.objects.filter(id__in=record_ids).order_by('-date_updated')
records = ChangeSecretRecord.objects.filter(id__in=record_ids)
if not records:
logger.error(f'No automation record found: {record_ids}')
logger.error('No automation record found: {}'.format(record_ids))
return
seen_accounts = set()
unique_records = []
for rec in records:
acct = str(rec.account_id)
if acct not in seen_accounts:
seen_accounts.add(acct)
unique_records.append(rec)
exec_groups = defaultdict(list)
for rec in unique_records:
exec_groups[rec.execution_id].append(rec)
for __, group in exec_groups.items():
latest_rec = group[0]
snapshot = getattr(latest_rec.execution, 'snapshot', {}) or {}
record_map = {f"{r.asset_id}-{r.account_id}": str(r.id) for r in group}
assets = [str(r.asset_id) for r in group]
accounts = [str(r.account_id) for r in group]
task_snapshot = {
'params': {},
'record_map': record_map,
'secret': latest_rec.new_secret,
'secret_type': snapshot.get('secret_type'),
'assets': assets,
'accounts': accounts,
}
with tmp_to_org(latest_rec.execution.org_id):
quickstart_automation_by_snapshot(task_name, tp, task_snapshot)
record = records[0]
record_map = {f'{record.asset_id}-{record.account_id}': str(record.id) for record in records}
task_snapshot = {
'params': {},
'record_map': record_map,
'secret': record.new_secret,
'secret_type': record.execution.snapshot.get('secret_type'),
'assets': [str(instance.asset_id) for instance in records],
'accounts': [str(instance.account_id) for instance in records],
}
with tmp_to_org(record.execution.org_id):
quickstart_automation_by_snapshot(task_name, tp, task_snapshot)
@shared_task(
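
One side of the `execute_automation_record_task` hunk above builds a single snapshot from the first record, while the other first deduplicates records per account and then fans them out per `execution_id`, building one snapshot per group. Isolated from the surrounding task plumbing, that grouping step looks like this (field names taken from the hunk, everything else simplified):

    from collections import defaultdict

    def group_records_by_execution(records):
        # Keep only the first record seen for each account,
        # then bucket the survivors by their execution id.
        seen_accounts = set()
        exec_groups = defaultdict(list)
        for rec in records:
            acct = str(rec.account_id)
            if acct in seen_accounts:
                continue
            seen_accounts.add(acct)
            exec_groups[rec.execution_id].append(rec)
        return exec_groups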

View File

@@ -5,7 +5,7 @@ from django.utils.translation import gettext_lazy as _
from common.db.fields import JSONManyToManyField
from common.db.models import JMSBaseModel
from common.utils import contains_ip
from common.utils.timezone import contains_time_period
from common.utils.time_period import contains_time_period
from orgs.mixins.models import OrgModelMixin, OrgManager
from ..const import ActionChoices

View File

@@ -34,16 +34,16 @@ class CommandGroup(JMSOrgBaseModel):
@lazyproperty
def pattern(self):
content = self.content.replace('\r\n', '\n')
if self.type == 'command':
s = self.construct_command_regex(content)
s = self.construct_command_regex(self.content)
else:
s = r'{0}'.format(r'{}'.format('|'.join(content.split('\n'))))
s = r'{0}'.format(self.content)
return s
@classmethod
def construct_command_regex(cls, content):
regex = []
content = content.replace('\r\n', '\n')
for _cmd in content.split('\n'):
cmd = re.sub(r'\s+', ' ', _cmd)
cmd = re.escape(cmd)
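
For context on the `construct_command_regex` helper touched above: the general technique is to normalize line endings, collapse whitespace inside each command, escape it, and join the results into one alternation pattern. A self-contained sketch of that idea (simplified, not the project's exact implementation):

    import re

    def build_command_pattern(content: str) -> re.Pattern:
        # One command per line -> a single alternation regex.
        commands = []
        for raw in content.replace('\r\n', '\n').split('\n'):
            cmd = re.sub(r'\s+', ' ', raw).strip()
            if cmd:
                commands.append(re.escape(cmd))
        return re.compile('|'.join(commands))

    # build_command_pattern('rm -rf /\nshutdown now').search('sudo rm -rf /') matches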

View File

@@ -1,4 +1,4 @@
from common.serializers.mixin import CommonBulkModelSerializer
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from .base import BaseUserAssetAccountACLSerializer as BaseSerializer
from ..const import ActionChoices
from ..models import ConnectMethodACL
@@ -6,15 +6,16 @@ from ..models import ConnectMethodACL
__all__ = ["ConnectMethodACLSerializer"]
class ConnectMethodACLSerializer(BaseSerializer, CommonBulkModelSerializer):
class ConnectMethodACLSerializer(BaseSerializer, BulkOrgResourceModelSerializer):
class Meta(BaseSerializer.Meta):
model = ConnectMethodACL
fields = [
i for i in BaseSerializer.Meta.fields + ['connect_methods']
if i not in ['assets', 'accounts', 'org_id']
if i not in ['assets', 'accounts']
]
action_choices_exclude = BaseSerializer.Meta.action_choices_exclude + [
ActionChoices.review,
ActionChoices.accept,
ActionChoices.notice,
ActionChoices.face_verify,
ActionChoices.face_online,

View File

@@ -1,7 +1,7 @@
from django.utils.translation import gettext as _
from common.serializers import CommonBulkModelSerializer
from common.serializers import MethodSerializer
from orgs.mixins.serializers import BulkOrgResourceModelSerializer
from .base import BaseUserACLSerializer
from .rules import RuleSerializer
from ..const import ActionChoices
@@ -12,12 +12,12 @@ __all__ = ["LoginACLSerializer"]
common_help_text = _("With * indicating a match all. ")
class LoginACLSerializer(BaseUserACLSerializer, CommonBulkModelSerializer):
class LoginACLSerializer(BaseUserACLSerializer, BulkOrgResourceModelSerializer):
rules = MethodSerializer(label=_('Rule'))
class Meta(BaseUserACLSerializer.Meta):
model = LoginACL
fields = list((set(BaseUserACLSerializer.Meta.fields) | {'rules'}) - {'org_id'})
fields = BaseUserACLSerializer.Meta.fields + ['rules', ]
action_choices_exclude = [
ActionChoices.warning,
ActionChoices.notify_and_warn,

View File

@@ -1,7 +1,5 @@
# coding: utf-8
#
from urllib.parse import urlparse
from django.utils.translation import gettext_lazy as _
from rest_framework import serializers
@@ -10,7 +8,7 @@ from common.utils.ip import is_ip_address, is_ip_network, is_ip_segment
logger = get_logger(__file__)
__all__ = ['RuleSerializer', 'ip_group_child_validator', 'ip_group_help_text', 'address_validator']
__all__ = ['RuleSerializer', 'ip_group_child_validator', 'ip_group_help_text']
def ip_group_child_validator(ip_group_child):
@@ -23,19 +21,6 @@ def ip_group_child_validator(ip_group_child):
raise serializers.ValidationError(error)
def address_validator(value):
parsed = urlparse(value)
is_basic_url = parsed.scheme in ('http', 'https') and parsed.netloc
is_valid = value == '*' \
or is_ip_address(value) \
or is_ip_network(value) \
or is_ip_segment(value) \
or is_basic_url
if not is_valid:
error = _('address invalid: `{}`').format(value)
raise serializers.ValidationError(error)
ip_group_help_text = _(
'With * indicating a match all. '
'Such as: '

View File

@@ -16,7 +16,6 @@ class CategoryViewSet(ListModelMixin, JMSGenericViewSet):
'types': TypeSerializer,
}
permission_classes = (IsValidUser,)
default_limit = None
def get_queryset(self):
return AllTypes.categories()

View File

@@ -14,7 +14,6 @@ class FavoriteAssetViewSet(BulkModelViewSet):
serializer_class = FavoriteAssetSerializer
permission_classes = (IsValidUser,)
filterset_fields = ['asset']
default_limit = None
def dispatch(self, request, *args, **kwargs):
with tmp_to_root_org():

View File

@@ -7,18 +7,15 @@ from rest_framework.decorators import action
from rest_framework.response import Response
from assets.const import AllTypes
from assets.models import Platform, Node, Asset, PlatformProtocol, PlatformAutomation
from assets.models import Platform, Node, Asset, PlatformProtocol
from assets.serializers import PlatformSerializer, PlatformProtocolSerializer, PlatformListSerializer
from common.api import JMSModelViewSet
from common.permissions import IsValidUser
from common.serializers import GroupedChoiceSerializer
from rbac.models import RoleBinding
__all__ = ['AssetPlatformViewSet', 'PlatformAutomationMethodsApi', 'PlatformProtocolViewSet']
class PlatformFilter(filters.FilterSet):
name__startswith = filters.CharFilter(field_name='name', lookup_expr='istartswith')
@@ -43,7 +40,6 @@ class AssetPlatformViewSet(JMSModelViewSet):
'ops_methods': 'assets.view_platform',
'filter_nodes_assets': 'assets.view_platform',
}
default_limit = None
def get_queryset(self):
# 因为没有走分页逻辑,所以需要这里 prefetch
@@ -67,13 +63,6 @@ class AssetPlatformViewSet(JMSModelViewSet):
return super().get_object()
return self.get_queryset().get(name=pk)
def check_permissions(self, request):
if self.action == 'list' and RoleBinding.is_org_admin(request.user):
return True
else:
return super().check_permissions(request)
def check_object_permissions(self, request, obj):
if request.method.lower() in ['delete', 'put', 'patch'] and obj.internal:
self.permission_denied(
@@ -113,7 +102,6 @@ class PlatformProtocolViewSet(JMSModelViewSet):
class PlatformAutomationMethodsApi(generics.ListAPIView):
permission_classes = (IsValidUser,)
queryset = PlatformAutomation.objects.none()
@staticmethod
def automation_methods():

View File

@@ -1,8 +1,8 @@
from rest_framework.generics import ListAPIView
from assets import serializers
from assets.const import Protocol
from common.permissions import IsValidUser
from assets.models import Protocol
__all__ = ['ProtocolListApi']
@@ -13,13 +13,3 @@ class ProtocolListApi(ListAPIView):
def get_queryset(self):
return list(Protocol.protocols())
def filter_queryset(self, queryset):
search = self.request.query_params.get("search", "").lower().strip()
if not search:
return queryset
queryset = [
p for p in queryset
if search in p['label'].lower() or search in p['value'].lower()
]
return queryset

View File

@@ -161,7 +161,6 @@ class CategoryTreeApi(SerializeToTreeNodeMixin, generics.ListAPIView):
'GET': 'assets.view_asset',
'list': 'assets.view_asset',
}
queryset = Node.objects.none()
def get_assets(self):
key = self.request.query_params.get('key')

View File

@@ -123,7 +123,9 @@ class BaseManager:
self.execution.summary = self.summary
self.execution.result = self.result
self.execution.status = self.status
self.execution.save()
with safe_atomic_db_connection():
self.execution.save()
def print_summary(self):
content = "\nSummery: \n"
@@ -155,7 +157,7 @@ class BaseManager:
report = self.gen_report()
report = transform(report, cssutils_logging_level="CRITICAL")
subject = self.get_report_subject()
emails = [user.email]
emails = [r.email for r in recipients if r.email]
send_mail_async(subject, report, emails, html_message=report)
def gen_report(self):
@@ -165,10 +167,9 @@ class BaseManager:
return data
def post_run(self):
with safe_atomic_db_connection():
self.update_execution()
self.print_summary()
self.send_report_if_need()
self.update_execution()
self.print_summary()
self.send_report_if_need()
def run(self, *args, **kwargs):
self.pre_run()
@@ -201,17 +202,14 @@ class PlaybookPrepareMixin:
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# example: {'gather_fact_windows': {'id': 'gather_fact_windows', 'name': '', 'method': 'gather_fact', ...} }
self.method_id_meta_mapper = self.get_method_id_meta_mapper()
# 根据执行方式就行分组, 不同资产的改密、推送等操作可能会使用不同的执行方式
# 然后根据执行方式分组, 再根据 bulk_size 分组, 生成不同的 playbook
self.playbooks = []
def get_method_id_meta_mapper(self):
return {
self.method_id_meta_mapper = {
method["id"]: method
for method in self.platform_automation_methods
if method["method"] == self.__class__.method_type()
}
# 根据执行方式就行分组, 不同资产的改密、推送等操作可能会使用不同的执行方式
# 然后根据执行方式分组, 再根据 bulk_size 分组, 生成不同的 playbook
self.playbooks = []
@classmethod
def method_type(cls):
@@ -550,8 +548,7 @@ class BasePlaybookManager(PlaybookPrepareMixin, BaseManager):
try:
kwargs.update({"clean_workspace": False})
cb = runner.run(**kwargs)
with safe_atomic_db_connection():
self.on_runner_success(runner, cb)
self.on_runner_success(runner, cb)
except Exception as e:
self.on_runner_failed(runner, e, **info)
finally:

View File

@@ -11,20 +11,15 @@ class FormatAssetInfo:
@staticmethod
def get_cpu_model_count(cpus):
try:
if len(cpus) % 3 == 0:
step = 3
models = [cpus[i + 2] for i in range(0, len(cpus), step)]
elif len(cpus) % 2 == 0:
step = 2
models = [cpus[i + 1] for i in range(0, len(cpus), step)]
else:
raise ValueError("CPU list format not recognized")
models = [cpus[i + 1] + " " + cpus[i + 2] for i in range(0, len(cpus), 3)]
model_counts = Counter(models)
result = ', '.join([f"{model} x{count}" for model, count in model_counts.items()])
except Exception as e:
print(f"Error processing CPU model list: {e}")
result = ''
return result
@staticmethod
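
The `get_cpu_model_count` hunk above is only about how a flat list of CPU facts gets collapsed into "model xN" summaries; stripped of the index arithmetic, the core is a collections.Counter pass. An illustrative sketch with made-up sample data:

    from collections import Counter

    def summarize_models(models):
        # ['Xeon Gold 6230', 'Xeon Gold 6230'] -> 'Xeon Gold 6230 x2'
        counts = Counter(models)
        return ', '.join(f"{model} x{count}" for model, count in counts.items())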

View File

@@ -3,8 +3,7 @@
vars:
ansible_shell_type: sh
ansible_connection: local
ansible_python_interpreter: "{{ local_python_interpreter }}"
ansible_timeout: 30
ansible_python_interpreter: /opt/py3/bin/python
tasks:
- name: Test asset connection (pyfreerdp)

View File

@@ -4,7 +4,7 @@
ansible_connection: local
ansible_shell_type: sh
ansible_become: false
ansible_timeout: 30
tasks:
- name: Test asset connection (paramiko)
ssh_ping:

View File

@@ -3,7 +3,7 @@
vars:
ansible_connection: local
ansible_shell_type: sh
ansible_timeout: 30
tasks:
- name: Test asset connection (telnet)
telnet_ping:

View File

@@ -2,7 +2,6 @@
gather_facts: no
vars:
ansible_python_interpreter: "{{ local_python_interpreter }}"
ansible_timeout: 30
tasks:
- name: Test MongoDB connection
@@ -17,5 +16,3 @@
ssl_certfile: "{{ jms_asset.secret_info.client_key | default('') }}"
connection_options:
- tlsAllowInvalidHostnames: "{{ jms_asset.spec_info.allow_invalid_cert}}"
register: result
failed_when: not result.is_available

View File

@@ -6,7 +6,6 @@
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"
ansible_timeout: 30
tasks:
- name: Test MySQL connection

View File

@@ -2,7 +2,6 @@
gather_facts: no
vars:
ansible_python_interpreter: "{{ local_python_interpreter }}"
ansible_timeout: 30
tasks:
- name: Test Oracle connection

View File

@@ -6,7 +6,7 @@
ca_cert: "{{ jms_asset.secret_info.ca_cert | default('') }}"
ssl_cert: "{{ jms_asset.secret_info.client_cert | default('') }}"
ssl_key: "{{ jms_asset.secret_info.client_key | default('') }}"
ansible_timeout: 30
tasks:
- name: Test PostgreSQL connection
community.postgresql.postgresql_ping:

View File

@@ -2,7 +2,6 @@
gather_facts: no
vars:
ansible_python_interpreter: "{{ local_python_interpreter }}"
ansible_timeout: 30
tasks:
- name: Test SQLServer connection

View File

@@ -1,13 +0,0 @@
- hosts: website
gather_facts: no
vars:
ansible_python_interpreter: "{{ local_python_interpreter }}"
tasks:
- name: Test Website connection
website_ping:
login_host: "{{ jms_asset.address }}"
login_user: "{{ jms_account.username }}"
login_password: "{{ jms_account.secret }}"
steps: "{{ params.steps }}"
load_state: "{{ params.load_state }}"

View File

@@ -1,50 +0,0 @@
id: website_ping
name: "{{ 'Website ping' | trans }}"
method: ping
category:
- web
type:
- website
params:
- name: load_state
type: choice
label: "{{ 'Load state' | trans }}"
choices:
- [ networkidle, "{{ 'Network idle' | trans }}" ]
- [ domcontentloaded, "{{ 'Dom content loaded' | trans }}" ]
- [ load, "{{ 'Load completed' | trans }}" ]
default: 'load'
- name: steps
type: list
default: []
label: "{{ 'Steps' | trans }}"
help_text: "{{ 'Params step help text' | trans }}"
i18n:
Website ping:
zh: 使用 Playwright 模拟浏览器测试可连接性
en: Use Playwright to simulate a browser for connectivity testing
ja: Playwright を使用してブラウザをシミュレートし、接続性テストを実行する
Load state:
zh: 加载状态检测
en: Load state detection
ja: ロード状態の検出
Steps:
zh: 步骤
en: Steps
ja: 手順
Network idle:
zh: 网络空闲
en: Network idle
ja: ネットワークが空いた状態
Dom content loaded:
zh: 文档内容加载完成
en: Dom content loaded
ja: ドキュメントの内容がロードされた状態
Load completed:
zh: 全部加载完成
en: All load completed
ja: すべてのロードが完了した状態
Params step help text:
zh: 配置步骤,根据配置决定任务执行步骤
ja: パラメータを設定し、設定に基づいてタスクの実行手順を決定します
en: Configure steps, and determine the task execution steps based on the configuration.

View File

@@ -14,10 +14,6 @@ class Connectivity(TextChoices):
NTLM_ERR = 'ntlm_err', _('NTLM credentials rejected error')
CREATE_TEMPORARY_ERR = 'create_temp_err', _('Create temporary error')
@classmethod
def as_dict(cls):
return {choice.value: choice.label for choice in cls}
class AutomationTypes(TextChoices):
ping = 'ping', _('Ping')

View File

@@ -20,7 +20,3 @@ class Category(ChoicesMixin, models.TextChoices):
_category = getattr(cls, category.upper(), None)
choices = [(_category.value, _category.label)] if _category else cls.choices
return choices
@classmethod
def as_dict(cls):
return {choice.value: choice.label for choice in cls}

View File

@@ -194,12 +194,6 @@ class Protocol(ChoicesMixin, models.TextChoices):
'default': '>=2014',
'label': _('Version'),
'help_text': _('SQL Server version, Different versions have different connection drivers')
},
'encrypt': {
'type': 'bool',
'default': True,
'label': _('Encrypt'),
'help_text': _('Whether to use TLS encryption.')
}
}
},
@@ -349,7 +343,7 @@ class Protocol(ChoicesMixin, models.TextChoices):
for protocol, config in cls.settings().items():
if not xpack_enabled and config.get('xpack', False):
continue
protocols.append({'label': protocol.label, 'value': protocol.value})
protocols.append(protocol)
from assets.models.platform import PlatformProtocol
custom_protocols = (

View File

@@ -20,17 +20,13 @@ class WebTypes(BaseType):
def _get_automation_constrains(cls) -> dict:
constrains = {
'*': {
'ansible_enabled': True,
'ansible_config': {
'ansible_connection': 'local',
},
'ping_enabled': True,
'ansible_enabled': False,
'ping_enabled': False,
'gather_facts_enabled': False,
'verify_account_enabled': True,
'change_secret_enabled': True,
'verify_account_enabled': False,
'change_secret_enabled': False,
'push_account_enabled': False,
'gather_accounts_enabled': False,
'remove_account_enabled': False,
}
}
return constrains

View File

@@ -112,7 +112,7 @@ class Protocol(models.Model):
return protocols[0] if len(protocols) > 0 else {}
@property
def setting(self) -> dict:
def setting(self):
if self._setting is not None:
return self._setting
return self.asset_platform_protocol.get('setting', {})
@@ -122,7 +122,7 @@ class Protocol(models.Model):
self._setting = value
@property
def public(self) -> bool:
def public(self):
return self.asset_platform_protocol.get('public', True)
@@ -210,7 +210,7 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
return self.category == const.Category.DS and hasattr(self, 'ds')
@lazyproperty
def spec_info(self) -> dict:
def spec_info(self):
instance = getattr(self, self.category, None)
if not instance:
return {}
@@ -240,7 +240,7 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
return info
@lazyproperty
def auto_config(self) -> dict:
def auto_config(self):
platform = self.platform
auto_config = {
'su_enabled': platform.su_enabled,
@@ -343,11 +343,11 @@ class Asset(NodesRelationMixin, LabeledMixin, AbsConnectivity, JSONFilterMixin,
return names
@lazyproperty
def type(self) -> str:
def type(self):
return self.platform.type
@lazyproperty
def category(self) -> str:
def category(self):
return self.platform.category
def is_category(self, category):

View File

@@ -53,7 +53,7 @@ class BaseAutomation(PeriodTaskModelMixin, JMSOrgBaseModel):
return name
def get_all_assets(self):
nodes = self.nodes.only("id", "key")
nodes = self.nodes.all()
node_asset_ids = Node.get_nodes_all_assets(*nodes).values_list("id", flat=True)
direct_asset_ids = self.assets.all().values_list("id", flat=True)
asset_ids = set(list(direct_asset_ids) + list(node_asset_ids))

View File

@@ -573,7 +573,7 @@ class Node(JMSOrgBaseModel, SomeNodesMixin, FamilyMixin, NodeAssetsMixin):
return not self.__gt__(other)
@property
def name(self) -> str:
def name(self):
return self.value
def computed_full_value(self):

View File

@@ -25,7 +25,7 @@ class PlatformProtocol(models.Model):
return '{}/{}'.format(self.name, self.port)
@property
def secret_types(self) -> list:
def secret_types(self):
return Protocol.settings().get(self.name, {}).get('secret_types', ['password'])
@lazyproperty

View File

@@ -147,7 +147,6 @@ class AssetSerializer(BulkOrgResourceModelSerializer, ResourceLabelsMixin, Writa
protocols = AssetProtocolsSerializer(many=True, required=False, label=_('Protocols'), default=())
accounts = AssetAccountSerializer(many=True, required=False, allow_null=True, write_only=True, label=_('Accounts'))
nodes_display = NodeDisplaySerializer(read_only=False, required=False, label=_("Node path"))
auto_config = serializers.DictField(read_only=True, label=_('Auto info'))
platform = ObjectRelatedField(queryset=Platform.objects, required=True, label=_('Platform'),
attrs=('id', 'name', 'type'))
accounts_amount = serializers.IntegerField(read_only=True, label=_('Accounts amount'))
@@ -426,18 +425,6 @@ class DetailMixin(serializers.Serializer):
gathered_info = MethodSerializer(label=_('Gathered info'), read_only=True)
auto_config = serializers.DictField(read_only=True, label=_('Auto info'))
@staticmethod
def get_auto_config(obj) -> dict:
return obj.auto_config
@staticmethod
def get_gathered_info(obj) -> dict:
return obj.gathered_info
@staticmethod
def get_spec_info(obj) -> dict:
return obj.spec_info
def get_instance(self):
request = self.context.get('request')
if not self.instance and UUID_PATTERN.findall(request.path):

View File

@@ -1,11 +1,10 @@
from django.db.models import QuerySet
from django.utils.translation import gettext_lazy as _, get_language
from django.utils.translation import gettext_lazy as _
from assets.models import Custom, Platform, Asset
from common.const import UUID_PATTERN
from common.serializers import create_serializer_class
from common.serializers.common import DictSerializer, MethodSerializer
from terminal.models import Applet
from .common import AssetSerializer
__all__ = ['CustomSerializer']
@@ -48,38 +47,8 @@ class CustomSerializer(AssetSerializer):
if not platform:
return default_field
custom_fields = platform.custom_fields
if not custom_fields:
return default_field
name = platform.name.title() + 'CustomSerializer'
applet = Applet.objects.filter(
name=platform.created_by.replace('Applet:', '')
).first()
if not applet:
return create_serializer_class(name, custom_fields)()
i18n = applet.manifest.get('i18n', {})
lang = get_language()
lang_short = lang[:2]
def translate_text(key):
return (
i18n.get(key, {}).get(lang)
or i18n.get(key, {}).get(lang_short)
or key
)
for field in custom_fields:
label = field.get('label')
help_text = field.get('help_text')
if label:
field['label'] = translate_text(label)
if help_text:
field['help_text'] = translate_text(help_text)
return create_serializer_class(name, custom_fields)()
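
The fallback order in translate_text is: exact language tag, then the two-letter prefix, then the key itself. A self-contained sketch of the same lookup (the manifest content below is made up for the example, not a real applet manifest):

# Illustrative i18n fallback, mirroring the translate_text helper in this hunk.
i18n = {
    "Username": {"zh-hans": "用户名", "zh": "用户名"},
}

def translate_text(key: str, lang: str) -> str:
    entry = i18n.get(key, {})
    # exact tag first, then the two-letter prefix, then fall back to the key
    return entry.get(lang) or entry.get(lang[:2]) or key

print(translate_text("Username", "zh-hans"))  # -> 用户名
print(translate_text("Password", "en"))       # -> Password (no entry, key returned)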

View File

@@ -19,13 +19,11 @@ __all__ = [
class BaseAutomationSerializer(PeriodTaskSerializerMixin, BulkOrgResourceModelSerializer):
assets = ObjectRelatedField(many=True, required=False, queryset=Asset.objects, label=_('Assets'))
nodes = ObjectRelatedField(many=True, required=False, queryset=Node.objects, label=_('Nodes'))
executed_amount = serializers.IntegerField(read_only=True, label=_('Executed amount'))
class Meta:
read_only_fields = [
'date_created', 'date_updated', 'created_by',
'periodic_display', 'executed_amount', 'type',
'last_execution_date',
'periodic_display', 'executed_amount', 'type', 'last_execution_date'
]
mini_fields = [
'id', 'name', 'type', 'is_periodic', 'interval',

View File

@@ -32,7 +32,7 @@ from rbac.permissions import RBACPermission
from terminal.models import default_storage
from users.models import User
from .backends import TYPE_ENGINE_MAPPING
from .const import ActivityChoices, ActionChoices
from .const import ActivityChoices
from .filters import UserSessionFilterSet, OperateLogFilterSet
from .models import (
FTPLog, UserLoginLog, OperateLog, PasswordChangeLog,
@@ -45,7 +45,7 @@ from .serializers import (
FileSerializer, UserSessionSerializer, JobsAuditSerializer,
ServiceAccessLogSerializer
)
from .utils import construct_userlogin_usernames, record_operate_log_and_activity_log
from .utils import construct_userlogin_usernames
logger = get_logger(__name__)
@@ -126,11 +126,6 @@ class FTPLogViewSet(OrgModelViewSet):
response['Content-Type'] = 'application/octet-stream'
filename = escape_uri_path(ftp_log.filename)
response["Content-Disposition"] = "attachment; filename*=UTF-8''{}".format(filename)
record_operate_log_and_activity_log(
[ftp_log.id], ActionChoices.download, '', self.model,
resource_display=f'{ftp_log.asset}: {ftp_log.filename}',
)
return response
@action(methods=[POST], detail=True, permission_classes=[IsServiceAccount, ], serializer_class=FileSerializer)
@@ -172,7 +167,10 @@ class UserLoginLogViewSet(UserLoginCommonMixin, OrgReadonlyModelViewSet):
def get_queryset(self):
queryset = super().get_queryset()
queryset = queryset.model.filter_queryset_by_org(queryset)
if current_org.is_root() or not settings.XPACK_ENABLED:
return queryset
users = self.get_org_member_usernames()
queryset = queryset.filter(username__in=users)
return queryset
@@ -294,7 +292,12 @@ class PasswordChangeLogViewSet(OrgReadonlyModelViewSet):
def get_queryset(self):
queryset = super().get_queryset()
return self.model.filter_queryset_by_org(queryset)
if not current_org.is_root():
users = current_org.get_members()
queryset = queryset.filter(
user__in=[str(user) for user in users]
)
return queryset
class UserSessionViewSet(CommonApiMixin, viewsets.ModelViewSet):
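
Both get_queryset hunks in this file hinge on the same rule: under the root org (or, for login logs, when XPACK is disabled) the log list is returned untouched, otherwise it is narrowed to the current org's members. A compact sketch of that pattern, using the current_org helper from orgs.utils that appears elsewhere in this diff (the function name scope_to_current_org is illustrative, not part of the codebase):

# Sketch of the org-scoping rule used by these audit viewsets.
from orgs.utils import current_org

def scope_to_current_org(queryset, username_field="username"):
    if current_org.is_root():
        return queryset  # root org sees everything
    usernames = [str(u) for u in current_org.get_members()]
    return queryset.filter(**{f"{username_field}__in": usernames})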

View File

@@ -35,7 +35,6 @@ class OperateLogStore(ES, metaclass=Singleton):
}
}
exact_fields = {}
fuzzy_fields = {}
match_fields = {
'id', 'user', 'action', 'resource_type',
'resource', 'remote_addr', 'org_id'
@@ -45,7 +44,7 @@ class OperateLogStore(ES, metaclass=Singleton):
}
if not config.get('INDEX'):
config['INDEX'] = 'jumpserver_operate_log'
super().__init__(config, properties, keyword_fields, exact_fields, fuzzy_fields, match_fields)
super().__init__(config, properties, keyword_fields, exact_fields, match_fields)
self.pre_use_check()
@staticmethod

View File

@@ -1,6 +1,6 @@
import os
import uuid
from datetime import timedelta, datetime
from datetime import timedelta
from importlib import import_module
from django.conf import settings
@@ -40,7 +40,7 @@ __all__ = [
class JobLog(JobExecution):
@property
def creator_name(self) -> str:
def creator_name(self):
return self.creator.name
class Meta:
@@ -73,9 +73,6 @@ class FTPLog(OrgModelMixin):
models.Index(fields=['date_start', 'org_id'], name='idx_date_start_org'),
]
def __str__(self):
return "{0.id} of {0.user} to {0.asset}".format(self)
@property
def filepath(self):
return os.path.join(self.upload_to, self.date_start.strftime('%Y-%m-%d'), str(self.id))
@@ -189,15 +186,6 @@ class PasswordChangeLog(models.Model):
class Meta:
verbose_name = _("Password change log")
@staticmethod
def filter_queryset_by_org(queryset):
if not current_org.is_root():
users = current_org.get_members()
queryset = queryset.filter(
user__in=[str(user) for user in users]
)
return queryset
class UserLoginLog(models.Model):
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
@@ -232,7 +220,7 @@ class UserLoginLog(models.Model):
return '%s(%s)' % (self.username, self.city)
@property
def backend_display(self) -> str:
def backend_display(self):
return gettext(self.backend)
@classmethod
@@ -258,7 +246,7 @@ class UserLoginLog(models.Model):
return login_logs
@property
def reason_display(self) -> str:
def reason_display(self):
from authentication.errors import reason_choices, old_reason_choices
reason = reason_choices.get(self.reason)
@@ -267,15 +255,6 @@ class UserLoginLog(models.Model):
reason = old_reason_choices.get(self.reason, self.reason)
return reason
@staticmethod
def filter_queryset_by_org(queryset):
from audits.utils import construct_userlogin_usernames
if current_org.is_root() or not settings.XPACK_ENABLED:
return queryset
user_queryset = current_org.get_members()
users = construct_userlogin_usernames(user_queryset)
return queryset.filter(username__in=users)
class Meta:
ordering = ["-datetime", "username"]
verbose_name = _("User login log")
@@ -300,15 +279,15 @@ class UserSession(models.Model):
return '%s(%s)' % (self.user, self.ip)
@property
def backend_display(self) -> str:
def backend_display(self):
return gettext(self.backend)
@property
def is_active(self) -> bool:
def is_active(self):
return user_session_manager.check_active(self.key)
@property
def date_expired(self) -> datetime:
def date_expired(self):
session_store_cls = import_module(settings.SESSION_ENGINE).SessionStore
session_store = session_store_cls(session_key=self.key)
cache_key = session_store.cache_key

View File

@@ -119,11 +119,11 @@ class OperateLogSerializer(BulkOrgResourceModelSerializer):
fields = fields_small
@staticmethod
def get_resource_type(instance) -> str:
def get_resource_type(instance):
return _(instance.resource_type)
@staticmethod
def get_resource(instance) -> str:
def get_resource(instance):
return i18n_trans(instance.resource)
@@ -147,11 +147,11 @@ class ActivityUnionLogSerializer(serializers.Serializer):
r_type = serializers.CharField(read_only=True)
@staticmethod
def get_timestamp(obj) -> str:
def get_timestamp(obj):
return as_current_tz(obj['datetime']).strftime('%Y-%m-%d %H:%M:%S')
@staticmethod
def get_content(obj) -> str:
def get_content(obj):
if not obj['r_detail']:
action = obj['r_action'].replace('_', ' ').capitalize()
ctn = _('%s %s this resource') % (obj['r_user'], _(action).lower())
@@ -160,7 +160,7 @@ class ActivityUnionLogSerializer(serializers.Serializer):
return ctn
@staticmethod
def get_detail_url(obj) -> str:
def get_detail_url(obj):
detail_url = ''
detail_id, obj_type = obj['r_detail_id'], obj['r_type']
if not detail_id:
@@ -210,7 +210,7 @@ class UserSessionSerializer(serializers.ModelSerializer):
"backend_display": {"label": _("Auth backend display")},
}
def get_is_current_user_session(self, obj) -> bool:
def get_is_current_user_session(self, obj):
request = self.context.get('request')
if not request:
return False
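
The -> str / -> bool hints carried by one side of these hunks are what schema generators such as drf-spectacular read to type a SerializerMethodField in the generated OpenAPI document; without them the field falls back to a generic type. A minimal, self-contained example of the pattern (the serializer below is illustrative, not one of JumpServer's):

# The return annotation on the getter drives the schema type of the field.
from rest_framework import serializers

class ExampleLogSerializer(serializers.Serializer):
    resource_type = serializers.SerializerMethodField()

    @staticmethod
    def get_resource_type(instance) -> str:  # annotation read by the schema generator
        return str(instance.resource_type)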

View File

@@ -89,8 +89,6 @@ def create_activities(resource_ids, detail, detail_id, action, org_id):
for activity in activities:
create_activity(activity)
create_activity.finish()
@signals.after_task_publish.connect
def after_task_publish_for_activity_log(headers=None, body=None, **kwargs):

View File

@@ -180,7 +180,7 @@ def on_django_start_set_operate_log_monitor_models(sender, **kwargs):
'PlatformAutomation', 'PlatformProtocol', 'Protocol',
'HistoricalAccount', 'GatheredUser', 'ApprovalRule',
'BaseAutomation', 'CeleryTask', 'Command', 'JobLog',
'ConnectionToken', 'SessionJoinRecord', 'SessionSharing',
'ConnectionToken', 'SessionJoinRecord',
'HistoricalJob', 'Status', 'TicketStep', 'Ticket',
'UserAssetGrantedTreeNodeRelation', 'TicketAssignee',
'SuperTicket', 'SuperConnectionToken', 'AdminConnectionToken', 'PermNode',

View File

@@ -2,19 +2,18 @@
#
import datetime
import os
import subprocess
from celery import shared_task
from django.conf import settings
from django.core.files.storage import default_storage
from django.db import transaction
from django.utils import timezone
from django.utils._os import safe_join
from django.utils.translation import gettext_lazy as _
from common.const.crontab import CRONTAB_AT_AM_TWO
from common.storage.ftp_file import FTPFileStorageHandler
from common.utils import get_log_keep_day, get_logger
from common.utils.safe import safe_run_cmd
from ops.celery.decorator import register_as_period_task
from ops.models import CeleryTaskExecution
from orgs.utils import tmp_to_root_org
@@ -58,12 +57,14 @@ def clean_ftp_log_period():
now = timezone.now()
days = get_log_keep_day('FTP_LOG_KEEP_DAYS')
expired_day = now - datetime.timedelta(days=days)
file_store_dir = safe_join(default_storage.base_location, FTPLog.upload_to)
file_store_dir = os.path.join(default_storage.base_location, FTPLog.upload_to)
FTPLog.objects.filter(date_start__lt=expired_day).delete()
command = "find %s -mtime +%s -type f -exec rm -f {} \\;"
safe_run_cmd(command, (file_store_dir, days))
command = "find %s -type d -empty -delete;"
safe_run_cmd(command, (file_store_dir,))
command = "find %s -mtime +%s -type f -exec rm -f {} \\;" % (
file_store_dir, days
)
subprocess.call(command, shell=True)
command = "find %s -type d -empty -delete;" % file_store_dir
subprocess.call(command, shell=True)
logger.info("Clean FTP file done")
@@ -75,11 +76,12 @@ def clean_celery_tasks_period():
tasks.delete()
tasks = CeleryTaskExecution.objects.filter(date_start__isnull=True)
tasks.delete()
command = "find %s -mtime +%s -name '*.log' -type f -exec rm -f {} \\;"
safe_run_cmd(command, (settings.CELERY_LOG_DIR, expire_days))
celery_log_path = safe_join(settings.LOG_DIR, 'celery.log')
command = "echo > %s"
safe_run_cmd(command, (celery_log_path,))
command = "find %s -mtime +%s -name '*.log' -type f -exec rm -f {} \\;" % (
settings.CELERY_LOG_DIR, expire_days
)
subprocess.call(command, shell=True)
command = "echo > {}".format(os.path.join(settings.LOG_DIR, 'celery.log'))
subprocess.call(command, shell=True)
def batch_delete(queryset, batch_size=3000):
@@ -117,15 +119,15 @@ def clean_expired_session_period():
expired_sessions = Session.objects.filter(date_start__lt=expire_date)
timestamp = expire_date.timestamp()
expired_commands = Command.objects.filter(timestamp__lt=timestamp)
replay_dir = safe_join(default_storage.base_location, 'replay')
replay_dir = os.path.join(default_storage.base_location, 'replay')
batch_delete(expired_sessions)
logger.info("Clean session item done")
batch_delete(expired_commands)
logger.info("Clean session command done")
remove_files_by_days(replay_dir, days)
command = "find %s -type d -empty -delete;"
safe_run_cmd(command, (replay_dir,))
command = "find %s -type d -empty -delete;" % replay_dir
subprocess.call(command, shell=True)
logger.info("Clean session replay done")

View File

@@ -1,6 +1,5 @@
import copy
from datetime import datetime
from itertools import chain
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.core.exceptions import ObjectDoesNotExist
@@ -8,6 +7,7 @@ from django.db import models
from django.db.models import F, Value, CharField
from django.db.models.functions import Concat
from django.utils import translation
from itertools import chain
from common.db.fields import RelatedManager
from common.utils import validate_ip, get_ip_city, get_logger
@@ -16,6 +16,7 @@ from .const import DEFAULT_CITY, ActivityChoices as LogChoice
from .handler import create_or_update_operate_log
from .models import ActivityLog
logger = get_logger(__name__)
@@ -150,7 +151,7 @@ def record_operate_log_and_activity_log(ids, action, detail, model, **kwargs):
org_id = current_org.id
with translation.override('en'):
resource_type = kwargs.pop('resource_type', None) or model._meta.verbose_name
resource_type = model._meta.verbose_name
create_or_update_operate_log(action, resource_type, force=True, **kwargs)
base_data = {'type': LogChoice.operate_log, 'detail': detail, 'org_id': org_id}
activities = [ActivityLog(resource_id=r_id, **base_data) for r_id in ids]

View File

@@ -618,8 +618,6 @@ class SuperConnectionTokenViewSet(ConnectionTokenViewSet):
token_id = request.data.get('id') or ''
token = ConnectionToken.get_typed_connection_token(token_id)
if not token:
raise PermissionDenied('Token {} is not valid'.format(token))
token.is_valid()
serializer = self.get_serializer(instance=token)

View File

@@ -14,6 +14,7 @@ from rest_framework.response import Response
from authentication.errors import ACLError
from common.api import JMSGenericViewSet
from common.const.http import POST, GET
from common.permissions import OnlySuperUser
from common.serializers import EmptySerializer
from common.utils import reverse, safe_next_url
from common.utils.timezone import utc_now
@@ -37,11 +38,8 @@ class SSOViewSet(AuthMixin, JMSGenericViewSet):
'login_url': SSOTokenSerializer,
'login': EmptySerializer
}
rbac_perms = {
'login_url': 'authentication.add_ssotoken',
}
@action(methods=[POST], detail=False, url_path='login-url')
@action(methods=[POST], detail=False, permission_classes=[OnlySuperUser], url_path='login-url')
def login_url(self, request, *args, **kwargs):
if not settings.AUTH_SSO:
raise SSOAuthClosed()

View File

@@ -1,9 +1,9 @@
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from django.views import View
from django.contrib.auth import get_user_model
from common.utils import get_logger
from users.models import User
from common.utils import get_logger
UserModel = get_user_model()
logger = get_logger(__file__)
@@ -61,13 +61,4 @@ class JMSBaseAuthBackend:
class JMSModelBackend(JMSBaseAuthBackend, ModelBackend):
def user_can_authenticate(self, user):
return True
class BaseAuthCallbackClientView(View):
http_method_names = ['get']
def get(self, request):
from authentication.views.utils import redirect_to_guard_view
return redirect_to_guard_view(query_string='next=client')
pass

View File

@@ -1,51 +1,14 @@
# -*- coding: utf-8 -*-
#
import threading
from django.conf import settings
from django.contrib.auth import get_user_model
from django_cas_ng.backends import CASBackend as _CASBackend
from django.conf import settings
from common.utils import get_logger
from ..base import JMSBaseAuthBackend
__all__ = ['CASBackend', 'CASUserDoesNotExist']
logger = get_logger(__name__)
class CASUserDoesNotExist(Exception):
"""Exception raised when a CAS user does not exist."""
pass
__all__ = ['CASBackend']
class CASBackend(JMSBaseAuthBackend, _CASBackend):
@staticmethod
def is_enabled():
return settings.AUTH_CAS
def authenticate(self, request, ticket, service):
UserModel = get_user_model()
manager = UserModel._default_manager
original_get_by_natural_key = manager.get_by_natural_key
thread_local = threading.local()
thread_local.thread_id = threading.get_ident()
logger.debug(f"CASBackend.authenticate: thread_id={thread_local.thread_id}")
def get_by_natural_key(self, username):
logger.debug(f"CASBackend.get_by_natural_key: thread_id={threading.get_ident()}, username={username}")
if threading.get_ident() != thread_local.thread_id:
return original_get_by_natural_key(username)
try:
user = original_get_by_natural_key(username)
except UserModel.DoesNotExist:
raise CASUserDoesNotExist(username)
return user
try:
manager.get_by_natural_key = get_by_natural_key.__get__(manager, type(manager))
user = super().authenticate(request, ticket=ticket, service=service)
finally:
manager.get_by_natural_key = original_get_by_natural_key
return user
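
The notable detail in this backend is how get_by_natural_key is swapped out only for the duration of a single authenticate() call: the replacement function is bound to the manager with __get__, the original is restored in the finally block, and a thread-identity check keeps concurrent requests on the untouched code path. The binding step in isolation looks like this (a generic illustration, not JumpServer code):

# Binding a plain function to an existing object via the descriptor protocol,
# as the CAS backend above does for the user manager.
class Greeter:
    name = "world"

def shout_hello(self):
    return f"HELLO, {self.name.upper()}!"

g = Greeter()
g.hello = shout_hello.__get__(g, Greeter)  # bound method attached to one instance
print(g.hello())                           # -> HELLO, WORLD!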

View File

@@ -1,33 +1,23 @@
from django.core.exceptions import PermissionDenied
from django.http import HttpResponseRedirect
from django.utils.translation import gettext_lazy as _
from django.views.generic import View
from django_cas_ng.views import LoginView
from authentication.backends.base import BaseAuthCallbackClientView
from common.utils import FlashMessageUtil
from .backends import CASUserDoesNotExist
__all__ = ['LoginView']
from authentication.views.utils import redirect_to_guard_view
class CASLoginView(LoginView):
def get(self, request):
try:
resp = super().get(request)
return resp
return super().get(request)
except PermissionDenied:
return HttpResponseRedirect('/')
except CASUserDoesNotExist as e:
message_data = {
'title': _('User does not exist: {}').format(e),
'error': _(
'CAS login was successful, but no corresponding local user was found in the system, and automatic '
'user creation is disabled in the CAS authentication configuration. Login failed.'),
'interval': 10,
'redirect_url': '/',
}
return FlashMessageUtil.gen_and_redirect_to(message_data)
class CASCallbackClientView(BaseAuthCallbackClientView):
pass
class CASCallbackClientView(View):
http_method_names = ['get', ]
def get(self, request):
return redirect_to_guard_view(query_string='next=client')

View File

@@ -5,10 +5,10 @@ from django.urls import reverse
from django.utils.http import urlencode
from django.views import View
from authentication.backends.base import BaseAuthCallbackClientView
from authentication.mixins import authenticate
from authentication.utils import build_absolute_uri
from authentication.views.mixins import FlashMessageMixin
from authentication.views.utils import redirect_to_guard_view
from common.utils import get_logger
logger = get_logger(__file__)
@@ -67,8 +67,11 @@ class OAuth2AuthCallbackView(View, FlashMessageMixin):
return HttpResponseRedirect(redirect_url)
class OAuth2AuthCallbackClientView(BaseAuthCallbackClientView):
pass
class OAuth2AuthCallbackClientView(View):
http_method_names = ['get', ]
def get(self, request):
return redirect_to_guard_view(query_string='next=client')
class OAuth2EndSessionView(View):

View File

@@ -224,6 +224,7 @@ class OIDCAuthCodeBackend(OIDCBaseBackend):
user_auth_failed.send(
sender=self.__class__, request=request, username=user.username,
reason="User is invalid", backend=settings.AUTH_BACKEND_OIDC_CODE
)
return None

View File

@@ -10,15 +10,16 @@ import datetime as dt
from calendar import timegm
from urllib.parse import urlparse
from django.conf import settings
from django.core.exceptions import SuspiciousOperation
from django.utils.encoding import force_bytes, smart_bytes
from jwkest import JWKESTException
from jwkest.jwk import KEYS
from jwkest.jws import JWS
from django.conf import settings
from common.utils import get_logger
logger = get_logger(__file__)
@@ -98,8 +99,7 @@ def _validate_claims(id_token, nonce=None, validate_nonce=True):
raise SuspiciousOperation('Incorrect id_token: nbf')
# Verifies that the token was issued in the allowed timeframe.
max_age = settings.AUTH_OPENID_ID_TOKEN_MAX_AGE
if utc_timestamp > id_token['iat'] + max_age:
if utc_timestamp > id_token['iat'] + settings.AUTH_OPENID_ID_TOKEN_MAX_AGE:
logger.debug(log_prompt.format('Incorrect id_token: iat'))
raise SuspiciousOperation('Incorrect id_token: iat')
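
Both variants of this check compare the token's iat claim plus the configured max age against the current UTC timestamp; the hunk only differs in whether the setting is read into a local first. With the timegm import visible at the top of this file, the check amounts to the following (claim values and max age are placeholders; the real code raises SuspiciousOperation):

# Worked form of the iat freshness check in this hunk.
import datetime as dt
from calendar import timegm

utc_timestamp = timegm(dt.datetime.now(dt.timezone.utc).utctimetuple())
id_token = {"iat": utc_timestamp - 120}   # issued two minutes ago (example)
max_age = 600                             # e.g. AUTH_OPENID_ID_TOKEN_MAX_AGE

if utc_timestamp > id_token["iat"] + max_age:
    raise ValueError("Incorrect id_token: iat")  # token older than allowed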

View File

@@ -29,7 +29,7 @@ from authentication.utils import build_absolute_uri_for_oidc
from authentication.views.mixins import FlashMessageMixin
from common.utils import safe_next_url
from .utils import get_logger
from ..base import BaseAuthCallbackClientView
from ...views.utils import redirect_to_guard_view
logger = get_logger(__file__)
@@ -171,10 +171,9 @@ class OIDCAuthCallbackView(View, FlashMessageMixin):
logger.debug(log_prompt.format('Process authenticate'))
try:
user = auth.authenticate(nonce=nonce, request=request, code_verifier=code_verifier)
except IntegrityError as e:
except IntegrityError:
title = _("OpenID Error")
msg = _('Please check if a user with the same username or email already exists')
logger.error(e, exc_info=True)
response = self.get_failed_response('/', title, msg)
return response
if user:
@@ -210,8 +209,11 @@ class OIDCAuthCallbackView(View, FlashMessageMixin):
return HttpResponseRedirect(settings.AUTH_OPENID_AUTHENTICATION_FAILURE_REDIRECT_URI)
class OIDCAuthCallbackClientView(BaseAuthCallbackClientView):
pass
class OIDCAuthCallbackClientView(View):
http_method_names = ['get', ]
def get(self, request):
return redirect_to_guard_view(query_string='next=client')
class OIDCEndSessionView(View):

View File

@@ -71,8 +71,7 @@ class PasskeyViewSet(AuthMixin, FlashMessageMixin, JMSModelViewSet):
return self.redirect_to_error(_('Auth failed'))
confirm_mfa = request.session.get('passkey_confirm_mfa')
# If safe mode is enabled, Passkey cannot be used as MFA
if confirm_mfa and not settings.SAFE_MODE:
if confirm_mfa:
request.session['CONFIRM_LEVEL'] = ConfirmType.values.index('mfa') + 1
request.session['CONFIRM_TIME'] = int(time.time())
request.session['CONFIRM_TYPE'] = ConfirmType.MFA
@@ -81,9 +80,7 @@ class PasskeyViewSet(AuthMixin, FlashMessageMixin, JMSModelViewSet):
try:
self.check_oauth2_auth(user, settings.AUTH_BACKEND_PASSKEY)
# If safe mode is enabled, passkey cannot be used as MFA
if not settings.SAFE_MODE:
self.mark_mfa_ok('passkey', user)
self.mark_mfa_ok('passkey', user)
return self.redirect_to_guard_view()
except Exception as e:
msg = getattr(e, 'msg', '') or str(e)

View File

@@ -19,7 +19,7 @@ from onelogin.saml2.idp_metadata_parser import (
from authentication.views.mixins import FlashMessageMixin
from common.utils import get_logger
from .settings import JmsSaml2Settings
from ..base import BaseAuthCallbackClientView
from ...views.utils import redirect_to_guard_view
logger = get_logger(__file__)
@@ -278,10 +278,9 @@ class Saml2AuthCallbackView(View, PrepareRequestMixin, FlashMessageMixin):
saml_user_data = self.get_attributes(saml_instance)
try:
user = auth.authenticate(request=request, saml_user_data=saml_user_data)
except IntegrityError as e:
except IntegrityError:
title = _("SAML2 Error")
msg = _('Please check if a user with the same username or email already exists')
logger.error(e, exc_info=True)
response = self.get_failed_response('/', title, msg)
return response
if user and user.is_valid:
@@ -300,8 +299,11 @@ class Saml2AuthCallbackView(View, PrepareRequestMixin, FlashMessageMixin):
return super().dispatch(*args, **kwargs)
class Saml2AuthCallbackClientView(BaseAuthCallbackClientView):
pass
class Saml2AuthCallbackClientView(View):
http_method_names = ['get', ]
def get(self, request):
return redirect_to_guard_view(query_string='next=client')
class Saml2AuthMetadataView(View, PrepareRequestMixin):

Some files were not shown because too many files have changed in this diff.