Compare commits


40 Commits

Author SHA1 Message Date
ibuler
91f6614711 perf: token retrieve 2025-10-21 11:14:02 +08:00
老广
e617245b26 merge: to master 2025-10-16 17:30:34 +08:00
Bryan
9280884c1c Merge pull request #16056 from jumpserver/dev (v4.10.8-lts) 2025-09-18 16:52:13 +08:00
Bryan
f31994fdcd Merge pull request #15899 from jumpserver/dev 2025-08-21 19:03:18 +08:00
Bryan
71766418bb Merge pull request #15742 from jumpserver/dev (merge: v4.10.4-lts) 2025-07-17 15:12:58 +08:00
Bryan
a9399dd709 Merge pull request #15608 from jumpserver/dev (v4.10.2) 2025-06-19 20:14:21 +08:00
Bryan
d0cb9e5432 Merge pull request #15412 from jumpserver/dev (v4.10.0) 2025-05-15 17:11:43 +08:00
老广
558188da90 merge: dev to master (Ready to release) 2025-04-17 20:24:45 +08:00
Bryan
ad5460dab8 Merge pull request #15086 from jumpserver/dev (v4.8.0) 2025-03-20 18:44:44 +08:00
Bryan
4d37dca0de Merge pull request #14901 from jumpserver/dev (v4.7.0) 2025-02-20 10:21:16 +08:00
Bryan
2ca4002624 Merge pull request #14813 from jumpserver/dev (v4.6.0) 2025-01-15 14:38:17 +08:00
Bryan
053d640e4c Merge pull request #14699 from jumpserver/dev (v4.5.0) 2024-12-19 16:04:45 +08:00
Bryan
f3acc28ded Merge pull request #14697 from jumpserver/dev (v4.5.0) 2024-12-19 15:57:11 +08:00
Bryan
25987545db Merge pull request #14511 from jumpserver/dev (v4.4.0) 2024-11-21 19:00:35 +08:00
Bryan
6720ecc6e0 Merge pull request #14319 from jumpserver/dev (v4.3.0) 2024-10-17 14:55:38 +08:00
老广
0b3a7bb020 Merge pull request #14203 from jumpserver/dev (merge: from dev to master) 2024-09-19 19:37:19 +08:00
Bryan
56373e362b Merge pull request #13988 from jumpserver/dev (v4.1.0) 2024-08-16 18:40:35 +08:00
Bryan
02fc045370 Merge pull request #13600 from jumpserver/dev (v4.0.0) 2024-07-03 19:04:35 +08:00
Bryan
e4ac73896f Merge pull request #13452 from jumpserver/dev (v3.10.11-lts) 2024-06-19 16:01:26 +08:00
Bryan
1518f792d6 Merge pull request #13236 from jumpserver/dev (v3.10.10-lts) 2024-05-16 16:04:07 +08:00
Bai
67277dd622 fix: Fix dashboard session ranking counts always showing 1 2024-04-22 19:42:33 +08:00
Bryan
82e7f020ea Merge pull request #13094 from jumpserver/dev (v3.10.9, dev to master) 2024-04-22 19:39:53 +08:00
Bryan
f20b9e01ab Merge pull request #13062 from jumpserver/dev (v3.10.8 dev to master) 2024-04-18 18:01:20 +08:00
Bryan
8cf8a3701b Merge pull request #13059 from jumpserver/dev (v3.10.8) 2024-04-18 17:16:37 +08:00
Bryan
7ba24293d1 Merge pull request #12736 from jumpserver/pr@dev@master_fix (fix: resolve conflicts) 2024-02-29 16:38:43 +08:00
Bai
f10114c9ed fix: Resolve conflicts 2024-02-29 16:37:10 +08:00
Bryan
cf31cbfb07 Merge pull request #12729 from jumpserver/dev (v3.10.4) 2024-02-29 16:19:59 +08:00
wangruidong
0edad24d5d fix: Asset expiration notification failed to send 2024-02-04 11:41:48 +08:00
ibuler
1f1c1a9157 fix: Fix the scheduled task that checks whether users are active not running 2024-01-23 09:28:38 +00:00
feng
6c9d271ae1 fix: Celery beat failed to start when the Redis password contained special characters 2024-01-22 06:18:34 +00:00
Bai
6ff852e225 perf: Fix missing deduplication in Count 2024-01-22 06:16:25 +00:00
Bryan
baa75dc735 Merge pull request #12566 from jumpserver/master (v3.10.2) 2024-01-17 07:34:28 -04:00
Bryan
8a9f0436b8 Merge pull request #12565 from jumpserver/dev (v3.10.2) 2024-01-17 07:23:30 -04:00
Bryan
a9620a3cbe Merge pull request #12461 from jumpserver/master (v3.10.1) 2023-12-29 11:33:05 +05:00
Bryan
769e7dc8a0 Merge pull request #12460 from jumpserver/dev (v3.10.1) 2023-12-29 11:20:36 +05:00
Bryan
2a70449411 Merge pull request #12458 from jumpserver/dev (v3.10.1) 2023-12-29 11:01:13 +05:00
Bryan
8df720f19e Merge pull request #12401 from jumpserver/dev (v3.10) 2023-12-21 15:14:19 +05:00
老广
dabbb45f6e Merge pull request #12144 from jumpserver/dev (v3.9.0) 2023-11-16 18:23:05 +08:00
Bryan
ce24c1c3fd Merge pull request #11914 from jumpserver/dev (v3.8.0) 2023-10-19 03:37:39 -05:00
Bryan
3c54c82ce9 Merge pull request #11636 from jumpserver/dev (v3.7.0) 2023-09-21 17:02:48 +08:00
28 changed files with 149 additions and 610 deletions

View File

@@ -1,123 +0,0 @@
name: Cleanup PR Branches
on:
schedule:
# Run every day at 2:00 AM
- cron: '0 2 * * *'
workflow_dispatch:
# Allow manual triggering
inputs:
dry_run:
description: 'Dry run mode (default: true)'
required: false
default: 'true'
type: boolean
jobs:
cleanup-branches:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 0 # fetch all branches and full commit history
- name: Setup Git
run: |
git config --global user.name "GitHub Actions"
git config --global user.email "actions@github.com"
- name: Get dry run setting
id: dry-run
run: |
if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then
echo "dry_run=${{ github.event.inputs.dry_run }}" >> $GITHUB_OUTPUT
else
echo "dry_run=false" >> $GITHUB_OUTPUT
fi
- name: Cleanup branches
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DRY_RUN: ${{ steps.dry-run.outputs.dry_run }}
run: |
echo "Starting branch cleanup..."
echo "Dry run mode: $DRY_RUN"
# Fetch all branches
git fetch --all --prune
# Get branches starting with pr or repr
branches=$(git branch -r | grep -E 'origin/(pr|repr)' | sed 's/origin\///' | grep -v 'HEAD')
echo "Found branches matching pattern:"
echo "$branches"
deleted_count=0
skipped_count=0
for branch in $branches; do
echo ""
echo "Processing branch: $branch"
# Check whether the branch has open (unmerged) PRs
pr_info=$(gh pr list --head "$branch" --state open --json number,title,state 2>/dev/null)
if [ $? -eq 0 ] && [ "$pr_info" != "[]" ]; then
echo " ⚠️ Branch has open PR(s), skipping deletion"
echo " PR info: $pr_info"
skipped_count=$((skipped_count + 1))
continue
fi
# Check whether the branch has merged PRs (optional: branches with merged PRs can also be deleted)
merged_pr_info=$(gh pr list --head "$branch" --state merged --json number,title,state 2>/dev/null)
if [ $? -eq 0 ] && [ "$merged_pr_info" != "[]" ]; then
echo " ✅ Branch has merged PR(s), safe to delete"
echo " Merged PR info: $merged_pr_info"
else
echo " No PRs found for this branch"
fi
# Perform the deletion
if [ "$DRY_RUN" = "true" ]; then
echo " 🔍 [DRY RUN] Would delete branch: $branch"
deleted_count=$((deleted_count + 1))
else
echo " 🗑️ Deleting branch: $branch"
# Delete the remote branch
if git push origin --delete "$branch" 2>/dev/null; then
echo " ✅ Successfully deleted remote branch: $branch"
deleted_count=$((deleted_count + 1))
else
echo " ❌ Failed to delete remote branch: $branch"
fi
fi
done
echo ""
echo "=== Cleanup Summary ==="
echo "Branches processed: $(echo "$branches" | wc -l)"
echo "Branches deleted: $deleted_count"
echo "Branches skipped: $skipped_count"
if [ "$DRY_RUN" = "true" ]; then
echo ""
echo "🔍 This was a DRY RUN - no branches were actually deleted"
echo "To perform actual deletion, run this workflow manually with dry_run=false"
fi
- name: Create summary
if: always()
run: |
echo "## Branch Cleanup Summary" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "**Workflow:** ${{ github.workflow }}" >> $GITHUB_STEP_SUMMARY
echo "**Run ID:** ${{ github.run_id }}" >> $GITHUB_STEP_SUMMARY
echo "**Dry Run:** ${{ steps.dry-run.outputs.dry_run }}" >> $GITHUB_STEP_SUMMARY
echo "**Triggered by:** ${{ github.event_name }}" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "Check the logs above for detailed information about processed branches." >> $GITHUB_STEP_SUMMARY

View File

@@ -1,9 +1,11 @@
name: 🔀 Sync mirror to Gitee
on:
schedule:
# Run every day at 3:00 AM
- cron: '0 3 * * *'
push:
branches:
- master
- dev
create:
jobs:
mirror:
@@ -12,6 +14,7 @@ jobs:
steps:
- name: mirror
continue-on-error: true
if: github.event_name == 'push' || (github.event_name == 'create' && github.event.ref_type == 'tag')
uses: wearerequired/git-mirror-action@v1
env:
SSH_PRIVATE_KEY: ${{ secrets.GITEE_SSH_PRIVATE_KEY }}

View File

@@ -1,4 +1,4 @@
FROM jumpserver/core-base:20251029_031929 AS stage-build
FROM jumpserver/core-base:20251014_095903 AS stage-build
ARG VERSION

View File

@@ -25,8 +25,7 @@ class IntegrationApplicationViewSet(OrgBulkModelViewSet):
}
rbac_perms = {
'get_once_secret': 'accounts.change_integrationapplication',
'get_account_secret': 'accounts.view_integrationapplication',
'get_sdks_info': 'accounts.view_integrationapplication'
'get_account_secret': 'accounts.view_integrationapplication'
}
def read_file(self, path):
@@ -37,6 +36,7 @@ class IntegrationApplicationViewSet(OrgBulkModelViewSet):
@action(
['GET'], detail=False, url_path='sdks',
permission_classes=[IsValidUser]
)
def get_sdks_info(self, request, *args, **kwargs):
code_suffix_mapper = {

View File

@@ -309,10 +309,10 @@ class AssetAccountBulkSerializer(
class Meta:
model = Account
fields = [
'name', 'username', 'secret', 'secret_type', 'secret_reset',
'passphrase', 'privileged', 'is_active', 'comment', 'template',
'on_invalid', 'push_now', 'params', 'assets', 'su_from_username',
'source', 'source_id',
'name', 'username', 'secret', 'secret_type', 'passphrase',
'privileged', 'is_active', 'comment', 'template',
'on_invalid', 'push_now', 'params', 'assets',
'su_from_username', 'source', 'source_id',
]
extra_kwargs = {
'name': {'required': False},

View File

@@ -113,7 +113,7 @@ class BaseAssetViewSet(OrgBulkModelViewSet):
("accounts", AccountSerializer),
)
rbac_perms = (
("match", "assets.view_asset"),
("match", "assets.match_asset"),
("platform", "assets.view_platform"),
("gateways", "assets.view_gateway"),
("accounts", "assets.view_account"),

View File

@@ -43,7 +43,7 @@ class NodeViewSet(SuggestionMixin, OrgBulkModelViewSet):
search_fields = ('full_value',)
serializer_class = serializers.NodeSerializer
rbac_perms = {
'match': 'assets.view_node',
'match': 'assets.match_node',
'check_assets_amount_task': 'assets.change_node'
}

View File

@@ -112,10 +112,8 @@ class PlatformProtocolViewSet(JMSModelViewSet):
class PlatformAutomationMethodsApi(generics.ListAPIView):
permission_classes = (IsValidUser,)
queryset = PlatformAutomation.objects.none()
rbac_perms = {
'list': 'assets.view_platform'
}
@staticmethod
def automation_methods():

View File

@@ -134,7 +134,6 @@ class OIDCAuthCallbackView(View, FlashMessageMixin):
log_prompt = "Process GET requests [OIDCAuthCallbackView]: {}"
logger.debug(log_prompt.format('Start'))
callback_params = request.GET
error_title = _("OpenID Error")
# Retrieve the state value that was previously generated. No state means that we cannot
# authenticate the user (so a failure should be returned).
@@ -173,9 +172,10 @@ class OIDCAuthCallbackView(View, FlashMessageMixin):
try:
user = auth.authenticate(nonce=nonce, request=request, code_verifier=code_verifier)
except IntegrityError as e:
title = _("OpenID Error")
msg = _('Please check if a user with the same username or email already exists')
logger.error(e, exc_info=True)
response = self.get_failed_response('/', error_title, msg)
response = self.get_failed_response('/', title, msg)
return response
if user:
logger.debug(log_prompt.format('Login: {}'.format(user)))
@@ -194,6 +194,7 @@ class OIDCAuthCallbackView(View, FlashMessageMixin):
return HttpResponseRedirect(
next_url or settings.AUTH_OPENID_AUTHENTICATION_REDIRECT_URI
)
if 'error' in callback_params:
logger.debug(
log_prompt.format('Error in callback params: {}'.format(callback_params['error']))
@@ -204,12 +205,9 @@ class OIDCAuthCallbackView(View, FlashMessageMixin):
# OpenID Connect Provider authenticate endpoint.
logger.debug(log_prompt.format('Logout'))
auth.logout(request)
redirect_url = settings.AUTH_OPENID_AUTHENTICATION_FAILURE_REDIRECT_URI
if not user and getattr(request, 'error_message', ''):
response = self.get_failed_response(redirect_url, title=error_title, msg=request.error_message)
return response
logger.debug(log_prompt.format('Redirect'))
return HttpResponseRedirect(redirect_url)
return HttpResponseRedirect(settings.AUTH_OPENID_AUTHENTICATION_FAILURE_REDIRECT_URI)
class OIDCAuthCallbackClientView(BaseAuthCallbackClientView):

View File

@@ -1,7 +1,6 @@
# -*- coding: utf-8 -*-
#
import inspect
import threading
import time
import uuid
from functools import partial
@@ -13,7 +12,6 @@ from django.contrib.auth import (
BACKEND_SESSION_KEY, load_backend,
PermissionDenied, user_login_failed, _clean_credentials,
)
from django.contrib.auth import get_user_model
from django.core.cache import cache
from django.core.exceptions import ImproperlyConfigured
from django.shortcuts import reverse, redirect, get_object_or_404
@@ -48,10 +46,6 @@ def _get_backends(return_tuples=False):
return backends
class OnlyAllowExistUserAuthError(Exception):
pass
auth._get_backends = _get_backends
@@ -60,24 +54,6 @@ def authenticate(request=None, **credentials):
If the given credentials are valid, return a User object.
The reason this authenticate is hacked (overridden):
"""
UserModel = get_user_model()
original_get_or_create = UserModel.objects.get_or_create
thread_local = threading.local()
thread_local.thread_id = threading.get_ident()
def custom_get_or_create(self, *args, **kwargs):
logger.debug(f"get_or_create: thread_id={threading.get_ident()}, username={username}")
if threading.get_ident() != thread_local.thread_id or not settings.ONLY_ALLOW_EXIST_USER_AUTH:
return original_get_or_create(*args, **kwargs)
create_username = kwargs.get('username')
try:
UserModel.objects.get(username=create_username)
except UserModel.DoesNotExist:
raise OnlyAllowExistUserAuthError
return original_get_or_create(*args, **kwargs)
username = credentials.get('username')
temp_user = None
@@ -95,19 +71,10 @@ def authenticate(request=None, **credentials):
# This backend doesn't accept these credentials as arguments. Try the next one.
continue
try:
UserModel.objects.get_or_create = custom_get_or_create.__get__(UserModel.objects)
user = backend.authenticate(request, **credentials)
except PermissionDenied:
# This backend says to stop in our tracks - this user should not be allowed in at all.
break
except OnlyAllowExistUserAuthError:
request.error_message = _(
'''The administrator has enabled "Only allow existing users to log in",
and the current user is not in the user list. Please contact the administrator.'''
)
continue
finally:
UserModel.objects.get_or_create = original_get_or_create
if user is None:
continue

View File

@@ -15,7 +15,7 @@ from common.utils import get_logger
from common.utils.common import get_request_ip
from common.utils.django import reverse, get_object_or_none
from users.models import User
from users.signal_handlers import bind_user_to_org_role, check_only_allow_exist_user_auth
from users.signal_handlers import check_only_allow_exist_user_auth, bind_user_to_org_role
from .mixins import FlashMessageMixin
logger = get_logger(__file__)
@@ -55,6 +55,7 @@ class BaseLoginCallbackView(AuthMixin, FlashMessageMixin, IMClientMixin, View):
)
if not check_only_allow_exist_user_auth(create):
user.delete()
return user, (self.msg_client_err, self.request.error_message)
setattr(user, f'{self.user_type}_id', user_id)

View File

@@ -1,11 +1,9 @@
# -*- coding: utf-8 -*-
#
from django.conf import settings
from typing import Callable
from django.utils.translation import gettext as _
from rest_framework.decorators import action
from rest_framework.throttling import UserRateThrottle
from rest_framework.request import Request
from rest_framework.response import Response
@@ -16,12 +14,8 @@ from orgs.utils import current_org
__all__ = ['SuggestionMixin', 'RenderToJsonMixin']
class CustomUserRateThrottle(UserRateThrottle):
rate = '60/m'
class SuggestionMixin:
suggestion_limit = settings.SUGGESTION_LIMIT
suggestion_limit = 10
filter_queryset: Callable
get_queryset: Callable
@@ -41,7 +35,6 @@ class SuggestionMixin:
queryset = queryset.none()
queryset = self.filter_queryset(queryset)
queryset = queryset[:self.suggestion_limit]
page = self.paginate_queryset(queryset)
@@ -52,11 +45,6 @@ class SuggestionMixin:
serializer = self.get_serializer(queryset, many=True)
return Response(serializer.data)
def get_throttles(self):
if self.action == 'match':
return [CustomUserRateThrottle()]
return super().get_throttles()
class RenderToJsonMixin:
@action(methods=[POST, PUT], detail=False, url_path='render-to-json')

View File

@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
#
from rest_framework.filters import SearchFilter as SearchFilterBase
import base64
import json
import logging
@@ -36,14 +35,6 @@ __all__ = [
]
class SearchFilter(SearchFilterBase):
def get_search_terms(self, request):
params = request.query_params.get(self.search_param, '') or request.query_params.get('search', '')
params = params.replace('\x00', '') # strip null characters
params = params.replace(',', ' ')
return params.split()
class BaseFilterSet(drf_filters.FilterSet):
days = drf_filters.NumberFilter(method="filter_days")
days__lt = drf_filters.NumberFilter(method="filter_days")
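A minimal sketch of what the get_search_terms override in this hunk does to a raw query string; the input value is illustrative only.

```python
# Commas become separators and NUL bytes are stripped before splitting,
# mirroring the SearchFilter.get_search_terms override shown above.
raw = "web,db\x00 prod"
terms = raw.replace("\x00", "").replace(",", " ").split()
assert terms == ["web", "db", "prod"]
```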

View File

@@ -1,13 +1,9 @@
import re
import uuid
import time
from django.conf import settings
from django.core.management.base import BaseCommand
from django.test import Client
from django.urls import URLPattern, URLResolver
from django.contrib.auth import get_user_model
from django.contrib.auth.models import AnonymousUser
from jumpserver.urls import api_v1
@@ -89,262 +85,50 @@ known_error_urls = [
'/api/v1/terminal/sessions/00000000-0000-0000-0000-000000000000/replay/download/',
]
# API whitelist - APIs that normal users may access
user_accessible_urls = known_unauth_urls + [
# add more APIs that normal users may access
"/api/v1/settings/public/",
"/api/v1/users/profile/",
"/api/v1/users/change-password/",
"/api/v1/users/logout/",
"/api/v1/settings/chatai-prompts/",
"/api/v1/authentication/confirm/",
"/api/v1/users/connection-token/",
"/api/v1/authentication/temp-tokens/",
"/api/v1/notifications/backends/",
"/api/v1/authentication/passkeys/",
"/api/v1/orgs/orgs/current/",
"/api/v1/tickets/apply-asset-tickets/",
"/api/v1/ops/celery/task/00000000-0000-0000-0000-000000000000/task-execution/00000000-0000-0000-0000-000000000000/log/",
"/api/v1/assets/favorite-assets/",
"/api/v1/authentication/connection-token/",
"/api/v1/ops/jobs/",
"/api/v1/assets/categories/",
"/api/v1/tickets/tickets/",
"/api/v1/authentication/ssh-key/",
"/api/v1/terminal/my-sessions/",
"/api/v1/authentication/access-keys/",
"/api/v1/users/profile/permissions/",
"/api/v1/tickets/apply-login-asset-tickets/",
"/api/v1/resources/",
"/api/v1/ops/celery/task/00000000-0000-0000-0000-000000000000/task-execution/00000000-0000-0000-0000-000000000000/result/",
"/api/v1/notifications/site-messages/",
"/api/v1/notifications/site-messages/unread-total/",
"/api/v1/assets/assets/suggestions/",
"/api/v1/search/",
"/api/v1/notifications/user-msg-subscription/",
"/api/v1/ops/ansible/job-execution/00000000-0000-0000-0000-000000000000/log/",
"/api/v1/tickets/apply-login-tickets/",
"/api/v1/ops/variables/form-data/",
"/api/v1/ops/variables/help/",
"/api/v1/users/profile/password/",
"/api/v1/tickets/apply-command-tickets/",
"/api/v1/ops/job-executions/",
"/api/v1/audits/my-login-logs/",
"/api/v1/terminal/components/connect-methods/"
"/api/v1/ops/task-executions/",
"/api/v1/terminal/sessions/online-info/",
"/api/v1/ops/adhocs/",
"/api/v1/tickets/apply-nodes/suggestions/",
"/api/v1/tickets/apply-assets/suggestions/",
"/api/v1/settings/server-info/",
"/api/v1/ops/playbooks/",
"/api/v1/assets/categories/types/",
"/api/v1/assets/protocols/",
"/api/v1/common/countries/",
"/api/v1/audits/jobs/",
"/api/v1/terminal/components/connect-methods/",
"/api/v1/ops/task-executions/",
]
errors = {}
class Command(BaseCommand):
"""
Check API authorization and user access permissions.
This command performs two types of checks:
1. Anonymous access check - finds APIs that can be accessed without authentication
2. User access check - finds APIs that can be accessed by a normal user
The functionality is split into two methods:
- check_anonymous_access(): Checks for APIs accessible without authentication
- check_user_access(): Checks for APIs accessible by a normal user
Usage examples:
# Check both anonymous and user access (default behavior)
python manage.py check_api
# Check only anonymous access
python manage.py check_api --skip-user-check
# Check only user access
python manage.py check_api --skip-anonymous-check
# Check user access and update whitelist
python manage.py check_api --update-whitelist
"""
help = 'Check API authorization and user access permissions'
password = uuid.uuid4().hex
unauth_urls = []
error_urls = []
unformat_urls = []
# APIs the user can access but which are not in the whitelist
unexpected_access = []
help = 'Check api if unauthorized'
def add_arguments(self, parser):
parser.add_argument(
'--skip-anonymous-check',
action='store_true',
help='Skip anonymous access check (only check user access)',
)
parser.add_argument(
'--skip-user-check',
action='store_true',
help='Skip user access check (only check anonymous access)',
)
parser.add_argument(
'--update-whitelist',
action='store_true',
help='Update the user accessible URLs whitelist based on current scan results',
)
def create_test_user(self):
"""Create a test user"""
User = get_user_model()
username = 'test_user_api_check'
email = 'test@example.com'
# Delete any pre-existing test user
User.objects.filter(username=username).delete()
# Create a new test user
user = User.objects.create_user(
username=username,
email=email,
password=self.password,
is_active=True
)
return user
def check_user_api_access(self, urls):
"""Check which APIs a normal user can access"""
user = self.create_test_user()
def handle(self, *args, **options):
settings.LOG_LEVEL = 'ERROR'
urls = get_api_urls()
client = Client()
client.defaults['HTTP_HOST'] = 'localhost'
# Log the user in
login_success = client.login(username=user.username, password=self.password)
if not login_success:
self.stdout.write(
self.style.ERROR('Failed to login test user')
)
return [], []
accessible_urls = []
unauth_urls = []
error_urls = []
self.stdout.write('Checking user API access...')
for url, ourl in urls:
if '(' in url or '<' in url:
continue
try:
response = client.get(url, follow=True)
time.sleep(0.1)
# A 200 or 201 status code means the user can access it
if response.status_code in [200, 201]:
accessible_urls.append((url, ourl, response.status_code))
elif response.status_code == 403:
# 403 means insufficient permissions, which is expected
pass
else:
# Other status codes may indicate errors
error_urls.append((url, ourl, response.status_code))
except Exception as e:
error_urls.append((url, ourl, str(e)))
# Clean up the test user
user.delete()
return accessible_urls, error_urls
def check_anonymous_access(self, urls):
"""Check anonymous access"""
client = Client()
client.defaults['HTTP_HOST'] = 'localhost'
unformat_urls = []
for url, ourl in urls:
if '(' in url or '<' in url:
self.unformat_urls.append([url, ourl])
unformat_urls.append([url, ourl])
continue
try:
response = client.get(url, follow=True)
if response.status_code != 401:
errors[url] = str(response.status_code) + ' ' + str(ourl)
self.unauth_urls.append(url)
unauth_urls.append(url)
except Exception as e:
errors[url] = str(e)
self.error_urls.append(url)
error_urls.append(url)
self.unauth_urls = set(self.unauth_urls) - set(known_unauth_urls)
self.error_urls = set(self.error_urls)
self.unformat_urls = set(self.unformat_urls)
def print_anonymous_access_result(self):
print("\n=== Anonymous Access Check ===")
print("Unauthorized urls:")
if not self.unauth_urls:
unauth_urls = set(unauth_urls) - set(known_unauth_urls)
print("\nUnauthorized urls:")
if not unauth_urls:
print(" Empty, very good!")
for url in self.unauth_urls:
for url in unauth_urls:
print('"{}", {}'.format(url, errors.get(url, '')))
print("\nError urls:")
if not self.error_urls:
if not error_urls:
print(" Empty, very good!")
for url in set(self.error_urls):
for url in set(error_urls):
print(url, ': ' + errors.get(url))
print("\nUnformat urls:")
if not self.unformat_urls:
if not unformat_urls:
print(" Empty, very good!")
for url in self.unformat_urls:
for url in unformat_urls:
print(url)
def check_user_access(self, urls, update_whitelist=False):
"""Check normal-user access"""
print("\n=== User Access Check ===")
accessible_urls, user_error_urls = self.check_user_api_access(urls)
# Check for accessible APIs that are not in the whitelist
accessible_url_list = [url for url, _, _ in accessible_urls]
unexpected_access = set(accessible_url_list) - set(user_accessible_urls)
self.unexpected_access = unexpected_access
# If the update-whitelist option is enabled
if update_whitelist:
print("\n=== Updating Whitelist ===")
new_whitelist = sorted(set(user_accessible_urls + accessible_url_list))
print("Updated whitelist would include:")
for url in new_whitelist:
print(f' "{url}",')
print(f"\nTotal URLs in whitelist: {len(new_whitelist)}")
def print_user_access_result(self):
print("\n=== User Access Check ===")
print("User unexpected urls:")
if self.unexpected_access:
print(f" Error: Found {len(self.unexpected_access)} URLs accessible by user but not in whitelist:")
for url in self.unexpected_access:
print(f' "{url}"')
else:
print(" Empty, very good!")
def handle(self, *args, **options):
settings.LOG_LEVEL = 'ERROR'
urls = get_api_urls()
# Check anonymous access (runs by default)
if not options['skip_anonymous_check']:
self.check_anonymous_access(urls)
# Check user access (runs by default)
if not options['skip_user_check']:
self.check_user_access(urls, options['update_whitelist'])
print("\nCheck total urls: ", len(urls))
self.print_anonymous_access_result()
self.print_user_access_result()
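Because the hunk above interleaves old and new lines, here is a condensed, hedged sketch of the anonymous-access rule the command enforces; the helper name is invented for illustration and is not part of the source.

```python
# Any API URL that does not answer 401 to an unauthenticated GET is flagged,
# unless it is listed in known_unauth_urls; parameterized URLs are skipped.
from django.test import Client

def find_unexpected_anonymous_urls(urls, known_unauth_urls):  # hypothetical helper
    client = Client()
    client.defaults['HTTP_HOST'] = 'localhost'
    flagged = []
    for url in urls:
        if '(' in url or '<' in url:  # unformatted URL patterns are skipped
            continue
        if client.get(url, follow=True).status_code != 401:
            flagged.append(url)
    return sorted(set(flagged) - set(known_unauth_urls))
```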

View File

@@ -207,8 +207,7 @@ class WeComTool(object):
def check_state(self, state, request=None):
return cache.get(state) == self.WECOM_STATE_VALUE or \
request.session.get(self.WECOM_STATE_SESSION_KEY) == state or \
request.GET.get('state') == state # Opening in the WeCom desktop client recreates the session, which makes the session check fail
request.session[self.WECOM_STATE_SESSION_KEY] == state
def wrap_redirect_url(self, next_url):
params = {

View File

@@ -1,137 +1,16 @@
import json
import threading
import time
import redis
from django.core.cache import cache
from redis.client import PubSub
from common.db.utils import safe_db_connection
from common.utils import get_logger
logger = get_logger(__name__)
import threading
from concurrent.futures import ThreadPoolExecutor
_PUBSUB_HUBS = {}
def _get_pubsub_hub(db=10):
hub = _PUBSUB_HUBS.get(db)
if not hub:
hub = PubSubHub(db=db)
_PUBSUB_HUBS[db] = hub
return hub
class PubSubHub:
def __init__(self, db=10):
self.db = db
self.redis = get_redis_client(db)
self.pubsub = self.redis.pubsub()
self.handlers = {}
self.lock = threading.RLock()
self.listener = None
self.running = False
self.executor = ThreadPoolExecutor(max_workers=8, thread_name_prefix='pubsub_handler')
def __del__(self):
self.executor.shutdown(wait=True)
def start(self):
with self.lock:
if self.listener and self.listener.is_alive():
return
self.running = True
self.listener = threading.Thread(name='pubsub_listen', target=self._listen_loop, daemon=True)
self.listener.start()
def _listen_loop(self):
backoff = 1
while self.running:
try:
for msg in self.pubsub.listen():
if msg.get("type") != "message":
continue
ch = msg.get("channel")
if isinstance(ch, bytes):
ch = ch.decode()
data = msg.get("data")
try:
if isinstance(data, bytes):
item = json.loads(data.decode())
elif isinstance(data, str):
item = json.loads(data)
else:
item = data
except Exception:
item = data
# Handle messages with the thread pool
future = self.executor.submit(self._dispatch, ch, msg, item)
future.add_done_callback(
lambda f: f.exception() and logger.error(f"handle pubsub msg {msg} failed: {f.exception()}"))
backoff = 1
except Exception as e:
logger.error(f'PubSub listen error: {e}')
time.sleep(backoff)
backoff = min(backoff * 2, 30)
try:
self._reconnect()
except Exception as re:
logger.error(f'PubSub reconnect error: {re}')
def _dispatch(self, ch, raw_msg, item):
with self.lock:
handler = self.handlers.get(ch)
if not handler:
return
_next, error, _complete = handler
try:
with safe_db_connection():
_next(item)
except Exception as e:
logger.error(f'Subscribe handler handle msg error: {e}')
try:
if error:
error(raw_msg, item)
except Exception:
pass
def add_subscription(self, pb, _next, error, complete):
ch = pb.ch
with self.lock:
existed = bool(self.handlers.get(ch))
self.handlers[ch] = (_next, error, complete)
try:
if not existed:
self.pubsub.subscribe(ch)
except Exception as e:
logger.error(f'Subscribe channel {ch} error: {e}')
self.start()
return Subscription(pb=pb, hub=self, ch=ch, handler=(_next, error, complete))
def remove_subscription(self, sub):
ch = sub.ch
with self.lock:
existed = self.handlers.pop(ch, None)
if existed:
try:
self.pubsub.unsubscribe(ch)
except Exception as e:
logger.warning(f'Unsubscribe {ch} error: {e}')
def _reconnect(self):
with self.lock:
channels = [ch for ch, h in self.handlers.items() if h]
try:
self.pubsub.close()
except Exception:
pass
self.redis = get_redis_client(self.db)
self.pubsub = self.redis.pubsub()
if channels:
self.pubsub.subscribe(channels)
def get_redis_client(db=0):
client = cache.client.get_client()
@@ -146,11 +25,15 @@ class RedisPubSub:
self.redis = get_redis_client(db)
def subscribe(self, _next, error=None, complete=None):
hub = _get_pubsub_hub(self.db)
return hub.add_subscription(self, _next, error, complete)
ps = self.redis.pubsub()
ps.subscribe(self.ch)
sub = Subscription(self, ps)
sub.keep_handle_msg(_next, error, complete)
return sub
def resubscribe(self, _next, error=None, complete=None):
return self.subscribe(_next, error, complete)
self.redis = get_redis_client(self.db)
self.subscribe(_next, error, complete)
def publish(self, data):
data_json = json.dumps(data)
@@ -159,19 +42,85 @@ class RedisPubSub:
class Subscription:
def __init__(self, pb: RedisPubSub, hub: PubSubHub, ch: str, handler):
def __init__(self, pb: RedisPubSub, sub: PubSub):
self.pb = pb
self.ch = ch
self.hub = hub
self.handler = handler
self.ch = pb.ch
self.sub = sub
self.unsubscribed = False
def unsubscribe(self):
if self.unsubscribed:
return
self.unsubscribed = True
logger.info(f"Unsubscribed from channel: {self.ch}")
def _handle_msg(self, _next, error, complete):
"""
handle arg is the pub published
:param _next: next msg handler
:param error: error msg handler
:param complete: complete msg handler
:return:
"""
msgs = self.sub.listen()
if error is None:
error = lambda m, i: None
if complete is None:
complete = lambda: None
try:
self.hub.remove_subscription(self)
for msg in msgs:
if msg["type"] != "message":
continue
item = None
try:
item_json = msg['data'].decode()
item = json.loads(item_json)
with safe_db_connection():
_next(item)
except Exception as e:
error(msg, item)
logger.error('Subscribe handler handle msg error: {}'.format(e))
except Exception as e:
if self.unsubscribed:
logger.debug('Subscription unsubscribed')
else:
logger.error('Consume msg error: {}'.format(e))
self.retry(_next, error, complete)
return
try:
complete()
except Exception as e:
logger.error('Complete subscribe error: {}'.format(e))
pass
try:
self.unsubscribe()
except Exception as e:
logger.error("Redis observer close error: {}".format(e))
def keep_handle_msg(self, _next, error, complete):
t = threading.Thread(target=self._handle_msg, args=(_next, error, complete))
t.daemon = True
t.start()
return t
def unsubscribe(self):
self.unsubscribed = True
logger.info(f"Unsubscribed from channel: {self.sub}")
try:
self.sub.close()
except Exception as e:
logger.warning(f'Unsubscribe msg error: {e}')
def retry(self, _next, error, complete):
logger.info('Retry subscribe channel: {}'.format(self.ch))
times = 0
while True:
try:
self.unsubscribe()
self.pb.resubscribe(_next, error, complete)
break
except Exception as e:
logger.error('Retry #{} {} subscribe channel error: {}'.format(times, self.ch, e))
times += 1
time.sleep(times * 2)
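Both the hub-based variant and the per-subscription-thread variant in the hunks above keep the same outward API. A minimal usage sketch, assuming the import path, the RedisPubSub(channel) constructor, and the channel name (none of which appear in the diff itself):

```python
from common.utils.connection import RedisPubSub  # assumed module path

pub_sub = RedisPubSub('jms.setting.changed')  # hypothetical channel name

def on_message(item):
    # _next handler: receives the JSON-decoded payload of each published message
    print('received:', item)

subscription = pub_sub.subscribe(on_message)  # starts a background listener
pub_sub.publish({'name': 'SOME_SETTING'})     # payload is json.dumps()-ed before PUBLISH
subscription.unsubscribe()                    # stop consuming the channel
```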

View File

@@ -121,7 +121,7 @@
"AppletHelpText": "In the upload process, if the application does not exist, create the application; if it exists, update the application.",
"AppletHostCreate": "Add RemoteApp machine",
"AppletHostDetail": "RemoteApp machine",
"AppletHostSelectHelpMessage": "When connecting to an asset, the selection of the application publishing machine is random (but the last used one is preferred). if you want to assign a specific publishing machine to an asset, you can tag it as [发布机: publishing machine name] [AppletHost: publishing machine name] [仅发布机: publishing machine name] [AppletHostOnly: publishing machine name]; <br>when selecting an account for the publishing machine, the following situations will choose the user's own <b>account with the same name or proprietary account (starting with js)</b>, otherwise use a public account (starting with jms):<br>&nbsp; 1. both the publishing machine and application support concurrent;<br>&nbsp; 2. the publishing machine supports concurrent, but the application does not, and the current application does not use a proprietary account;<br>&nbsp; 3. the publishing machine does not support concurrent, the application either supports or does not support concurrent, and no application uses a proprietary account;<br> note: whether the application supports concurrent connections is decided by the developer, and whether the host supports concurrent connections is decided by the single user single session setting in the publishing machine configuration",
"AppletHostSelectHelpMessage": "When connecting to an asset, the selection of the application publishing machine is random (but the last used one is preferred). if you want to assign a specific publishing machine to an asset, you can tag it as [publishing machine: publishing machine name] or [AppletHost: publishing machine name]; <br>when selecting an account for the publishing machine, the following situations will choose the user's own <b>account with the same name or proprietary account (starting with js)</b>, otherwise use a public account (starting with jms):<br>&nbsp; 1. both the publishing machine and application support concurrent;<br>&nbsp; 2. the publishing machine supports concurrent, but the application does not, and the current application does not use a proprietary account;<br>&nbsp; 3. the publishing machine does not support concurrent, the application either supports or does not support concurrent, and no application uses a proprietary account;<br> note: whether the application supports concurrent connections is decided by the developer, and whether the host supports concurrent connections is decided by the single user single session setting in the publishing machine configuration",
"AppletHostUpdate": "Update the remote app publishing machine",
"AppletHostZoneHelpText": "This domain belongs to the system organization",
"AppletHosts": "RemoteApp machine",

View File

@@ -122,7 +122,7 @@
"AppletHelpText": "在上传过程中,如果应用不存在,则创建该应用;如果已存在,则进行应用更新。",
"AppletHostCreate": "添加远程应用发布机",
"AppletHostDetail": "远程应用发布机详情",
"AppletHostSelectHelpMessage": "连接资产时,应用发布机选择是随机的(但优先选择上次使用的),如果想为某个资产固定发布机,可以指定标签 <发布机:发布机名称>、<AppletHost:发布机名称>、<仅发布机:发布机名称>、 <AppletHostOnly:发布机名称>; <br>连接该发布机选择账号时,以下情况会选择用户的 <b>同名账号 或 专有账号(js开头)</b>,否则使用公用账号(jms开头)<br>&nbsp; 1. 发布机和应用都支持并发; <br>&nbsp; 2. 发布机支持并发,应用不支持并发,当前应用没有使用专有账号; <br>&nbsp; 3. 发布机不支持并发,应用支持并发或不支持,没有任一应用使用专有账号; <br> 注意: 应用支不支持并发是开发者决定,主机支不支持是发布机配置中的 单用户单会话决定",
"AppletHostSelectHelpMessage": "连接资产时,应用发布机选择是随机的(但优先选择上次使用的),如果想为某个资产固定发布机,可以指定标签 <发布机:发布机名称> <AppletHost:发布机名称>; <br>连接该发布机选择账号时,以下情况会选择用户的 <b>同名账号 或 专有账号(js开头)</b>,否则使用公用账号(jms开头)<br>&nbsp; 1. 发布机和应用都支持并发; <br>&nbsp; 2. 发布机支持并发,应用不支持并发,当前应用没有使用专有账号; <br>&nbsp; 3. 发布机不支持并发,应用支持并发或不支持,没有任一应用使用专有账号; <br> 注意: 应用支不支持并发是开发者决定,主机支不支持是发布机配置中的 单用户单会话决定",
"AppletHostUpdate": "更新远程应用发布机",
"AppletHostZoneHelpText": "这里的网域属于 System 组织",
"AppletHosts": "应用发布机",

View File

@@ -729,12 +729,6 @@ class Config(dict):
'LOKI_BASE_URL': 'http://loki:3100',
'TOOL_USER_ENABLED': False,
# Suggestion api
'SUGGESTION_LIMIT': 10,
# MCP
'MCP_ENABLED': False,
}
old_config_map = {

View File

@@ -266,6 +266,3 @@ LOKI_LOG_ENABLED = CONFIG.LOKI_LOG_ENABLED
LOKI_BASE_URL = CONFIG.LOKI_BASE_URL
TOOL_USER_ENABLED = CONFIG.TOOL_USER_ENABLED
SUGGESTION_LIMIT = CONFIG.SUGGESTION_LIMIT
MCP_ENABLED = CONFIG.MCP_ENABLED

View File

@@ -38,12 +38,12 @@ REST_FRAMEWORK = {
),
'DEFAULT_FILTER_BACKENDS': (
'django_filters.rest_framework.DjangoFilterBackend',
'common.drf.filters.SearchFilter',
'rest_framework.filters.SearchFilter',
'common.drf.filters.RewriteOrderingFilter',
),
'DEFAULT_METADATA_CLASS': 'common.drf.metadata.SimpleMetadataWithFilters',
'ORDERING_PARAM': "order",
'SEARCH_PARAM': "q",
'SEARCH_PARAM': "search",
'DATETIME_FORMAT': '%Y/%m/%d %H:%M:%S %z',
'DATETIME_INPUT_FORMATS': ['%Y/%m/%d %H:%M:%S %z', 'iso-8601', '%Y-%m-%d %H:%M:%S %z'],
'DEFAULT_PAGINATION_CLASS': 'jumpserver.rewriting.pagination.MaxLimitOffsetPagination',
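The SEARCH_PARAM switch above changes only the query-string key clients must send; a hedged example with a placeholder host and token, not taken from the source:

```python
import requests

base = "https://jumpserver.example.com/api/v1/assets/assets/"  # placeholder URL
headers = {"Authorization": "Token <token>"}                    # placeholder credential

requests.get(base, params={"q": "web"}, headers=headers)       # with 'SEARCH_PARAM': 'q'
requests.get(base, params={"search": "web"}, headers=headers)  # with 'SEARCH_PARAM': 'search'
```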

View File

@@ -35,14 +35,11 @@ resource_api = [
api_v1 = resource_api + [
path('prometheus/metrics/', api.PrometheusMetricsApi.as_view()),
path('resources/', api.ResourceTypeListApi.as_view(), name='resource-list'),
path('resources/<str:resource>/', api.ResourceListApi.as_view()),
path('resources/<str:resource>/<str:pk>/', api.ResourceDetailApi.as_view()),
path('search/', api.GlobalSearchView.as_view()),
]
if settings.MCP_ENABLED:
api_v1.extend([
path('resources/', api.ResourceTypeListApi.as_view(), name='resource-list'),
path('resources/<str:resource>/', api.ResourceListApi.as_view()),
path('resources/<str:resource>/<str:pk>/', api.ResourceDetailApi.as_view()),
])
app_view_patterns = [
path('auth/', include('authentication.urls.view_urls'), name='auth'),

View File

@@ -152,7 +152,7 @@ class UploadFileRunner:
host_pattern="*",
inventory=self.inventory,
module='copy',
module_args=f"src={self.src_paths}/ dest={self.dest_path}/",
module_args=f"src={self.src_paths}/ dest={self.dest_path}",
verbosity=verbosity,
event_handler=self.cb.event_handler,
status_handler=self.cb.status_handler,

View File

@@ -24,7 +24,5 @@ class OrgMixin:
@sync_to_async
def has_perms(self, user, perms):
self.cookie = self.get_cookie()
self.org = self.get_current_org()
with tmp_to_org(self.org):
return user.has_perms(perms)

View File

@@ -56,6 +56,8 @@ class ToolsWebsocket(AsyncJsonWebsocketConsumer, OrgMixin):
async def connect(self):
user = self.scope["user"]
if user.is_authenticated:
self.cookie = self.get_cookie()
self.org = self.get_current_org()
has_perm = self.has_perms(user, ['rbac.view_systemtools'])
if await self.is_superuser(user) or (settings.TOOL_USER_ENABLED and has_perm):
await self.accept()
@@ -126,14 +128,14 @@ class ToolsWebsocket(AsyncJsonWebsocketConsumer, OrgMixin):
close_old_connections()
class LdapWebsocket(AsyncJsonWebsocketConsumer, OrgMixin):
class LdapWebsocket(AsyncJsonWebsocketConsumer):
category: str
async def connect(self):
user = self.scope["user"]
query = parse_qs(self.scope['query_string'].decode())
self.category = query.get('category', [User.Source.ldap.value])[0]
if user.is_authenticated and await self.has_perms(user, ['settings.view_setting']):
if user.is_authenticated:
await self.accept()
else:
await self.close()
@@ -164,6 +166,8 @@ class LdapWebsocket(AsyncJsonWebsocketConsumer, OrgMixin):
config = {
'server_uri': serializer.validated_data.get(f"{prefix}SERVER_URI"),
'bind_dn': serializer.validated_data.get(f"{prefix}BIND_DN"),
'password': (serializer.validated_data.get(f"{prefix}BIND_PASSWORD") or
getattr(settings, f"{prefix}BIND_PASSWORD")),
'use_ssl': serializer.validated_data.get(f"{prefix}START_TLS", False),
'search_ou': serializer.validated_data.get(f"{prefix}SEARCH_OU"),
'search_filter': serializer.validated_data.get(f"{prefix}SEARCH_FILTER"),
@@ -171,12 +175,6 @@ class LdapWebsocket(AsyncJsonWebsocketConsumer, OrgMixin):
'auth_ldap': serializer.validated_data.get(f"{prefix.rstrip('_')}", False)
}
password = serializer.validated_data.get(f"{prefix}BIND_PASSWORD")
if not password and config['server_uri'] == getattr(settings, f"{prefix}SERVER_URI"):
# Only reuse the stored password when the server address has not been changed
config['password'] = getattr(settings, f"{prefix}BIND_PASSWORD")
else:
config['password'] = password
return config
@staticmethod
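A small sketch of the bind-password fallback rule in the hunk above; the names are illustrative, while the real code reads the values from the serializer and settings.

```python
def resolve_bind_password(submitted_password, submitted_uri, stored_uri, stored_password):
    # Reuse the stored password only when the server address was not changed;
    # otherwise the (possibly empty) submitted password wins.
    if not submitted_password and submitted_uri == stored_uri:
        return stored_password
    return submitted_password
```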

View File

@@ -41,8 +41,8 @@ class UserViewSet(CommonApiMixin, UserQuerysetMixin, SuggestionMixin, BulkModelV
permission_classes = [RBACPermission, UserObjectPermission]
serializer_classes = {
'default': UserSerializer,
'suggestion': MiniUserSerializer,
'invite': InviteSerializer,
'match': MiniUserSerializer,
'retrieve': UserRetrieveSerializer,
}
rbac_perms = {

View File

@@ -93,10 +93,10 @@ dependencies = [
'celery==5.3.1',
'flower==2.0.1',
'django-celery-beat==2.6.0',
'kombu==5.3.5',
'kombu==5.3.1',
'uvicorn==0.22.0',
'websockets==11.0.3',
'python-ldap==3.4.5',
'python-ldap==3.4.3',
'ldap3==2.9.1',
'django-radius',
'django-cas-ng',

uv.lock generated
View File

@@ -1,5 +1,5 @@
version = 1
revision = 3
revision = 2
requires-python = ">=3.11"
resolution-markers = [
"python_full_version >= '3.14'",
@@ -2553,7 +2553,7 @@ requires-dist = [
{ name = "itypes", specifier = "==1.2.0" },
{ name = "jinja2", specifier = "==3.1.6" },
{ name = "jsonfield2", specifier = "==4.0.0.post0" },
{ name = "kombu", specifier = "==5.3.5" },
{ name = "kombu", specifier = "==5.3.1" },
{ name = "ldap3", specifier = "==2.9.1" },
{ name = "lxml", specifier = "==5.2.1" },
{ name = "markupsafe", specifier = "==2.1.3" },
@@ -2671,15 +2671,15 @@ wheels = [
[[package]]
name = "kombu"
version = "5.3.5"
version = "5.3.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "amqp" },
{ name = "vine" },
]
sdist = { url = "https://files.pythonhosted.org/packages/55/61/0b91085837d446570ea12f63f79463e5a74b449956b1ca9d1946a6f584c2/kombu-5.3.5.tar.gz", hash = "sha256:30e470f1a6b49c70dc6f6d13c3e4cc4e178aa6c469ceb6bcd55645385fc84b93", size = 438460, upload-time = "2024-01-12T19:55:54.982Z" }
sdist = { url = "https://files.pythonhosted.org/packages/c8/69/b703f8ec8d0406be22534dad885cac847fe092b793c4893034e3308feb9b/kombu-5.3.1.tar.gz", hash = "sha256:fbd7572d92c0bf71c112a6b45163153dea5a7b6a701ec16b568c27d0fd2370f2", size = 434284, upload-time = "2023-06-15T13:16:22.683Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f7/88/daca086d72832c74a7e239558ad484644c8cda0b9ae8a690f247bf13c268/kombu-5.3.5-py3-none-any.whl", hash = "sha256:0eac1bbb464afe6fb0924b21bf79460416d25d8abc52546d4f16cad94f789488", size = 200001, upload-time = "2024-01-12T19:55:51.59Z" },
{ url = "https://files.pythonhosted.org/packages/63/58/b23b9c1ffb30d8b5cdfc7bdecb17bfd7ea20c619e86e515297b496177144/kombu-5.3.1-py3-none-any.whl", hash = "sha256:48ee589e8833126fd01ceaa08f8a2041334e9f5894e5763c8486a550454551e9", size = 198498, upload-time = "2023-06-15T13:16:14.57Z" },
]
[[package]]