mirror of https://github.com/jumpserver/jumpserver.git
synced 2025-12-26 05:52:36 +00:00

Compare commits
1 commit: d1c11eb2d7

2  .github/workflows/release-drafter.yml  vendored
@@ -21,7 +21,7 @@ jobs:
TAG=$(basename ${GITHUB_REF})
VERSION=${TAG/v/}
wget https://raw.githubusercontent.com/jumpserver/installer/master/quick_start.sh
sed -i "s@VERSION=dev@VERSION=v${VERSION}@g" quick_start.sh
sed -i "s@Version=.*@Version=v${VERSION}@g" quick_start.sh
echo "::set-output name=TAG::$TAG"
echo "::set-output name=VERSION::$VERSION"
- name: Create Release
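
Note: the step above derives VERSION from the pushed tag — basename strips "refs/tags/" and the bash substitution ${TAG/v/} drops the first "v" — before sed rewrites quick_start.sh. A rough Python equivalent of that derivation, for illustration only (the workflow itself is bash):

    # Illustrative Python equivalent of TAG=$(basename ${GITHUB_REF}); VERSION=${TAG/v/}
    import posixpath

    def tag_to_version(github_ref: str) -> str:
        tag = posixpath.basename(github_ref)   # "refs/tags/v2.28.8" -> "v2.28.8"
        return tag.replace("v", "", 1)         # drop the first "v", like ${TAG/v/}

    print(tag_to_version("refs/tags/v2.28.8"))  # -> 2.28.8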

@@ -1,4 +1,4 @@
FROM python:3.8-slim-bullseye as stage-build
FROM python:3.8-slim as stage-build
ARG TARGETARCH

ARG VERSION

@@ -8,7 +8,7 @@ WORKDIR /opt/jumpserver
ADD . .
RUN cd utils && bash -ixeu build.sh

FROM python:3.8-slim-bullseye
FROM python:3.8-slim
ARG TARGETARCH
MAINTAINER JumpServer Team <ibuler@qq.com>

@@ -38,7 +38,6 @@ ARG TOOLS=" \
default-mysql-client \
iputils-ping \
locales \
patch \
procps \
redis-tools \
telnet \

@@ -88,10 +87,8 @@ RUN --mount=type=cache,target=/root/.cache/pip \
&& pip config set global.index-url ${PIP_MIRROR} \
&& pip install --upgrade pip \
&& pip install --upgrade setuptools wheel \
&& pip install Cython==0.29.35 \
&& pip install --no-build-isolation pymssql \
&& pip install $(grep -E 'jms|jumpserver' requirements/requirements.txt) -i ${PIP_JMS_MIRROR} \
&& pip install -r requirements/requirements.txt --use-deprecated=legacy-resolver
&& pip install -r requirements/requirements.txt

COPY --from=stage-build /opt/jumpserver/release/jumpserver /opt/jumpserver
RUN echo > /opt/jumpserver/config.yml \

2  GITSHA
@@ -1 +1 @@
29ab6bbee7f1ac2dee1826e42b65039f260f6fd0
8a77a7b8b57c5b0fb8455f3b34fc9b6f3c3a24c1

@@ -2,8 +2,9 @@
# -*- coding: utf-8 -*-
#

import logging
import uuid
import logging
from functools import reduce
from collections import OrderedDict

from django.db import models

@@ -13,6 +14,7 @@ from rest_framework.exceptions import ValidationError
from common.db.fields import JsonDictTextField
from common.utils import lazyproperty
from orgs.mixins.models import OrgModelMixin, OrgManager

from .base import AbsConnectivity

__all__ = ['Asset', 'ProtocolsMixin', 'Platform', 'AssetQuerySet']

@@ -4,16 +4,16 @@

import logging

from django.core.cache import cache
from django.core.validators import MinValueValidator, MaxValueValidator
from django.db import models
from django.db.models import Q
from django.utils.translation import ugettext_lazy as _
from django.core.validators import MinValueValidator, MaxValueValidator
from django.core.cache import cache

from common.utils import signer, get_object_or_none, is_uuid
from .base import BaseUser
from .asset import Asset
from .authbook import AuthBook
from .base import BaseUser


__all__ = ['AdminUser', 'SystemUser']
logger = logging.getLogger(__name__)

@@ -187,22 +187,22 @@ class AuthMixin:
if username == '':
username = self.username

not_stu_query = Q(asset=asset, username=username, systemuser__isnull=True)
stu_query = Q(asset=asset, systemuser=self)
not_stu_qs = AuthBook.objects.filter(not_stu_query).order_by('-date_created')
stu_qs = AuthBook.objects.filter(stu_query).order_by('-date_created')
authbook = AuthBook.objects.filter(
asset=asset, username=username, systemuser__isnull=True
).order_by('-date_created').first()

authbook = not_stu_qs.first()
if not authbook:
authbook = stu_qs.first()
authbook = AuthBook.objects.filter(
asset=asset, systemuser=self
).order_by('-date_created').first()

if not authbook:
return None
authbook.load_auth()

self.password = authbook.password or self.password or ''
self.private_key = authbook.private_key or self.private_key or ''
self.public_key = authbook.public_key or self.public_key or ''
authbook.load_auth()
self.password = authbook.password
self.private_key = authbook.private_key
self.public_key = authbook.public_key

def load_asset_more_auth(self, asset_id=None, username=None, user_id=None):
from users.models import User
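
Note: both variants in this hunk implement the same lookup order — prefer the newest AuthBook row stored for the asset and plain username (systemuser__isnull=True), then fall back to the newest row bound to this system user. A plain-Python sketch of that fallback over a toy record list (field names mirror the hunk, but this is not the real ORM API):

    # Toy illustration of the "manual entry first, system-user entry second" lookup.
    from datetime import datetime

    records = [
        {"asset": "web-1", "username": "root", "systemuser": None,  "date_created": datetime(2022, 5, 2)},
        {"asset": "web-1", "username": "root", "systemuser": "su1", "date_created": datetime(2022, 5, 3)},
    ]

    def pick_authbook(asset, username, systemuser_id):
        def newest(rows):
            return max(rows, key=lambda r: r["date_created"], default=None)
        manual = newest([r for r in records
                         if r["asset"] == asset and r["username"] == username and r["systemuser"] is None])
        # Fall back to the entry bound to this system user if no manual entry exists
        return manual or newest([r for r in records
                                 if r["asset"] == asset and r["systemuser"] == systemuser_id])

    print(pick_authbook("web-1", "root", "su1"))  # the manual entry wins even though it is older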

@@ -249,19 +249,12 @@ class SystemUser(ProtocolMixin, AuthMixin, BaseUser):
users = models.ManyToManyField('users.User', blank=True, verbose_name=_("Users"))
groups = models.ManyToManyField('users.UserGroup', blank=True, verbose_name=_("User groups"))
type = models.CharField(max_length=16, choices=Type.choices, default=Type.common, verbose_name=_('Type'))
priority = models.IntegerField(
default=81, verbose_name=_("Priority"),
help_text=_("1-100, the lower the value will be match first"),
validators=[MinValueValidator(1), MaxValueValidator(100)]
)
protocol = models.CharField(max_length=16, choices=ProtocolMixin.Protocol.choices, default='ssh',
verbose_name=_('Protocol'))
priority = models.IntegerField(default=81, verbose_name=_("Priority"), help_text=_("1-100, the lower the value will be match first"), validators=[MinValueValidator(1), MaxValueValidator(100)])
protocol = models.CharField(max_length=16, choices=ProtocolMixin.Protocol.choices, default='ssh', verbose_name=_('Protocol'))
auto_push = models.BooleanField(default=True, verbose_name=_('Auto push'))
sudo = models.TextField(default='/bin/whoami', verbose_name=_('Sudo'))
shell = models.CharField(max_length=64, default='/bin/bash', verbose_name=_('Shell'))
login_mode = models.CharField(
choices=LOGIN_MODE_CHOICES, default=LOGIN_AUTO, max_length=10, verbose_name=_('Login mode')
)
shell = models.CharField(max_length=64, default='/bin/bash', verbose_name=_('Shell'))
login_mode = models.CharField(choices=LOGIN_MODE_CHOICES, default=LOGIN_AUTO, max_length=10, verbose_name=_('Login mode'))
sftp_root = models.CharField(default='tmp', max_length=128, verbose_name=_("SFTP Root"))
token = models.TextField(default='', verbose_name=_('Token'))
home = models.CharField(max_length=4096, default='', verbose_name=_('Home'), blank=True)

@@ -269,9 +262,7 @@ class SystemUser(ProtocolMixin, AuthMixin, BaseUser):
ad_domain = models.CharField(default='', max_length=256)
# linux su command (switch user)
su_enabled = models.BooleanField(default=False, verbose_name=_('User switch'))
su_from = models.ForeignKey(
'self', on_delete=models.SET_NULL, related_name='su_to', null=True, verbose_name=_("Switch from")
)
su_from = models.ForeignKey('self', on_delete=models.SET_NULL, related_name='su_to', null=True, verbose_name=_("Switch from"))

def __str__(self):
username = self.username

@@ -125,7 +125,6 @@ class ConnectionTokenMixin:
'bookmarktype:i': '3',
'use redirection server name:i': '0',
'smart sizing:i': '1',
'disableconnectionsharing:i': '1',
# 'drivestoredirect:s': '*',
# 'domain:s': ''
# 'alternate shell:s:': '||MySQLWorkbench',

@@ -1,9 +1,8 @@
# -*- coding: utf-8 -*-
#
from django.conf import settings
from django.contrib.auth import get_user_model
from django.conf import settings

from common.permissions import ServiceAccountSignaturePermission
from .base import JMSBaseAuthBackend

UserModel = get_user_model()

@@ -19,10 +18,6 @@ class PublicKeyAuthBackend(JMSBaseAuthBackend):
def authenticate(self, request, username=None, public_key=None, **kwargs):
if not public_key:
return None

permission = ServiceAccountSignaturePermission()
if not permission.has_permission(request, None):
return None
if username is None:
username = kwargs.get(UserModel.USERNAME_FIELD)
try:

@@ -31,7 +26,7 @@ class PublicKeyAuthBackend(JMSBaseAuthBackend):
return None
else:
if user.check_public_key(public_key) and \
self.user_can_authenticate(user):
self.user_can_authenticate(user):
return user

def get_user(self, user_id):

@@ -212,8 +212,7 @@ class MFAMixin:
self._do_check_user_mfa(code, mfa_type, user=user)

def check_user_mfa_if_need(self, user):
if self.request.session.get('auth_mfa') and \
self.request.session.get('auth_mfa_username') == user.username:
if self.request.session.get('auth_mfa'):
return
if not user.mfa_enabled:
return

@@ -221,16 +220,15 @@ class MFAMixin:
active_mfa_names = user.active_mfa_backends_mapper.keys()
raise errors.MFARequiredError(mfa_types=tuple(active_mfa_names))

def mark_mfa_ok(self, mfa_type, user):
def mark_mfa_ok(self, mfa_type):
self.request.session['auth_mfa'] = 1
self.request.session['auth_mfa_username'] = user.username
self.request.session['auth_mfa_time'] = time.time()
self.request.session['auth_mfa_required'] = 0
self.request.session['auth_mfa_type'] = mfa_type
MFABlockUtils(user.username, self.get_request_ip()).clean_failed_count()
MFABlockUtils(self.request.user.username, self.get_request_ip()).clean_failed_count()

def clean_mfa_mark(self):
keys = ['auth_mfa', 'auth_mfa_time', 'auth_mfa_required', 'auth_mfa_type', 'auth_mfa_username']
keys = ['auth_mfa', 'auth_mfa_time', 'auth_mfa_required', 'auth_mfa_type']
for k in keys:
self.request.session.pop(k, '')

@@ -265,7 +263,7 @@ class MFAMixin:
ok, msg = mfa_backend.check_code(code)

if ok:
self.mark_mfa_ok(mfa_type, user)
self.mark_mfa_ok(mfa_type)
return

raise errors.MFAFailedError(
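
Note: one side of these hunks also records which user passed MFA (auth_mfa_username) and compares it in check_user_mfa_if_need, so a mark left in the session by one account cannot satisfy the check for another. A minimal sketch of that per-user marking, with a plain dict standing in for request.session (helper names are illustrative):

    import time

    session = {}  # stand-in for request.session

    def mark_mfa_ok(mfa_type, username):
        session.update({
            "auth_mfa": 1,
            "auth_mfa_username": username,
            "auth_mfa_time": time.time(),
            "auth_mfa_required": 0,
            "auth_mfa_type": mfa_type,
        })

    def mfa_already_ok_for(username):
        # Only honour the mark if it was set for the same user
        return bool(session.get("auth_mfa")) and session.get("auth_mfa_username") == username

    mark_mfa_ok("otp", "alice")
    print(mfa_already_ok_for("alice"))  # True
    print(mfa_already_ok_for("bob"))    # False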

@@ -81,38 +81,3 @@ class UserConfirmation(permissions.BasePermission):
min_level = ConfirmType.values.index(confirm_type) + 1
name = 'UserConfirmationLevel{}TTL{}'.format(min_level, ttl)
return type(name, (cls,), {'min_level': min_level, 'ttl': ttl, 'confirm_type': confirm_type})


class ServiceAccountSignaturePermission(permissions.BasePermission):
def has_permission(self, request, view):
from authentication.models import AccessKey
from common.utils.crypto import get_aes_crypto
signature = request.META.get('HTTP_X_JMS_SVC', '')
if not signature or not signature.startswith('Sign'):
return False
data = signature[4:].strip()
if not data or ':' not in data:
return False
ak_id, time_sign = data.split(':', 1)
if not ak_id or not time_sign:
return False
ak = AccessKey.objects.filter(id=ak_id).first()
if not ak or not ak.is_active:
return False
if not ak.user or not ak.user.is_active or not ak.user.is_service_account:
return False
aes = get_aes_crypto(str(ak.secret).replace('-', ''), mode='ECB')
try:
timestamp = aes.decrypt(time_sign)
if not timestamp or not timestamp.isdigit():
return False
timestamp = int(timestamp)
interval = abs(int(time.time()) - timestamp)
if interval > 30:
return False
return True
except Exception:
return False

def has_object_permission(self, request, view, obj):
return False
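
Note: the ServiceAccountSignaturePermission shown above expects an X-JMS-SVC header of the form "Sign <access_key_id>:<ciphertext>", where the ciphertext is the current Unix timestamp encrypted with AES-ECB using the access key secret (dashes removed), and anything older than 30 seconds is rejected. A hedged client-side sketch with pycryptodome; the exact padding and encoding inside get_aes_crypto are not visible in the diff, so those details are assumptions:

    # Hedged sketch: builds a "Sign <ak_id>:<ciphertext>" header in the shape the
    # permission class above appears to expect. Key layout, padding and base64
    # encoding are assumptions, not the verified get_aes_crypto behaviour.
    import base64
    import time
    from Crypto.Cipher import AES            # pycryptodome
    from Crypto.Util.Padding import pad

    def make_service_signature(ak_id: str, secret: str) -> str:
        key = secret.replace("-", "").encode()[:32]   # assumes a 32-char secret (e.g. UUID hex)
        cipher = AES.new(key, AES.MODE_ECB)
        ts = str(int(time.time())).encode()
        ciphertext = base64.b64encode(cipher.encrypt(pad(ts, AES.block_size))).decode()
        return "Sign {}:{}".format(ak_id, ciphertext)

    # Usage (hypothetical access key id and secret):
    # headers = {"X-JMS-SVC": make_service_signature("9f2c0000-...", "a1b2c3d4-e5f6-...")}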

@@ -1,9 +1,10 @@
# -*- coding: utf-8 -*-
#
import struct
import random
import socket
import string
import struct


string_punctuation = '!#$%&()*+,-.:;<=>?@[]^_~'

@@ -18,7 +19,6 @@ def random_ip():


def random_string(length: int, lower=True, upper=True, digit=True, special_char=False):
random.seed()
args_names = ['lower', 'upper', 'digit', 'special_char']
args_values = [lower, upper, digit, special_char]
args_string = [string.ascii_lowercase, string.ascii_uppercase, string.digits, string_punctuation]
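
Note: random_string assembles its character pool from the enabled flag groups (lower/upper/digit/special_char) and samples `length` characters from it. A self-contained sketch of that idea using only the stdlib; it mirrors the flags shown in the hunk but does not reproduce any behaviour of the real helper beyond what is visible here:

    import random
    import string

    string_punctuation = '!#$%&()*+,-.:;<=>?@[]^_~'

    def random_string(length: int, lower=True, upper=True, digit=True, special_char=False) -> str:
        # Build the pool from the enabled groups, mirroring the flags in the hunk
        pools = [string.ascii_lowercase, string.ascii_uppercase, string.digits, string_punctuation]
        flags = [lower, upper, digit, special_char]
        chars = ''.join(p for p, enabled in zip(pools, flags) if enabled)
        return ''.join(random.choice(chars) for _ in range(length))

    print(random_string(12))
    print(random_string(8, special_char=True))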

@@ -1,13 +1,14 @@
from celery import shared_task
from django.conf import settings
from django.core.cache import cache
from django.conf import settings
from django.core.mail import send_mail
from celery import shared_task

from common.exceptions import JMSException
from common.sdk.sms.endpoint import SMS
from common.sdk.sms.exceptions import CodeError, CodeExpired, CodeSendTooFrequently
from common.utils import get_logger
from common.sdk.sms.endpoint import SMS
from common.exceptions import JMSException
from common.utils.random import random_string
from common.utils import get_logger


logger = get_logger(__file__)

@@ -26,7 +27,6 @@ class SendAndVerifyCodeUtil(object):
self.timeout = timeout
self.backend = backend
self.key = key or self.KEY_TMPL.format(target)
self.verify_key = self.key + '_verify'
self.other_args = kwargs

def gen_and_send_async(self):

@@ -47,11 +47,6 @@ class SendAndVerifyCodeUtil(object):
raise

def verify(self, code):
times = cache.get(self.verify_key, 0)
if times >= 3:
self.__clear()
raise CodeExpired
cache.set(self.verify_key, times + 1, timeout=self.timeout)
right = cache.get(self.key)
if not right:
raise CodeExpired

@@ -64,7 +59,6 @@ class SendAndVerifyCodeUtil(object):

def __clear(self):
cache.delete(self.key)
cache.delete(self.verify_key)

def __ttl(self):
return cache.ttl(self.key)
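
Note: one version of verify() keeps an attempt counter next to the code key — after three checks against the verify key the stored code is cleared and treated as expired. A minimal stand-alone sketch of that attempt-limited verification, with a dict standing in for the Django cache (class and exception names are illustrative):

    class CodeExpired(Exception):
        pass

    class CodeError(Exception):
        pass

    class VerifyCodeSketch:
        """Attempt-limited code verification; a dict stands in for the cache."""
        def __init__(self, key, code):
            self.key = key
            self.cache = {key: code, key + "_verify": 0}

        def verify(self, code):
            verify_key = self.key + "_verify"
            times = self.cache.get(verify_key, 0)
            if times >= 3:
                self.cache.pop(self.key, None)   # clear the stored code
                raise CodeExpired
            self.cache[verify_key] = times + 1
            right = self.cache.get(self.key)
            if not right:
                raise CodeExpired
            if code != right:
                raise CodeError
            return True

    util = VerifyCodeSketch("sms:138xxxx", "123456")
    print(util.verify("123456"))  # True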

@@ -24,7 +24,7 @@ __all__ = [

class TaskViewSet(OrgBulkModelViewSet):
model = Task
filterset_fields = ("name", "adhoc__execution__celery_task_id")
filterset_fields = ("name",)
search_fields = filterset_fields
serializer_class = TaskSerializer

@@ -54,7 +54,6 @@ class TaskRun(generics.RetrieveAPIView):

class AdHocViewSet(viewsets.ModelViewSet):
queryset = AdHoc.objects.all()
filterset_fields = ('execution__celery_task_id', )
serializer_class = AdHocSerializer

def get_serializer_class(self):

@@ -1,80 +0,0 @@
import logging
from celery.utils.log import get_logger
from django.db import close_old_connections
from django.core.exceptions import ObjectDoesNotExist
from django.db.utils import DatabaseError, InterfaceError

from django_celery_beat.schedulers import DatabaseScheduler as DJDatabaseScheduler

logger = get_logger(__name__)
debug, info, warning = logger.debug, logger.info, logger.warning


__all__ = ['DatabaseScheduler']


class DatabaseScheduler(DJDatabaseScheduler):

def sync(self):
if logger.isEnabledFor(logging.DEBUG):
debug('Writing entries...')
_tried = set()
_failed = set()
try:
close_old_connections()

while self._dirty:
name = self._dirty.pop()
try:
# original code:
# self.schedule[name].save()
# _tried.add(name)

"""
::Debug Description (2023.07.10)::

Calling self.schedule may re-fetch the entries from the database before self.save() runs,
instead of using the last_run_at that was set temporarily in memory.

If self.schedule is called here,
the self.schedule[name] being saved may carry the stale last_run_at read back from the database
rather than the last_run_at set in memory after the task ran (set in __next__()).
After the next `max_interval`, the following check cycle will run the task again.

::Demo::
Task info:
beat config: max_interval = 60s

Task name: cap
Schedule: runs every 3 minutes
Last run time: 18:00

First run: 18:03 (last_run_at = 18:03 is set at execution time, in memory only)

After the task finishes,
a sync is detected as needed; sync calls self.schedule,
self.schedule sees schedule_changed() is True and calls all_as_schedule(),
so the self.schedule[name] used inside sync still has last_run_at = 18:00,
and self.save() in self.sync() writes that value back.


beat: Waking up 60s ...

Second run: 18:04 (because the last_run_at read back is 18:00, entry.is_due() == True)

::Fix::
To avoid re-fetching from the database, use _schedule directly here.
"""
self._schedule[name].save()
_tried.add(name)
except (KeyError, TypeError, ObjectDoesNotExist):
_failed.add(name)
except DatabaseError as exc:
logger.exception('Database error while sync: %r', exc)
except InterfaceError:
warning(
'DatabaseScheduler: InterfaceError in sync(), '
'waiting to retry in next call...'
)
finally:
# retry later, only for the failed ones
self._dirty |= _failed
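
Note: the translated docstring above describes a stale-read problem — touching self.schedule inside sync() can reload entries from the database and save back an old last_run_at, so the task fires again on the next beat wake-up. A toy, self-contained illustration of why saving from the cached mapping avoids that; the classes below are stand-ins, not django_celery_beat code:

    import datetime

    class FakeDB:
        """Pretends to be the periodic-task table row."""
        def __init__(self, last_run_at):
            self.last_run_at = last_run_at

    db_row = FakeDB(last_run_at=datetime.datetime(2023, 7, 10, 18, 0))

    class Entry:
        def __init__(self, last_run_at):
            self.last_run_at = last_run_at
        def save(self):
            db_row.last_run_at = self.last_run_at

    # In-memory schedule updated right after the task ran at 18:03
    cached_schedule = {"cap": Entry(datetime.datetime(2023, 7, 10, 18, 3))}

    def reload_schedule_from_db():
        # What self.schedule may do when schedule_changed() is True
        return {"cap": Entry(db_row.last_run_at)}

    # Stale-read sync: re-reads the DB, then writes the old 18:00 back
    reload_schedule_from_db()["cap"].save()
    print(db_row.last_run_at)        # 18:00 again -> task looks due on the next wake-up

    # Cached sync: save from the in-memory mapping (the _schedule[name].save() line above)
    cached_schedule["cap"].save()
    print(db_row.last_run_at)        # 18:03, as expected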

@@ -17,7 +17,7 @@ class AdHocExecutionSerializer(serializers.ModelSerializer):
fields_mini = ['id']
fields_small = fields_mini + [
'hosts_amount', 'timedelta', 'result', 'summary', 'short_id',
'is_finished', 'is_success', 'celery_task_id',
'is_finished', 'is_success',
'date_start', 'date_finished',
]
fields_fk = ['task', 'task_display', 'adhoc', 'adhoc_short_id',]

@@ -149,7 +149,7 @@ class BuiltinRole:
'User': cls.system_user.get_role(),
'Auditor': cls.system_auditor.get_role()
}
return cls.system_role_mapper.get(name, cls.system_role_mapper['User'])
return cls.system_role_mapper[name]

@classmethod
def get_org_role_by_old_name(cls, name):

@@ -159,7 +159,7 @@ class BuiltinRole:
'User': cls.org_user.get_role(),
'Auditor': cls.org_auditor.get_role(),
}
return cls.org_role_mapper.get(name, cls.org_role_mapper['User'])
return cls.org_role_mapper[name]

@classmethod
def sync_to_db(cls, show_msg=False):
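
Note: the two BuiltinRole hunks differ only in how an unknown legacy role name is handled — plain indexing raises KeyError, while .get(name, mapper['User']) silently falls back to the User role. A tiny illustration of the difference (the mapper contents are placeholders):

    role_mapper = {"Admin": "system-admin", "User": "system-user", "Auditor": "system-auditor"}

    print(role_mapper.get("Operator", role_mapper["User"]))   # falls back to "system-user"
    try:
        role_mapper["Operator"]                               # plain indexing raises instead
    except KeyError as e:
        print("KeyError:", e)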

@@ -79,7 +79,7 @@ class Endpoint(JMSModel):
return None
endpoints = cls.objects.filter(name__in=values).order_by('-date_updated')
for endpoint in endpoints:
if endpoint.is_valid_for(instance, protocol):
if endpoint.is_valid_for(protocol):
return endpoint

@@ -1,13 +1,14 @@
import html2text
from django.template.loader import render_to_string
from html import escape

from django.utils.translation import ugettext as _
from django.template.loader import render_to_string

from common.utils import get_logger
from tickets.const import TicketState, TicketType
from tickets.utils import (
send_ticket_processed_mail_to_applicant,
send_ticket_applied_mail_to_assignees
)
from tickets.const import TicketState, TicketType

logger = get_logger(__name__)

@@ -97,7 +98,7 @@ class BaseHandler:
approve_info = _('{} {} the ticket').format(user_display, state_display)
context = self._diff_prev_approve_context(state)
context.update({'approve_info': approve_info})
body = self.safe_html_script(
body = self.reject_html_script(
render_to_string('tickets/ticket_approve_diff.html', context)
)
data = {

@@ -110,7 +111,6 @@ class BaseHandler:
return self.ticket.comments.create(**data)

@staticmethod
def safe_html_script(unsafe_html):
unsafe_html = unsafe_html.replace('\n', '')
html_str = html2text.html2text(unsafe_html)
return html_str
def reject_html_script(unsafe_html):
safe_html = escape(unsafe_html)
return safe_html
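
Note: the two helpers above treat the rendered ticket HTML differently — safe_html_script converts it to plain text with html2text, while reject_html_script neutralises the markup with html.escape. A small comparison, assuming the third-party html2text package is installed:

    import html

    import html2text  # third-party; pip install html2text

    snippet = '<b>approved</b> the ticket <script>alert(1)</script>'

    print(html2text.html2text(snippet))  # markdown-ish plain text: **approved** the ticket ...
    print(html.escape(snippet))          # &lt;b&gt;approved&lt;/b&gt; ... (tags kept but neutralised)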

@@ -1,5 +1,5 @@
from django.db.models import Model
from django.db.transaction import atomic
from django.db.models import Model
from django.utils.translation import ugettext as _
from rest_framework import serializers

@@ -69,6 +69,8 @@ class BaseApplyAssetApplicationSerializer(serializers.Serializer):
error = _('The expiration date should be greater than the start date')
raise serializers.ValidationError({'apply_date_expired': error})

attrs['apply_date_start'] = apply_date_start
attrs['apply_date_expired'] = apply_date_expired
return attrs

@atomic

@@ -18,6 +18,7 @@
{% endfor %}
</tr>
{% endfor %}

</table>
</div>
{% endif %}

@@ -1,27 +1,30 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import base64
import datetime
import uuid
import base64
import string
import random
import datetime
from typing import Callable

from django.conf import settings
from django.contrib.auth.hashers import check_password
from django.contrib.auth.models import AbstractUser
from django.core.cache import cache
from django.db import models
from django.shortcuts import reverse
from django.conf import settings
from django.utils import timezone
from django.utils.module_loading import import_string
from django.core.cache import cache
from django.contrib.auth.models import AbstractUser
from django.contrib.auth.hashers import check_password
from django.utils.translation import ugettext_lazy as _
from django.shortcuts import reverse
from django.utils.module_loading import import_string

from orgs.utils import current_org
from orgs.models import Organization
from rbac.const import Scope
from common.db import fields, models as jms_models
from common.utils import (
date_expired_default, get_logger, lazyproperty, random_string, bulk_create_with_signal
)
from orgs.utils import current_org
from rbac.const import Scope
from ..signals import post_user_change_password, post_user_leave_org, pre_user_leave_org

__all__ = ['User', 'UserPasswordHistory']

@@ -515,7 +518,8 @@ class TokenMixin:
return self.access_keys.first()

def generate_reset_token(self):
token = random_string(50)
letter = string.ascii_letters + string.digits
token = ''.join([random.choice(letter) for _ in range(50)])
self.set_cache(token)
return token
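
Note: both versions of generate_reset_token produce a 50-character alphanumeric token, either through the shared random_string helper or by sampling string.ascii_letters + string.digits directly with random.choice. A stand-alone sketch of the inline variant:

    import random
    import string

    def generate_reset_token(length: int = 50) -> str:
        letters = string.ascii_letters + string.digits
        return ''.join(random.choice(letters) for _ in range(length))

    print(generate_reset_token())
    # Aside, for comparison only: secrets.token_urlsafe(37) yields a similar-length
    # token from a CSPRNG; the code in the diff itself uses the random module as shown.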

@@ -81,6 +81,7 @@ def check_user_expired_periodic():


@shared_task
@transaction.atomic
def import_ldap_user():
logger.info("Start import ldap user task")
util_server = LDAPServerUtil()

@@ -128,9 +128,9 @@ kubernetes==21.7.0

# DB requirements
mysqlclient==2.1.0
PyMySQL==1.0.2
oracledb==1.2.2
oracledb==1.0.1
psycopg2-binary==2.9.1
# pymssql==2.2.7
pymssql==2.1.5
django-mysql==3.9.0
django-redis==5.2.0
python-redis-lock==3.7.0
||||
connection_params['port'] = settings.REDIS_PORT
|
||||
redis_client = Redis(**connection_params)
|
||||
|
||||
scheduler = "ops.celery.beat.schedulers:DatabaseScheduler"
|
||||
scheduler = "django_celery_beat.schedulers:DatabaseScheduler"
|
||||
processes = []
|
||||
cmd = [
|
||||
'celery',
|
||||
|
||||