perf: add server size limit

This commit is contained in:
ibuler
2026-03-03 18:03:08 +08:00
parent c1d6f4b7e5
commit 0bc34cfdf7
4 changed files with 46 additions and 46 deletions

View File

@@ -1,3 +1,5 @@
import os
import psutil
import multiprocessing
from django.core.management.base import BaseCommand
@@ -7,10 +9,21 @@ from .hands import *
from .utils import ServicesUtil
# Server size class, overridable via the SERVER_SIZE env var ('small'/'large').
# With the default 'auto', classify from hardware: fewer than 4 CPUs or under
# ~7 GiB of RAM counts as a small server (7, not 8, leaves headroom for
# memory the kernel/firmware reserves on nominal 8 GiB hosts — TODO confirm).
SERVER_SIZE = os.environ.get('SERVER_SIZE', 'auto')
if SERVER_SIZE == 'auto':
    gib_total = psutil.virtual_memory().total / (1024 ** 3)
    is_small = psutil.cpu_count() < 4 or gib_total < 7
    SERVER_SIZE = 'small' if is_small else 'large'
class Services(TextChoices):
gunicorn = 'gunicorn', 'gunicorn'
celery_ansible = 'celery_ansible', 'celery_ansible'
celery_default = 'celery_default', 'celery_default'
celery_mix = 'celery_mix', 'celery_mix'
beat = 'beat', 'beat'
flower = 'flower', 'flower'
ws = 'ws', 'ws'
@@ -27,17 +40,24 @@ class Services(TextChoices):
cls.flower: services.FlowerService,
cls.celery_default: services.CeleryDefaultService,
cls.celery_ansible: services.CeleryAnsibleService,
cls.celery_mix: services.CeleryMixService,
cls.beat: services.BeatService,
}
return services_map.get(name)
@classmethod
def web_services(cls):
return [cls.gunicorn, cls.flower]
if SERVER_SIZE == 'small':
return [cls.gunicorn]
else:
return [cls.gunicorn, cls.flower]
@classmethod
def celery_services(cls):
return [cls.celery_ansible, cls.celery_default]
if SERVER_SIZE == 'small':
return [cls.celery_mix]
else:
return [cls.celery_ansible, cls.celery_default]
@classmethod
def task_services(cls):
@@ -103,8 +123,11 @@ class BaseActionCommand(BaseCommand):
def initial_util(self, *args, **options):
service_names = options.get('services')
worker = options.get('worker')
if SERVER_SIZE == 'small':
worker = '1'
service_kwargs = {
'worker_gunicorn': options.get('worker')
'worker_gunicorn': worker
}
services = Services.get_service_objects(service_names=service_names, **service_kwargs)

View File

@@ -1,5 +1,6 @@
from .beat import *
from .celery_ansible import *
from .celery_default import *
from .celery_mix import *
from .flower import *
from .gunicorn import *

View File

@@ -0,0 +1,16 @@
from .celery_base import CeleryBaseService
__all__ = ['CeleryMixService']
class CeleryMixService(CeleryBaseService):
    """A single Celery worker that consumes both the 'celery' and 'ansible'
    queues, used on small servers instead of two dedicated workers."""

    def __init__(self, **kwargs):
        # Pin the combined queue name so one worker serves both workloads.
        kwargs['queue'] = 'celery,ansible'
        super().__init__(**kwargs)

    def start_other(self):
        # Imported lazily: terminal.startup needs Django apps to be ready.
        from terminal.startup import CeleryTerminal
        CeleryTerminal().start_heartbeat_thread()

View File

@@ -5,58 +5,18 @@ import signal
import subprocess
import sys
import redis_lock
from redis import Redis, Sentinel, ConnectionPool
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
APPS_DIR = os.path.join(BASE_DIR, 'apps')
CERTS_DIR = os.path.join(BASE_DIR, 'data', 'certs')
sys.path.insert(0, APPS_DIR)
from jumpserver import settings
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'jumpserver.settings')
os.environ.setdefault('PYTHONOPTIMIZE', '1')
if os.getuid() == 0:
os.environ.setdefault('C_FORCE_ROOT', '1')
connection_params = {}
if settings.REDIS_USE_SSL:
connection_params['ssl'] = settings.REDIS_USE_SSL
connection_params['ssl_cert_reqs'] = settings.REDIS_SSL_REQUIRED
connection_params['ssl_keyfile'] = settings.REDIS_SSL_KEY
connection_params['ssl_certfile'] = settings.REDIS_SSL_CERT
connection_params['ssl_ca_certs'] = settings.REDIS_SSL_CA
REDIS_SENTINEL_SERVICE_NAME = settings.REDIS_SENTINEL_SERVICE_NAME
REDIS_SENTINELS = settings.REDIS_SENTINELS
REDIS_SENTINEL_PASSWORD = settings.REDIS_SENTINEL_PASSWORD
REDIS_SENTINEL_SOCKET_TIMEOUT = settings.REDIS_SENTINEL_SOCKET_TIMEOUT
if REDIS_SENTINEL_SERVICE_NAME and REDIS_SENTINELS:
connection_params['sentinels'] = REDIS_SENTINELS
connection_params['password'] = settings.REDIS_PASSWORD
sentinel_client = Sentinel(
**connection_params, sentinel_kwargs={
'ssl': settings.REDIS_USE_SSL,
'ssl_cert_reqs': settings.REDIS_SSL_REQUIRED,
'ssl_keyfile': settings.REDIS_SSL_KEY,
'ssl_certfile': settings.REDIS_SSL_CERT,
'ssl_ca_certs': settings.REDIS_SSL_CA,
'password': REDIS_SENTINEL_PASSWORD,
'socket_timeout': REDIS_SENTINEL_SOCKET_TIMEOUT
}
)
redis_client = sentinel_client.master_for(REDIS_SENTINEL_SERVICE_NAME)
else:
REDIS_PROTOCOL = 'rediss' if connection_params.pop('ssl', False) else 'redis'
REDIS_LOCATION_NO_DB = '%(protocol)s://:%(password)s@%(host)s:%(port)s' % {
'protocol': REDIS_PROTOCOL,
'password': settings.REDIS_PASSWORD_QUOTE,
'host': settings.REDIS_HOST,
'port': settings.REDIS_PORT,
}
pool = ConnectionPool.from_url(REDIS_LOCATION_NO_DB, **connection_params)
redis_client = Redis(connection_pool=pool)
from django.core.cache import cache
scheduler = "django_celery_beat.schedulers:DatabaseScheduler"
processes = []
@@ -79,7 +39,7 @@ def main():
# 父进程结束通知子进程结束
signal.signal(signal.SIGTERM, stop_beat_process)
with redis_lock.Lock(redis_client, name="beat-distribute-start-lock", expire=60, auto_renewal=True):
with cache.lock("beat-distribute-start-lock", expire=60, auto_renewal=True):
print("Get beat lock start to run it")
process = subprocess.Popen(cmd, cwd=APPS_DIR)
processes.append(process)