From e9f591b33be6fb8cef81478f6b9201949f2bfd56 Mon Sep 17 00:00:00 2001
From: ibuler
Date: Mon, 18 Mar 2024 14:19:15 +0800
Subject: [PATCH] perf: Optimize ops task
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 apps/ops/api/adhoc.py    |  3 ---
 apps/ops/api/job.py      | 49 +++++++++++++++++++++++-----------------
 apps/ops/api/playbook.py | 16 ++++++-------
 apps/ops/tasks.py        | 11 ++++++++-
 4 files changed, 46 insertions(+), 33 deletions(-)

diff --git a/apps/ops/api/adhoc.py b/apps/ops/api/adhoc.py
index 9be055101..70b74bed7 100644
--- a/apps/ops/api/adhoc.py
+++ b/apps/ops/api/adhoc.py
@@ -17,9 +17,6 @@ class AdHocViewSet(OrgBulkModelViewSet):
     search_fields = ('name', 'comment')
     model = AdHoc
 
-    def allow_bulk_destroy(self, qs, filtered):
-        return True
-
     def get_queryset(self):
         queryset = super().get_queryset()
         return queryset.filter(creator=self.request.user)
diff --git a/apps/ops/api/job.py b/apps/ops/api/job.py
index d71e62828..91ffea79d 100644
--- a/apps/ops/api/job.py
+++ b/apps/ops/api/job.py
@@ -1,6 +1,5 @@
 import json
 import os
-from psutil import NoSuchProcess
 
 from celery.result import AsyncResult
 from django.conf import settings
@@ -20,7 +19,9 @@ from common.permissions import IsValidUser
 from ops.celery import app
 from ops.const import Types
 from ops.models import Job, JobExecution
-from ops.serializers.job import JobSerializer, JobExecutionSerializer, FileSerializer, JobTaskStopSerializer
+from ops.serializers.job import (
+    JobSerializer, JobExecutionSerializer, FileSerializer, JobTaskStopSerializer
+)
 __all__ = [
     'JobViewSet', 'JobExecutionViewSet', 'JobRunVariableHelpAPIView', 'JobExecutionTaskDetail', 'UsernameHintsAPI'
 ]
@@ -43,16 +44,17 @@ def set_task_to_serializer_data(serializer, task_id):
 
 
 def merge_nodes_and_assets(nodes, assets, user):
-    if nodes:
-        perm_util = UserPermAssetUtil(user=user)
-        for node_id in nodes:
-            if node_id == PermNode.FAVORITE_NODE_KEY:
-                node_assets = perm_util.get_favorite_assets()
-            elif node_id == PermNode.UNGROUPED_NODE_KEY:
-                node_assets = perm_util.get_ungroup_assets()
-            else:
-                node, node_assets = perm_util.get_node_all_assets(node_id)
-            assets.extend(node_assets.exclude(id__in=[asset.id for asset in assets]))
+    if not nodes:
+        return assets
+    perm_util = UserPermAssetUtil(user=user)
+    for node_id in nodes:
+        if node_id == PermNode.FAVORITE_NODE_KEY:
+            node_assets = perm_util.get_favorite_assets()
+        elif node_id == PermNode.UNGROUPED_NODE_KEY:
+            node_assets = perm_util.get_ungroup_assets()
+        else:
+            _, node_assets = perm_util.get_node_all_assets(node_id)
+        assets.extend(node_assets.exclude(id__in=[asset.id for asset in assets]))
     return assets
 
 
@@ -70,12 +72,13 @@ class JobViewSet(OrgBulkModelViewSet):
             return self.permission_denied(request, "Command execution disabled")
         return super().check_permissions(request)
 
-    def allow_bulk_destroy(self, qs, filtered):
-        return True
-
     def get_queryset(self):
         queryset = super().get_queryset()
-        queryset = queryset.filter(creator=self.request.user).exclude(type=Types.upload_file)
+        queryset = queryset \
+            .filter(creator=self.request.user) \
+            .exclude(type=Types.upload_file)
+
+        # The job list does not show adhoc jobs; retrieve still needs them to fetch status
         if self.action != 'retrieve':
             return queryset.filter(instant=False)
         return queryset
@@ -83,10 +86,11 @@ class JobViewSet(OrgBulkModelViewSet):
     def perform_create(self, serializer):
         run_after_save = serializer.validated_data.pop('run_after_save', False)
         node_ids = serializer.validated_data.pop('nodes', [])
-        assets = serializer.validated_data.__getitem__('assets')
+        assets = serializer.validated_data.get('assets')
         assets = merge_nodes_and_assets(node_ids, assets, self.request.user)
-        serializer.validated_data.__setitem__('assets', assets)
+        serializer.validated_data['assets'] = assets
         instance = serializer.save()
+
         if instance.instant or run_after_save:
             self.run_job(instance, serializer)
 
@@ -103,7 +107,10 @@ class JobViewSet(OrgBulkModelViewSet):
 
         set_task_to_serializer_data(serializer, execution.id)
         transaction.on_commit(
-            lambda: run_ops_job_execution.apply_async((str(execution.id),), task_id=str(execution.id)))
+            lambda: run_ops_job_execution.apply_async(
+                (str(execution.id),), task_id=str(execution.id)
+            )
+        )
 
     @staticmethod
     def get_duplicates_files(files):
@@ -124,8 +131,8 @@ class JobViewSet(OrgBulkModelViewSet):
                 exceeds_limit_files.append(file)
         return exceeds_limit_files
 
-    @action(methods=[POST], detail=False, serializer_class=FileSerializer, permission_classes=[IsValidUser, ],
-            url_path='upload')
+    @action(methods=[POST], detail=False, serializer_class=FileSerializer,
+            permission_classes=[IsValidUser, ], url_path='upload')
     def upload(self, request, *args, **kwargs):
         uploaded_files = request.FILES.getlist('files')
         serializer = self.get_serializer(data=request.data)
diff --git a/apps/ops/api/playbook.py b/apps/ops/api/playbook.py
index c49c6c345..38e18bdfb 100644
--- a/apps/ops/api/playbook.py
+++ b/apps/ops/api/playbook.py
@@ -52,26 +52,26 @@ class PlaybookViewSet(OrgBulkModelViewSet):
         if 'multipart/form-data' in self.request.headers['Content-Type']:
             src_path = safe_join(settings.MEDIA_ROOT, instance.path.name)
             dest_path = safe_join(settings.DATA_DIR, "ops", "playbook", instance.id.__str__())
+
             try:
                 unzip_playbook(src_path, dest_path)
-            except RuntimeError as e:
+            except RuntimeError:
                 raise JMSException(code='invalid_playbook_file', detail={"msg": "Unzip failed"})
 
             if 'main.yml' not in os.listdir(dest_path):
                 raise PlaybookNoValidEntry
 
-        else:
-            if instance.create_method == 'blank':
-                dest_path = safe_join(settings.DATA_DIR, "ops", "playbook", instance.id.__str__())
-                os.makedirs(dest_path)
-                with open(safe_join(dest_path, 'main.yml'), 'w') as f:
-                    f.write('## write your playbook here')
+        elif instance.create_method == 'blank':
+            dest_path = safe_join(settings.DATA_DIR, "ops", "playbook", instance.id.__str__())
+            os.makedirs(dest_path)
+            with open(safe_join(dest_path, 'main.yml'), 'w') as f:
+                f.write('## write your playbook here')
 
 
 class PlaybookFileBrowserAPIView(APIView):
     permission_classes = (RBACPermission,)
     rbac_perms = {
-        'GET': 'ops.change_playbook',
+        'GET': 'ops.view_playbook',
         'POST': 'ops.change_playbook',
         'DELETE': 'ops.change_playbook',
         'PATCH': 'ops.change_playbook',
diff --git a/apps/ops/tasks.py b/apps/ops/tasks.py
index 5242d2812..57c57a0d8 100644
--- a/apps/ops/tasks.py
+++ b/apps/ops/tasks.py
@@ -1,10 +1,11 @@
 # coding: utf-8
 import datetime
+import time
 
 from celery import shared_task
 from celery.exceptions import SoftTimeLimitExceeded
-from django.utils.translation import gettext_lazy as _
 from django.utils import timezone
+from django.utils.translation import gettext_lazy as _
 from django_celery_beat.models import PeriodicTask
 
 from common.const.crontab import CRONTAB_AT_AM_TWO
@@ -143,3 +144,11 @@ def clean_job_execution_period():
     with tmp_to_root_org():
         del_res = JobExecution.objects.filter(date_created__lt=expired_day).delete()
     logger.info(f"clean job_execution db record success! delete {days} days {del_res[0]} records")
+
+
+@shared_task
+def longtime_add(x, y):
+    print('long time task begins')
+    time.sleep(50)
+    print('long time task finished')
+    return x + y
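
Note (not part of the patch): JobViewSet.run_job above defers the Celery enqueue with transaction.on_commit and reuses the JobExecution id as the Celery task_id. Below is a minimal, self-contained sketch of that pattern, assuming a Django project with Celery already configured; the report object, generate_report task, and helper functions are hypothetical stand-ins, not code from this repository.

from celery import shared_task
from celery.result import AsyncResult
from django.db import transaction


@shared_task
def generate_report(report_id):
    # Worker-side handler; the row it looks up is guaranteed to exist because
    # the enqueue below only fires after the creating transaction commits.
    ...


def start_report(report):
    # Defer enqueueing until the surrounding transaction commits, so a fast
    # worker cannot pick the task up before the row is visible in the database.
    transaction.on_commit(
        lambda: generate_report.apply_async(
            (str(report.id),), task_id=str(report.id)
        )
    )


def report_status(report):
    # Reusing the row id as the task id means the task can be polled (or
    # revoked) later with nothing more than the primary key.
    return AsyncResult(str(report.id)).status

The same AsyncResult lookup applies to the demo task added in apps/ops/tasks.py, e.g. result = longtime_add.delay(1, 2) followed by result.get(timeout=60).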