perf: 优化Activity日志

This commit is contained in:
jiangweidong 2023-02-07 08:52:48 +08:00
parent bf867f8c95
commit 90fdaca955
26 changed files with 239 additions and 113 deletions

View File

@@ -53,7 +53,9 @@ class AccountViewSet(OrgBulkModelViewSet):
account = super().get_object() account = super().get_object()
account_ids = [account.id] account_ids = [account.id]
asset_ids = [account.asset_id] asset_ids = [account.asset_id]
task = verify_accounts_connectivity.delay(account_ids, asset_ids) task = verify_accounts_connectivity.delay(
account_ids, asset_ids, user=request.user
)
return Response(data={'task': task.id}) return Response(data={'task': task.id})

View File

@@ -110,6 +110,7 @@ class AutomationExecutionViewSet(
serializer.is_valid(raise_exception=True) serializer.is_valid(raise_exception=True)
automation = serializer.validated_data.get('automation') automation = serializer.validated_data.get('automation')
task = execute_automation.delay( task = execute_automation.delay(
pid=automation.pk, trigger=Trigger.manual, tp=self.tp pid=automation.pk, trigger=Trigger.manual,
tp=self.tp, user=request.user
) )
return Response({'task': task.id}, status=status.HTTP_201_CREATED) return Response({'task': task.id}, status=status.HTTP_201_CREATED)

View File

@@ -35,7 +35,7 @@ class AutomationExecution(AssetAutomationExecution):
('add_pushaccountexecution', _('Can add push account execution')), ('add_pushaccountexecution', _('Can add push account execution')),
] ]
def start(self): def start(self, **kwargs):
from accounts.automations.endpoint import ExecutionManager from accounts.automations.endpoint import ExecutionManager
manager = ExecutionManager(execution=self) manager = ExecutionManager(execution=self)
return manager.run() return manager.run(**kwargs)

View File

@@ -9,7 +9,7 @@ logger = get_logger(__file__)
@shared_task(queue='ansible', verbose_name=_('Account execute automation')) @shared_task(queue='ansible', verbose_name=_('Account execute automation'))
def execute_automation(pid, trigger, tp): def execute_automation(pid, trigger, tp, **kwargs):
model = AutomationTypes.get_type_model(tp) model = AutomationTypes.get_type_model(tp)
with tmp_to_root_org(): with tmp_to_root_org():
instance = get_object_or_none(model, pk=pid) instance = get_object_or_none(model, pk=pid)
@@ -17,4 +17,4 @@ def execute_automation(pid, trigger, tp):
logger.error("No automation task found: {}".format(pid)) logger.error("No automation task found: {}".format(pid))
return return
with tmp_to_org(instance.org): with tmp_to_org(instance.org):
instance.execute(trigger) instance.execute(trigger, **kwargs)

View File

@@ -5,7 +5,7 @@ from assets.tasks.common import generate_data
from common.const.choices import Trigger from common.const.choices import Trigger
def automation_execute_start(task_name, tp, child_snapshot=None): def automation_execute_start(task_name, tp, child_snapshot=None, **kwargs):
from accounts.models import AutomationExecution from accounts.models import AutomationExecution
data = generate_data(task_name, tp, child_snapshot) data = generate_data(task_name, tp, child_snapshot)
@@ -19,4 +19,4 @@ def automation_execute_start(task_name, tp, child_snapshot=None):
execution = AutomationExecution.objects.create( execution = AutomationExecution.objects.create(
trigger=Trigger.manual, **data trigger=Trigger.manual, **data
) )
execution.start() execution.start(**kwargs)

View File

@@ -14,7 +14,7 @@ __all__ = [
] ]
def verify_connectivity_util(assets, tp, accounts, task_name): def verify_connectivity_util(assets, tp, accounts, task_name, **kwargs):
if not assets or not accounts: if not assets or not accounts:
return return
account_usernames = list(accounts.values_list('username', flat=True)) account_usernames = list(accounts.values_list('username', flat=True))
@@ -22,28 +22,30 @@ def verify_connectivity_util(assets, tp, accounts, task_name):
'accounts': account_usernames, 'accounts': account_usernames,
'assets': [str(asset.id) for asset in assets], 'assets': [str(asset.id) for asset in assets],
} }
automation_execute_start(task_name, tp, child_snapshot) automation_execute_start(task_name, tp, child_snapshot, **kwargs)
@org_aware_func("assets") @org_aware_func("assets")
def verify_accounts_connectivity_util(accounts, assets, task_name): def verify_accounts_connectivity_util(accounts, assets, task_name, **kwargs):
gateway_assets = assets.filter(platform__name=GATEWAY_NAME) gateway_assets = assets.filter(platform__name=GATEWAY_NAME)
verify_connectivity_util( verify_connectivity_util(
gateway_assets, AutomationTypes.verify_gateway_account, accounts, task_name gateway_assets, AutomationTypes.verify_gateway_account,
accounts, task_name, **kwargs
) )
non_gateway_assets = assets.exclude(platform__name=GATEWAY_NAME) non_gateway_assets = assets.exclude(platform__name=GATEWAY_NAME)
verify_connectivity_util( verify_connectivity_util(
non_gateway_assets, AutomationTypes.verify_account, accounts, task_name non_gateway_assets, AutomationTypes.verify_account,
accounts, task_name, **kwargs
) )
@shared_task(queue="ansible", verbose_name=_('Verify asset account availability')) @shared_task(queue="ansible", verbose_name=_('Verify asset account availability'))
def verify_accounts_connectivity(account_ids, asset_ids): def verify_accounts_connectivity(account_ids, asset_ids, **kwargs):
from assets.models import Asset from assets.models import Asset
from accounts.models import Account, VerifyAccountAutomation from accounts.models import Account, VerifyAccountAutomation
assets = Asset.objects.filter(id__in=asset_ids) assets = Asset.objects.filter(id__in=asset_ids)
accounts = Account.objects.filter(id__in=account_ids) accounts = Account.objects.filter(id__in=account_ids)
task_name = gettext_noop("Verify accounts connectivity") task_name = gettext_noop("Verify accounts connectivity")
task_name = VerifyAccountAutomation.generate_unique_name(task_name) task_name = VerifyAccountAutomation.generate_unique_name(task_name)
return verify_accounts_connectivity_util(accounts, assets, task_name) return verify_accounts_connectivity_util(accounts, assets, task_name, **kwargs)

View File

@@ -3,6 +3,7 @@
import django_filters import django_filters
from django.db.models import Q from django.db.models import Q
from django.utils.translation import ugettext_lazy as _ from django.utils.translation import ugettext_lazy as _
from rest_framework.request import Request
from rest_framework.decorators import action from rest_framework.decorators import action
from rest_framework.response import Response from rest_framework.response import Response
@@ -105,14 +106,21 @@ class AssetViewSet(SuggestionMixin, NodeFilterMixin, OrgBulkModelViewSet):
class AssetsTaskMixin: class AssetsTaskMixin:
request: Request
def perform_assets_task(self, serializer): def perform_assets_task(self, serializer):
data = serializer.validated_data data = serializer.validated_data
assets = data.get("assets", []) assets = data.get("assets", [])
asset_ids = [asset.id for asset in assets] asset_ids = [asset.id for asset in assets]
user = self.request.user
if data["action"] == "refresh": if data["action"] == "refresh":
task = update_assets_hardware_info_manual.delay(asset_ids) task = update_assets_hardware_info_manual.delay(
asset_ids, user=user
)
else: else:
task = test_assets_connectivity_manual.delay(asset_ids) task = test_assets_connectivity_manual.delay(
asset_ids, user=user
)
return task return task
def perform_create(self, serializer): def perform_create(self, serializer):

View File

@@ -10,6 +10,9 @@ from django.utils import timezone
from django.utils.translation import gettext as _ from django.utils.translation import gettext as _
from assets.automations.methods import platform_automation_methods from assets.automations.methods import platform_automation_methods
from audits.signals import post_activity_log
from audits.const import ActivityChoices
from common.utils import reverse
from common.utils import get_logger, lazyproperty from common.utils import get_logger, lazyproperty
from common.utils import ssh_pubkey_gen, ssh_key_string_to_obj from common.utils import ssh_pubkey_gen, ssh_key_string_to_obj
from ops.ansible import JMSInventory, PlaybookRunner, DefaultCallback from ops.ansible import JMSInventory, PlaybookRunner, DefaultCallback
@@ -118,7 +121,22 @@ class BasePlaybookManager:
yaml.safe_dump(plays, f) yaml.safe_dump(plays, f)
return sub_playbook_path return sub_playbook_path
def get_runners(self): def send_activity(self, assets, **kwargs):
user = kwargs.pop('user', _('Unknown'))
task_type = self.method_type().label
detail = 'User %s performs a task(%s) for this resource.' % (
user, task_type
)
detail_url = reverse(
'ops:celery-task-log', kwargs={'pk': self.execution.id}
)
for a in assets:
post_activity_log.send(
sender=self, resource_id=a.id, detail=detail,
detail_url=detail_url, type=ActivityChoices.task
)
def get_runners(self, **kwargs):
runners = [] runners = []
for platform, assets in self.get_assets_group_by_platform().items(): for platform, assets in self.get_assets_group_by_platform().items():
assets_bulked = [assets[i:i + self.bulk_size] for i in range(0, len(assets), self.bulk_size)] assets_bulked = [assets[i:i + self.bulk_size] for i in range(0, len(assets), self.bulk_size)]
@@ -137,6 +155,7 @@ class BasePlaybookManager:
callback=PlaybookCallback(), callback=PlaybookCallback(),
) )
runners.append(runer) runners.append(runer)
self.send_activity(assets, **kwargs)
return runners return runners
def on_host_success(self, host, result): def on_host_success(self, host, result):
@@ -166,7 +185,7 @@ class BasePlaybookManager:
pass pass
def run(self, *args, **kwargs): def run(self, *args, **kwargs):
runners = self.get_runners() runners = self.get_runners(user=kwargs.pop('user'))
if len(runners) > 1: if len(runners) > 1:
print("### 分批次执行开始任务, 总共 {}\n".format(len(runners))) print("### 分批次执行开始任务, 总共 {}\n".format(len(runners)))
else: else:

View File

@@ -76,7 +76,7 @@ class BaseAutomation(PeriodTaskModelMixin, JMSOrgBaseModel):
def executed_amount(self): def executed_amount(self):
return self.executions.count() return self.executions.count()
def execute(self, trigger=Trigger.manual): def execute(self, trigger=Trigger.manual, **kwargs):
try: try:
eid = current_task.request.id eid = current_task.request.id
except AttributeError: except AttributeError:
@@ -86,7 +86,7 @@ class BaseAutomation(PeriodTaskModelMixin, JMSOrgBaseModel):
id=eid, trigger=trigger, automation=self, id=eid, trigger=trigger, automation=self,
snapshot=self.to_attr_json(), snapshot=self.to_attr_json(),
) )
return execution.start() return execution.start(**kwargs)
class AssetBaseAutomation(BaseAutomation): class AssetBaseAutomation(BaseAutomation):
@@ -140,7 +140,7 @@ class AutomationExecution(OrgModelMixin):
return {} return {}
return recipients return recipients
def start(self): def start(self, **kwargs):
from assets.automations.endpoint import ExecutionManager from assets.automations.endpoint import ExecutionManager
manager = ExecutionManager(execution=self) manager = ExecutionManager(execution=self)
return manager.run() return manager.run(**kwargs)

View File

@@ -29,7 +29,7 @@ def generate_data(task_name, tp, child_snapshot=None):
return {'id': eid, 'snapshot': snapshot} return {'id': eid, 'snapshot': snapshot}
def automation_execute_start(task_name, tp, child_snapshot=None): def automation_execute_start(task_name, tp, child_snapshot=None, **kwargs):
from assets.models import AutomationExecution from assets.models import AutomationExecution
data = generate_data(task_name, tp, child_snapshot) data = generate_data(task_name, tp, child_snapshot)
@@ -43,4 +43,4 @@ def automation_execute_start(task_name, tp, child_snapshot=None):
execution = AutomationExecution.objects.create( execution = AutomationExecution.objects.create(
trigger=Trigger.manual, **data trigger=Trigger.manual, **data
) )
execution.start() execution.start(**kwargs)

View File

@@ -17,7 +17,7 @@ __all__ = [
] ]
def update_fact_util(assets=None, nodes=None, task_name=None): def update_fact_util(assets=None, nodes=None, task_name=None, **kwargs):
from assets.models import GatherFactsAutomation from assets.models import GatherFactsAutomation
if task_name is None: if task_name is None:
task_name = gettext_noop("Update some assets hardware info. ") task_name = gettext_noop("Update some assets hardware info. ")
@@ -30,16 +30,16 @@ def update_fact_util(assets=None, nodes=None, task_name=None):
'nodes': [str(node.id) for node in nodes], 'nodes': [str(node.id) for node in nodes],
} }
tp = AutomationTypes.gather_facts tp = AutomationTypes.gather_facts
automation_execute_start(task_name, tp, child_snapshot) automation_execute_start(task_name, tp, child_snapshot, **kwargs)
@org_aware_func('assets') @org_aware_func('assets')
def update_assets_fact_util(assets=None, task_name=None): def update_assets_fact_util(assets=None, task_name=None, **kwargs):
if assets is None: if assets is None:
logger.info("No assets to update hardware info") logger.info("No assets to update hardware info")
return return
update_fact_util(assets=assets, task_name=task_name) update_fact_util(assets=assets, task_name=task_name, **kwargs)
@org_aware_func('nodes') @org_aware_func('nodes')
@@ -51,11 +51,11 @@ def update_nodes_fact_util(nodes=None, task_name=None):
@shared_task(queue="ansible", verbose_name=_('Manually update the hardware information of assets')) @shared_task(queue="ansible", verbose_name=_('Manually update the hardware information of assets'))
def update_assets_hardware_info_manual(asset_ids): def update_assets_hardware_info_manual(asset_ids, **kwargs):
from assets.models import Asset from assets.models import Asset
assets = Asset.objects.filter(id__in=asset_ids) assets = Asset.objects.filter(id__in=asset_ids)
task_name = gettext_noop("Update assets hardware info: ") task_name = gettext_noop("Update assets hardware info: ")
update_assets_fact_util(assets=assets, task_name=task_name) update_assets_fact_util(assets=assets, task_name=task_name, **kwargs)
@shared_task(queue="ansible", verbose_name=_('Manually update the hardware information of assets under a node')) @shared_task(queue="ansible", verbose_name=_('Manually update the hardware information of assets under a node'))

View File

@@ -17,7 +17,7 @@ __all__ = [
] ]
def test_connectivity_util(assets, tp, task_name, local_port=None): def test_connectivity_util(assets, tp, task_name, local_port=None, **kwargs):
if not assets: if not assets:
return return
@@ -27,11 +27,11 @@ def test_connectivity_util(assets, tp, task_name, local_port=None):
child_snapshot = {'local_port': local_port} child_snapshot = {'local_port': local_port}
child_snapshot['assets'] = [str(asset.id) for asset in assets] child_snapshot['assets'] = [str(asset.id) for asset in assets]
automation_execute_start(task_name, tp, child_snapshot) automation_execute_start(task_name, tp, child_snapshot, **kwargs)
@org_aware_func('assets') @org_aware_func('assets')
def test_asset_connectivity_util(assets, task_name=None, local_port=None): def test_asset_connectivity_util(assets, task_name=None, local_port=None, **kwargs):
from assets.models import PingAutomation from assets.models import PingAutomation
if task_name is None: if task_name is None:
task_name = gettext_noop("Test assets connectivity ") task_name = gettext_noop("Test assets connectivity ")
@@ -40,19 +40,23 @@ def test_asset_connectivity_util(assets, task_name=None, local_port=None):
gateway_assets = assets.filter(platform__name=GATEWAY_NAME) gateway_assets = assets.filter(platform__name=GATEWAY_NAME)
test_connectivity_util( test_connectivity_util(
gateway_assets, AutomationTypes.ping_gateway, task_name, local_port gateway_assets, AutomationTypes.ping_gateway,
task_name, local_port, **kwargs
) )
non_gateway_assets = assets.exclude(platform__name=GATEWAY_NAME) non_gateway_assets = assets.exclude(platform__name=GATEWAY_NAME)
test_connectivity_util(non_gateway_assets, AutomationTypes.ping, task_name) test_connectivity_util(
non_gateway_assets, AutomationTypes.ping,
task_name, **kwargs
)
@shared_task(queue="ansible", verbose_name=_('Manually test the connectivity of a asset')) @shared_task(queue="ansible", verbose_name=_('Manually test the connectivity of a asset'))
def test_assets_connectivity_manual(asset_ids, local_port=None): def test_assets_connectivity_manual(asset_ids, local_port=None, **kwargs):
from assets.models import Asset from assets.models import Asset
assets = Asset.objects.filter(id__in=asset_ids) assets = Asset.objects.filter(id__in=asset_ids)
task_name = gettext_noop("Test assets connectivity ") task_name = gettext_noop("Test assets connectivity ")
test_asset_connectivity_util(assets, task_name, local_port) test_asset_connectivity_util(assets, task_name, local_port, **kwargs)
@shared_task(queue="ansible", verbose_name=_('Manually test the connectivity of assets under a node')) @shared_task(queue="ansible", verbose_name=_('Manually test the connectivity of assets under a node'))

View File

@@ -3,6 +3,7 @@
from importlib import import_module from importlib import import_module
from django.conf import settings from django.conf import settings
from django.db.models import F, Value, CharField
from rest_framework import generics from rest_framework import generics
from rest_framework.permissions import IsAuthenticated from rest_framework.permissions import IsAuthenticated
from rest_framework.mixins import ListModelMixin, CreateModelMixin, RetrieveModelMixin from rest_framework.mixins import ListModelMixin, CreateModelMixin, RetrieveModelMixin
@@ -14,7 +15,8 @@ from common.plugins.es import QuerySet as ESQuerySet
from orgs.utils import current_org, tmp_to_root_org from orgs.utils import current_org, tmp_to_root_org
from orgs.mixins.api import OrgGenericViewSet, OrgBulkModelViewSet from orgs.mixins.api import OrgGenericViewSet, OrgBulkModelViewSet
from .backends import TYPE_ENGINE_MAPPING from .backends import TYPE_ENGINE_MAPPING
from .models import FTPLog, UserLoginLog, OperateLog, PasswordChangeLog from .const import ActivityChoices
from .models import FTPLog, UserLoginLog, OperateLog, PasswordChangeLog, ActivityLog
from .serializers import FTPLogSerializer, UserLoginLogSerializer, JobAuditLogSerializer from .serializers import FTPLogSerializer, UserLoginLogSerializer, JobAuditLogSerializer
from .serializers import ( from .serializers import (
OperateLogSerializer, OperateLogActionDetailSerializer, OperateLogSerializer, OperateLogActionDetailSerializer,
@@ -79,15 +81,38 @@ class MyLoginLogAPIView(UserLoginCommonMixin, generics.ListAPIView):
class ResourceActivityAPIView(generics.ListAPIView): class ResourceActivityAPIView(generics.ListAPIView):
serializer_class = ActivitiesOperatorLogSerializer serializer_class = ActivitiesOperatorLogSerializer
rbac_perms = { rbac_perms = {
'GET': 'audits.view_operatelog', 'GET': 'audits.view_activitylog',
} }
def get_queryset(self): @staticmethod
resource_id = self.request.query_params.get('resource_id') def get_operate_log_qs(fields, limit=30, **filters):
with tmp_to_root_org(): queryset = OperateLog.objects.filter(**filters).annotate(
queryset = OperateLog.objects.filter(resource_id=resource_id)[:30] r_type=Value(ActivityChoices.operate_log, CharField()),
r_detail_url=Value(None, CharField()),
r_detail=Value(None, CharField()),
r_user=F('user'), r_action=F('action'),
).values(*fields)[:limit]
return queryset return queryset
@staticmethod
def get_activity_log_qs(fields, limit=30, **filters):
queryset = ActivityLog.objects.filter(**filters).annotate(
r_type=F('type'), r_detail_url=F('detail_url'), r_detail=F('detail'),
r_user=Value(None, CharField()),
r_action=Value(None, CharField()),
).values(*fields)[:limit]
return queryset
def get_queryset(self):
limit = 30
resource_id = self.request.query_params.get('resource_id')
fields = ('id', 'datetime', 'r_detail', 'r_detail_url', 'r_user', 'r_action', 'r_type')
with tmp_to_root_org():
qs1 = self.get_operate_log_qs(fields, resource_id=resource_id)
qs2 = self.get_activity_log_qs(fields, resource_id=resource_id)
queryset = qs2.union(qs1)
return queryset[:limit]
class OperateLogViewSet(RetrieveModelMixin, ListModelMixin, OrgGenericViewSet): class OperateLogViewSet(RetrieveModelMixin, ListModelMixin, OrgGenericViewSet):
model = OperateLog model = OperateLog

View File

@@ -35,6 +35,13 @@ class LoginTypeChoices(TextChoices):
unknown = "U", _("Unknown") unknown = "U", _("Unknown")
class ActivityChoices(TextChoices):
operate_log = 'O', _('Operate log')
session_log = 'S', _('Session log')
login_log = 'L', _('Login log')
task = 'T', _('Task')
class MFAChoices(IntegerChoices): class MFAChoices(IntegerChoices):
disabled = 0, _("Disabled") disabled = 0, _("Disabled")
enabled = 1, _("Enabled") enabled = 1, _("Enabled")

View File

@@ -130,58 +130,6 @@ class OperatorLogHandler(metaclass=Singleton):
after = self.__data_processing(after) after = self.__data_processing(after)
return before, after return before, after
@staticmethod
def _get_Session_params(resource, **kwargs):
# 更新会话的日志不在Activity中体现
# 否则会话结束,录像文件结束操作的会话记录都会体现出来
params = {}
action = kwargs.get('data', {}).get('action', 'create')
detail = _(
'{} used account[{}], login method[{}] login the asset.'
).format(
resource.user, resource.account, resource.login_from_display
)
if action == ActionChoices.create:
params = {
'action': ActionChoices.connect,
'resource_id': str(resource.asset_id),
'user': resource.user, 'detail': detail
}
return params
@staticmethod
def _get_ChangeSecretRecord_params(resource, **kwargs):
detail = _(
'User {} has executed change auth plan for this account.({})'
).format(
resource.created_by, _(resource.status.title())
)
return {
'action': ActionChoices.change_auth, 'detail': detail,
'resource_id': str(resource.account_id),
}
@staticmethod
def _get_UserLoginLog_params(resource, **kwargs):
username = resource.username
login_status = _('Success') if resource.status else _('Failed')
detail = _('User {} login into this service.[{}]').format(
resource.username, login_status
)
user_id = User.objects.filter(username=username).\
values_list('id', flat=True)[0]
return {
'action': ActionChoices.login, 'detail': detail,
'resource_id': str(user_id),
}
def _activity_handle(self, data, object_name, resource):
param_func = getattr(self, '_get_%s_params' % object_name, None)
if param_func is not None:
params = param_func(resource, data=data)
data.update(params)
return data
def create_or_update_operate_log( def create_or_update_operate_log(
self, action, resource_type, resource=None, self, action, resource_type, resource=None,
force=False, log_id=None, before=None, after=None, force=False, log_id=None, before=None, after=None,
@@ -204,7 +152,6 @@ class OperatorLogHandler(metaclass=Singleton):
'remote_addr': remote_addr, 'before': before, 'after': after, 'remote_addr': remote_addr, 'before': before, 'after': after,
'org_id': get_current_org_id(), 'resource_id': str(resource.id) 'org_id': get_current_org_id(), 'resource_id': str(resource.id)
} }
data = self._activity_handle(data, object_name, resource=resource)
with transaction.atomic(): with transaction.atomic():
if self.log_client.ping(timeout=1): if self.log_client.ping(timeout=1):
client = self.log_client client = self.log_client

View File

@@ -12,6 +12,7 @@ from orgs.utils import current_org
from .const import ( from .const import (
OperateChoices, OperateChoices,
ActionChoices, ActionChoices,
ActivityChoices,
LoginTypeChoices, LoginTypeChoices,
MFAChoices, MFAChoices,
LoginStatusChoices, LoginStatusChoices,
@@ -20,6 +21,7 @@ from .const import (
__all__ = [ __all__ = [
"FTPLog", "FTPLog",
"OperateLog", "OperateLog",
"ActivityLog",
"PasswordChangeLog", "PasswordChangeLog",
"UserLoginLog", "UserLoginLog",
] ]
@@ -59,7 +61,6 @@ class OperateLog(OrgModelMixin):
remote_addr = models.CharField(max_length=128, verbose_name=_("Remote addr"), blank=True, null=True) remote_addr = models.CharField(max_length=128, verbose_name=_("Remote addr"), blank=True, null=True)
datetime = models.DateTimeField(auto_now=True, verbose_name=_('Datetime'), db_index=True) datetime = models.DateTimeField(auto_now=True, verbose_name=_('Datetime'), db_index=True)
diff = models.JSONField(default=dict, encoder=ModelJSONFieldEncoder, null=True) diff = models.JSONField(default=dict, encoder=ModelJSONFieldEncoder, null=True)
detail = models.CharField(max_length=128, null=True, blank=True, verbose_name=_('Detail'))
def __str__(self): def __str__(self):
return "<{}> {} <{}>".format(self.user, self.action, self.resource) return "<{}> {} <{}>".format(self.user, self.action, self.resource)
@@ -93,6 +94,34 @@ class OperateLog(OrgModelMixin):
ordering = ('-datetime',) ordering = ('-datetime',)
class ActivityLog(OrgModelMixin):
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
type = models.CharField(
choices=ActivityChoices.choices, max_length=2,
null=True, default=None, verbose_name=_("Activity type"),
)
resource_id = models.CharField(
max_length=36, blank=True, default='',
db_index=True, verbose_name=_("Resource")
)
datetime = models.DateTimeField(
auto_now=True, verbose_name=_('Datetime'), db_index=True
)
detail = models.TextField(default='', blank=True, verbose_name=_('Detail'))
detail_url = models.CharField(
max_length=256, default=None, null=True, verbose_name=_('Detail url')
)
class Meta:
verbose_name = _("Activity log")
ordering = ('-datetime',)
def save(self, *args, **kwargs):
if current_org.is_root() and not self.org_id:
self.org_id = Organization.ROOT_ID
return super(ActivityLog, self).save(*args, **kwargs)
class PasswordChangeLog(models.Model): class PasswordChangeLog(models.Model):
id = models.UUIDField(default=uuid.uuid4, primary_key=True) id = models.UUIDField(default=uuid.uuid4, primary_key=True)
user = models.CharField(max_length=128, verbose_name=_("User")) user = models.CharField(max_length=128, verbose_name=_("User"))

View File

@@ -5,6 +5,7 @@ from rest_framework import serializers
from audits.backends.db import OperateLogStore from audits.backends.db import OperateLogStore
from common.serializers.fields import LabeledChoiceField from common.serializers.fields import LabeledChoiceField
from common.utils import reverse
from common.utils.timezone import as_current_tz from common.utils.timezone import as_current_tz
from ops.models.job import JobAuditLog from ops.models.job import JobAuditLog
from ops.serializers.job import JobExecutionSerializer from ops.serializers.job import JobExecutionSerializer
@@ -13,7 +14,7 @@ from . import models
from .const import ( from .const import (
ActionChoices, OperateChoices, ActionChoices, OperateChoices,
MFAChoices, LoginStatusChoices, MFAChoices, LoginStatusChoices,
LoginTypeChoices, LoginTypeChoices, ActivityChoices,
) )
@@ -107,17 +108,29 @@ class SessionAuditSerializer(serializers.ModelSerializer):
class ActivitiesOperatorLogSerializer(serializers.Serializer): class ActivitiesOperatorLogSerializer(serializers.Serializer):
timestamp = serializers.SerializerMethodField() timestamp = serializers.SerializerMethodField()
detail_url = serializers.SerializerMethodField()
content = serializers.SerializerMethodField() content = serializers.SerializerMethodField()
@staticmethod @staticmethod
def get_timestamp(obj): def get_timestamp(obj):
return as_current_tz(obj.datetime).strftime('%Y-%m-%d %H:%M:%S') return as_current_tz(obj['datetime']).strftime('%Y-%m-%d %H:%M:%S')
@staticmethod @staticmethod
def get_content(obj): def get_content(obj):
action = obj.action.replace('_', ' ').capitalize() if not obj['r_detail']:
if not obj.detail: action = obj['r_action'].replace('_', ' ').capitalize()
ctn = _('User {} {} this resource.').format(obj.user, _(action)) ctn = _('User {} {} this resource.').format(obj['r_user'], _(action))
else: else:
ctn = obj.detail ctn = obj['r_detail']
return ctn return ctn
@staticmethod
def get_detail_url(obj):
detail_url = obj['r_detail_url']
if obj['r_type'] == ActivityChoices.operate_log:
detail_url = reverse(
view_name='audits:operate-log-detail',
kwargs={'pk': obj['id']},
api_to_ui=True, is_audit=True
)
return detail_url

View File

@@ -18,6 +18,7 @@ from audits.handler import (
get_instance_current_with_cache_diff, cache_instance_before_data, get_instance_current_with_cache_diff, cache_instance_before_data,
create_or_update_operate_log, get_instance_dict_from_cache create_or_update_operate_log, get_instance_dict_from_cache
) )
from audits.models import ActivityLog
from audits.utils import model_to_dict_for_operate_log as model_to_dict from audits.utils import model_to_dict_for_operate_log as model_to_dict
from authentication.signals import post_auth_failed, post_auth_success from authentication.signals import post_auth_failed, post_auth_success
from authentication.utils import check_different_city_login_if_need from authentication.utils import check_different_city_login_if_need
@@ -34,6 +35,8 @@ from users.signals import post_user_change_password
from . import models, serializers from . import models, serializers
from .const import MODELS_NEED_RECORD, ActionChoices from .const import MODELS_NEED_RECORD, ActionChoices
from .utils import write_login_log from .utils import write_login_log
from .signals import post_activity_log
logger = get_logger(__name__) logger = get_logger(__name__)
sys_logger = get_syslogger(__name__) sys_logger = get_syslogger(__name__)
@@ -242,7 +245,7 @@ def get_login_backend(request):
return backend_label return backend_label
def generate_data(username, request, login_type=None): def generate_data(username, request, login_type=None, user_id=None):
user_agent = request.META.get('HTTP_USER_AGENT', '') user_agent = request.META.get('HTTP_USER_AGENT', '')
login_ip = get_request_ip(request) or '0.0.0.0' login_ip = get_request_ip(request) or '0.0.0.0'
@@ -255,6 +258,7 @@ def generate_data(username, request, login_type=None):
backend = str(get_login_backend(request)) backend = str(get_login_backend(request))
data = { data = {
'user_id': user_id,
'username': username, 'username': username,
'ip': login_ip, 'ip': login_ip,
'type': login_type, 'type': login_type,
@@ -269,7 +273,9 @@ def generate_data(username, request, login_type=None):
def on_user_auth_success(sender, user, request, login_type=None, **kwargs): def on_user_auth_success(sender, user, request, login_type=None, **kwargs):
logger.debug('User login success: {}'.format(user.username)) logger.debug('User login success: {}'.format(user.username))
check_different_city_login_if_need(user, request) check_different_city_login_if_need(user, request)
data = generate_data(user.username, request, login_type=login_type) data = generate_data(
user.username, request, login_type=login_type, user_id=user.id
)
request.session['login_time'] = data['datetime'].strftime("%Y-%m-%d %H:%M:%S") request.session['login_time'] = data['datetime'].strftime("%Y-%m-%d %H:%M:%S")
data.update({'mfa': int(user.mfa_enabled), 'status': True}) data.update({'mfa': int(user.mfa_enabled), 'status': True})
write_login_log(**data) write_login_log(**data)
@@ -286,8 +292,8 @@ def on_user_auth_failed(sender, username, request, reason='', **kwargs):
@receiver(django_ready) @receiver(django_ready)
def on_django_start_set_operate_log_monitor_models(sender, **kwargs): def on_django_start_set_operate_log_monitor_models(sender, **kwargs):
exclude_apps = { exclude_apps = {
'django_cas_ng', 'captcha', 'admin', 'jms_oidc_rp', 'django_cas_ng', 'captcha', 'admin', 'jms_oidc_rp', 'audits',
'django_celery_beat', 'contenttypes', 'sessions', 'auth' 'django_celery_beat', 'contenttypes', 'sessions', 'auth',
} }
exclude_models = { exclude_models = {
'UserPasswordHistory', 'ContentType', 'UserPasswordHistory', 'ContentType',
@@ -302,7 +308,6 @@ def on_django_start_set_operate_log_monitor_models(sender, **kwargs):
'PermedAsset', 'PermedAccount', 'MenuPermission', 'PermedAsset', 'PermedAccount', 'MenuPermission',
'Permission', 'TicketSession', 'ApplyLoginTicket', 'Permission', 'TicketSession', 'ApplyLoginTicket',
'ApplyCommandTicket', 'ApplyLoginAssetTicket', 'ApplyCommandTicket', 'ApplyLoginAssetTicket',
'FTPLog', 'OperateLog', 'PasswordChangeLog'
} }
for i, app in enumerate(apps.get_models(), 1): for i, app in enumerate(apps.get_models(), 1):
app_name = app._meta.app_label app_name = app._meta.app_label
@ -312,3 +317,11 @@ def on_django_start_set_operate_log_monitor_models(sender, **kwargs):
model_name.endswith('Execution'): model_name.endswith('Execution'):
continue continue
MODELS_NEED_RECORD.add(model_name) MODELS_NEED_RECORD.add(model_name)
@receiver(post_activity_log)
def on_activity_log_trigger(sender, **kwargs):
ActivityLog.objects.create(
resource_id=kwargs['resource_id'],
detail=kwargs.get('detail'), detail_url=kwargs.get('detail_url')
)

6
apps/audits/signals.py Normal file
View File

@ -0,0 +1,6 @@
from django.dispatch import Signal
post_activity_log = Signal(
providing_args=('resource_id', 'detail', 'detail_url')
)

View File

@ -7,7 +7,7 @@ from celery import shared_task
from ops.celery.decorator import ( from ops.celery.decorator import (
register_as_period_task register_as_period_task
) )
from .models import UserLoginLog, OperateLog, FTPLog from .models import UserLoginLog, OperateLog, FTPLog, ActivityLog
from common.utils import get_log_keep_day from common.utils import get_log_keep_day
@ -25,6 +25,13 @@ def clean_operation_log_period():
OperateLog.objects.filter(datetime__lt=expired_day).delete() OperateLog.objects.filter(datetime__lt=expired_day).delete()
def clean_activity_log_period():
now = timezone.now()
days = get_log_keep_day('ACTIVITY_LOG_KEEP_DAYS')
expired_day = now - datetime.timedelta(days=days)
ActivityLog.objects.filter(datetime__lt=expired_day).delete()
def clean_ftp_log_period(): def clean_ftp_log_period():
now = timezone.now() now = timezone.now()
days = get_log_keep_day('FTP_LOG_KEEP_DAYS') days = get_log_keep_day('FTP_LOG_KEEP_DAYS')

View File

@ -5,11 +5,14 @@ from itertools import chain
from django.http import HttpResponse from django.http import HttpResponse
from django.db import models from django.db import models
from django.utils.translation import gettext_lazy as _
from audits.const import ActivityChoices
from settings.serializers import SettingsSerializer from settings.serializers import SettingsSerializer
from common.utils import validate_ip, get_ip_city, get_logger from common.utils import validate_ip, get_ip_city, get_logger
from common.db import fields from common.db import fields
from .const import DEFAULT_CITY from .const import DEFAULT_CITY
from .signals import post_activity_log
logger = get_logger(__name__) logger = get_logger(__name__)
@ -44,7 +47,19 @@ def write_login_log(*args, **kwargs):
else: else:
city = get_ip_city(ip) or DEFAULT_CITY city = get_ip_city(ip) or DEFAULT_CITY
kwargs.update({'ip': ip, 'city': city}) kwargs.update({'ip': ip, 'city': city})
UserLoginLog.objects.create(**kwargs) user_id = kwargs.pop('user_id', None)
audit_log = UserLoginLog.objects.create(**kwargs)
# 发送Activity信号
if user_id is not None:
login_status = _('Success') if audit_log.status else _('Failed')
detail = _('User {} login into this service.[{}]').format(
audit_log.username, login_status
)
post_activity_log.send(
sender=UserLoginLog, resource_id=user_id, detail=detail,
type=ActivityChoices.login_log
)
def get_resource_display(resource): def get_resource_display(resource):

View File

@ -512,6 +512,7 @@ class Config(dict):
'LOGIN_LOG_KEEP_DAYS': 200, 'LOGIN_LOG_KEEP_DAYS': 200,
'TASK_LOG_KEEP_DAYS': 90, 'TASK_LOG_KEEP_DAYS': 90,
'OPERATE_LOG_KEEP_DAYS': 200, 'OPERATE_LOG_KEEP_DAYS': 200,
'ACTIVITY_LOG_KEEP_DAYS': 200,
'FTP_LOG_KEEP_DAYS': 200, 'FTP_LOG_KEEP_DAYS': 200,
'CLOUD_SYNC_TASK_EXECUTION_KEEP_DAYS': 30, 'CLOUD_SYNC_TASK_EXECUTION_KEEP_DAYS': 30,

View File

@ -117,6 +117,7 @@ WS_LISTEN_PORT = CONFIG.WS_LISTEN_PORT
LOGIN_LOG_KEEP_DAYS = CONFIG.LOGIN_LOG_KEEP_DAYS LOGIN_LOG_KEEP_DAYS = CONFIG.LOGIN_LOG_KEEP_DAYS
TASK_LOG_KEEP_DAYS = CONFIG.TASK_LOG_KEEP_DAYS TASK_LOG_KEEP_DAYS = CONFIG.TASK_LOG_KEEP_DAYS
OPERATE_LOG_KEEP_DAYS = CONFIG.OPERATE_LOG_KEEP_DAYS OPERATE_LOG_KEEP_DAYS = CONFIG.OPERATE_LOG_KEEP_DAYS
ACTIVITY_LOG_KEEP_DAYS = CONFIG.ACTIVITY_LOG_KEEP_DAYS
FTP_LOG_KEEP_DAYS = CONFIG.FTP_LOG_KEEP_DAYS FTP_LOG_KEEP_DAYS = CONFIG.FTP_LOG_KEEP_DAYS
ORG_CHANGE_TO_URL = CONFIG.ORG_CHANGE_TO_URL ORG_CHANGE_TO_URL = CONFIG.ORG_CHANGE_TO_URL
WINDOWS_SKIP_ALL_MANUAL_PASSWORD = CONFIG.WINDOWS_SKIP_ALL_MANUAL_PASSWORD WINDOWS_SKIP_ALL_MANUAL_PASSWORD = CONFIG.WINDOWS_SKIP_ALL_MANUAL_PASSWORD

View File

@ -79,6 +79,7 @@ exclude_permissions = (
('orgs', 'organizationmember', '*', '*'), ('orgs', 'organizationmember', '*', '*'),
('settings', 'setting', 'add,change,delete', 'setting'), ('settings', 'setting', 'add,change,delete', 'setting'),
('audits', 'operatelog', 'add,delete,change', 'operatelog'), ('audits', 'operatelog', 'add,delete,change', 'operatelog'),
('audits', 'activitylog', 'add,delete,change', 'activitylog'),
('audits', 'passwordchangelog', 'add,change,delete', 'passwordchangelog'), ('audits', 'passwordchangelog', 'add,change,delete', 'passwordchangelog'),
('audits', 'userloginlog', 'add,change,delete,change', 'userloginlog'), ('audits', 'userloginlog', 'add,change,delete,change', 'userloginlog'),
('audits', 'ftplog', 'change,delete', 'ftplog'), ('audits', 'ftplog', 'change,delete', 'ftplog'),

View File

@ -31,4 +31,7 @@ class CleaningSerializer(serializers.Serializer):
min_value=1, max_value=99999, required=True, label=_('Session keep duration'), min_value=1, max_value=99999, required=True, label=_('Session keep duration'),
help_text=_('Unit: days, Session, record, command will be delete if more than duration, only in database') help_text=_('Unit: days, Session, record, command will be delete if more than duration, only in database')
) )
ACTIVITY_LOG_KEEP_DAYS = serializers.IntegerField(
min_value=1, max_value=9999,
label=_("Activity log keep days"), help_text=_("Unit: day")
)

View File

@ -15,6 +15,8 @@ from rest_framework.decorators import action
from rest_framework.permissions import IsAuthenticated from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response from rest_framework.response import Response
from audits.signals import post_activity_log
from audits.const import ActivityChoices
from common.const.http import GET from common.const.http import GET
from common.drf.filters import DatetimeRangeFilter from common.drf.filters import DatetimeRangeFilter
from common.drf.renders import PassthroughRenderer from common.drf.renders import PassthroughRenderer
@ -120,10 +122,30 @@ class SessionViewSet(OrgBulkModelViewSet):
queryset = queryset.select_for_update() queryset = queryset.select_for_update()
return queryset return queryset
@staticmethod
def send_activity(serializer):
# 发送Activity信号
data = serializer.validated_data
user, asset_id = data['user'], data["asset_id"]
account, login_from = data['account'], data["login_from"]
login_from = Session(login_from=login_from).get_login_from_display()
detail = _(
'{} used account[{}], login method[{}] login the asset.'
).format(
user, account, login_from
)
post_activity_log.send(
sender=Session, resource_id=asset_id, detail=detail,
type=ActivityChoices.session_log
)
def perform_create(self, serializer): def perform_create(self, serializer):
if hasattr(self.request.user, 'terminal'): if hasattr(self.request.user, 'terminal'):
serializer.validated_data["terminal"] = self.request.user.terminal serializer.validated_data["terminal"] = self.request.user.terminal
return super().perform_create(serializer)
resp = super().perform_create(serializer)
self.send_activity(serializer)
return resp
class SessionReplayViewSet(AsyncApiMixin, viewsets.ViewSet): class SessionReplayViewSet(AsyncApiMixin, viewsets.ViewSet):