Mirror of https://github.com/haiwen/seahub.git — synced 2025-08-30 21:50:59 +00:00

Merge pull request #6521 from haiwen/event_msg_to_json

event msg to json
This commit is contained in:
JoinTyang 2024-08-19 09:58:45 +08:00 committed by GitHub
commit ae5c17b4bf
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 60 additions and 24 deletions

View File

@@ -114,7 +114,7 @@ class ReposView(APIView):
# do not return virtual repos # do not return virtual repos
if r.is_virtual: if r.is_virtual:
continue continue
if is_wiki_repo(r): if is_wiki_repo(r):
continue continue
url, _, _ = api_avatar_url(email, int(24)) url, _, _ = api_avatar_url(email, int(24))
@@ -176,7 +176,7 @@
shared_repos.sort(key=lambda x: x.last_modify, reverse=True) shared_repos.sort(key=lambda x: x.last_modify, reverse=True)
for r in shared_repos: for r in shared_repos:
if is_wiki_repo(r): if is_wiki_repo(r):
continue continue
@@ -249,11 +249,11 @@
monitored_repo_id_list = [] monitored_repo_id_list = []
for r in group_repos: for r in group_repos:
if is_wiki_repo(r): if is_wiki_repo(r):
continue continue
repo_info = { repo_info = {
"type": "group", "type": "group",
"group_id": r.group_id, "group_id": r.group_id,
@@ -298,10 +298,10 @@
nickname_dict[e] = email2nickname(e) nickname_dict[e] = email2nickname(e)
for r in public_repos: for r in public_repos:
if is_wiki_repo(r): if is_wiki_repo(r):
continue continue
repo_owner = repo_id_owner_dict[r.repo_id] repo_owner = repo_id_owner_dict[r.repo_id]
url, _, _ = api_avatar_url(repo_owner, int(24)) url, _, _ = api_avatar_url(repo_owner, int(24))
repo_info = { repo_info = {
@@ -328,8 +328,9 @@
utc_dt = datetime.datetime.utcnow() utc_dt = datetime.datetime.utcnow()
timestamp = utc_dt.strftime('%Y-%m-%d %H:%M:%S') timestamp = utc_dt.strftime('%Y-%m-%d %H:%M:%S')
org_id = request.user.org.org_id if is_org_context(request) else -1 org_id = request.user.org.org_id if is_org_context(request) else -1
from seahub.utils import send_user_login_msg
try: try:
seafile_api.publish_event('seahub.stats', 'user-login\t%s\t%s\t%s' % (email, timestamp, org_id)) send_user_login_msg(email, timestamp, org_id)
except Exception as e: except Exception as e:
logger.error('Error when sending user-login message: %s' % str(e)) logger.error('Error when sending user-login message: %s' % str(e))

View File

@@ -1004,8 +1004,9 @@ class Repos(APIView):
if is_org_context(request): if is_org_context(request):
org_id = request.user.org.org_id org_id = request.user.org.org_id
from seahub.utils import send_user_login_msg
try: try:
seafile_api.publish_event('seahub.stats', 'user-login\t%s\t%s\t%s' % (email, timestamp, org_id)) send_user_login_msg(email, timestamp, org_id)
except Exception as e: except Exception as e:
logger.error('Error when sending user-login message: %s' % str(e)) logger.error('Error when sending user-login message: %s' % str(e))
response = HttpResponse(json.dumps(repos_json), status=200, response = HttpResponse(json.dumps(repos_json), status=200,

View File

@@ -2,6 +2,7 @@ import hashlib
import os import os
import logging import logging
import posixpath import posixpath
import json
from seaserv import seafile_api from seaserv import seafile_api
def send_draft_publish_msg(draft, username, path):
    """Publish a draft-published event on the 'seahub.draft' channel.

    - `draft`: Draft model instance being published (provides the origin
      repo id and the draft file path)
    - `username`: user who performed the publish
    - `path`: destination (published) file path

    Failures are logged, never raised: event publishing is best-effort.
    """
    repo_id = draft.origin_repo_id
    old_path = draft.draft_file_path
    msg = {
        'msg_type': 'publish',
        # Must be the literal string 'draft' (the object type), not the
        # Draft instance itself: a model object is not JSON-serializable,
        # so json.dumps below would raise on every call. The legacy
        # tab-separated message carried the string "draft" in this slot.
        'obj_type': 'draft',
        'repo_id': repo_id,
        'user_name': username,
        'path': path,
        'old_path': old_path,
    }
    try:
        seafile_api.publish_event('seahub.draft', json.dumps(msg))
    except Exception as e:
        logger.error("Error when sending draft publish message: %s" % str(e))

View File

@@ -13,6 +13,7 @@ import tempfile
import configparser import configparser
import mimetypes import mimetypes
import contextlib import contextlib
import json
from datetime import datetime from datetime import datetime
from urllib.parse import urlparse, urljoin from urllib.parse import urlparse, urljoin
@@ -397,7 +398,7 @@ def get_user_repos(username, org_id=None):
r.id = r.repo_id r.id = r.repo_id
r.name = r.repo_name r.name = r.repo_name
r.last_modify = r.last_modified r.last_modify = r.last_modified
return (owned_repos, shared_repos, groups_repos, public_repos) return (owned_repos, shared_repos, groups_repos, public_repos)
def get_conf_text_ext(): def get_conf_text_ext():
@@ -642,7 +643,7 @@ if EVENTS_CONFIG_FILE:
with _get_seafevents_session() as session: with _get_seafevents_session() as session:
res = seafevents_api.get_file_history_by_day(session, repo_id, path, start, count, to_tz, history_limit) res = seafevents_api.get_file_history_by_day(session, repo_id, path, start, count, to_tz, history_limit)
return res return res
def get_file_daily_history_detail(repo_id, path, start_time, end_time, to_tz): def get_file_daily_history_detail(repo_id, path, start_time, end_time, to_tz):
"""Return file histories detail """Return file histories detail
""" """
@@ -812,7 +813,7 @@
def get_file_history_suffix(): def get_file_history_suffix():
return seafevents_api.get_file_history_suffix(parsed_events_conf) return seafevents_api.get_file_history_suffix(parsed_events_conf)
def get_trash_records(repo_id, show_time, start, limit): def get_trash_records(repo_id, show_time, start, limit):
with _get_seafevents_session() as session: with _get_seafevents_session() as session:
res, total_count = seafevents_api.get_delete_records(session, repo_id, show_time, start, limit) res, total_count = seafevents_api.get_delete_records(session, repo_id, show_time, start, limit)
@@ -1189,7 +1190,7 @@ if EVENTS_CONFIG_FILE:
else: else:
logging.debug('search: not enabled') logging.debug('search: not enabled')
return enabled return enabled
def check_seasearch_enabled(): def check_seasearch_enabled():
enabled = False enabled = False
if hasattr(seafevents_api, 'is_seasearch_enabled'): if hasattr(seafevents_api, 'is_seasearch_enabled'):
@@ -1315,15 +1316,34 @@ def send_perm_audit_msg(etype, from_user, to, repo_id, path, perm):
- `path`: dir path - `path`: dir path
- `perm`: r or rw - `perm`: r or rw
""" """
msg = 'perm-change\t%s\t%s\t%s\t%s\t%s\t%s' % \
(etype, from_user, to, repo_id, path, perm) msg = {
'msg_type': 'perm-change',
'etype': etype,
'from_user': from_user,
'to': to,
'repo_id': repo_id,
'file_path': path,
'perm': perm,
}
try: try:
seafile_api.publish_event('seahub.audit', msg) seafile_api.publish_event('seahub.audit', json.dumps(msg))
except Exception as e: except Exception as e:
logger.error("Error when sending perm-audit-%s message: %s" % logger.error("Error when sending perm-audit-%s message: %s" %
(etype, str(e))) (etype, str(e)))
def send_user_login_msg(username, timestamp, org_id):
    """Publish a JSON-encoded 'user-login' event on the 'seahub.stats' channel.

    - `username`: email of the user who logged in
    - `timestamp`: login time string as formatted by the caller
    - `org_id`: organization id (callers pass -1 outside an org context)

    Note: unlike the other send_*_msg helpers this does not swallow
    exceptions itself; callers wrap it in try/except.
    """
    payload = json.dumps({
        'msg_type': 'user-login',
        'user_name': username,
        'timestamp': timestamp,
        'org_id': org_id,
    })
    seafile_api.publish_event('seahub.stats', payload)
def get_origin_repo_info(repo_id): def get_origin_repo_info(repo_id):
repo = seafile_api.get_repo(repo_id) repo = seafile_api.get_repo(repo_id)
if repo.origin_repo_id is not None: if repo.origin_repo_id is not None:

View File

@@ -1395,7 +1395,7 @@ def view_shared_file(request, fileshare):
def view_file_via_shared_dir(request, fileshare): def view_file_via_shared_dir(request, fileshare):
from seahub.utils import redirect_to_login from seahub.utils import redirect_to_login
token = fileshare.token token = fileshare.token
if not check_share_link_user_access(fileshare, request.user.username): if not check_share_link_user_access(fileshare, request.user.username):
if not request.user.username: if not request.user.username:
return redirect_to_login(request) return redirect_to_login(request)
@@ -1697,11 +1697,17 @@ def send_file_access_msg(request, repo, path, access_from):
ip = get_remote_ip(request) ip = get_remote_ip(request)
user_agent = request.headers.get("user-agent") user_agent = request.headers.get("user-agent")
msg = 'file-download-%s\t%s\t%s\t%s\t%s\t%s' % \ msg = {
(access_from, username, ip, user_agent, repo.id, path) 'msg_type': 'file-download-' + access_from,
'user_name': username,
'ip': ip,
'user_agent': user_agent,
'repo_id': repo.id,
'file_path': path,
}
try: try:
seafile_api.publish_event('seahub.audit', msg) seafile_api.publish_event('seahub.audit', json.dumps(msg))
except Exception as e: except Exception as e:
logger.error("Error when sending file-download-%s message: %s" % logger.error("Error when sending file-download-%s message: %s" %
(access_from, str(e))) (access_from, str(e)))
@@ -2150,14 +2156,14 @@ def view_sdoc_revision(request, repo_id, revision_id):
revision = SeadocRevision.objects.get_by_revision_id(repo_id, revision_id) revision = SeadocRevision.objects.get_by_revision_id(repo_id, revision_id)
if not revision: if not revision:
return render_error(request, 'revision not found') return render_error(request, 'revision not found')
is_published = revision.is_published is_published = revision.is_published
if is_published: if is_published:
origin_file_uuid = revision.origin_doc_uuid origin_file_uuid = revision.origin_doc_uuid
origin_uuid_map = FileUUIDMap.objects.get_fileuuidmap_by_uuid(origin_file_uuid) origin_uuid_map = FileUUIDMap.objects.get_fileuuidmap_by_uuid(origin_file_uuid)
if not origin_uuid_map: if not origin_uuid_map:
return render_error(request, _('The original file does not exist')) return render_error(request, _('The original file does not exist'))
parent_dir = origin_uuid_map.parent_path parent_dir = origin_uuid_map.parent_path
filename = origin_uuid_map.filename filename = origin_uuid_map.filename
path = posixpath.join(parent_dir, filename) path = posixpath.join(parent_dir, filename)