Mirror of https://github.com/haiwen/seahub.git, synced 2025-08-22 08:47:22 +00:00
Merge pull request #4925 from haiwen/onlyoffice-cache-log
Onlyoffice cache log
Commit: cbc99a161d
@@ -21,7 +21,7 @@ from seahub.onlyoffice.settings import ONLYOFFICE_APIJS_URL, \
     ONLYOFFICE_FORCE_SAVE, ONLYOFFICE_JWT_SECRET
 
 # Get an instance of a logger
-logger = logging.getLogger(__name__)
+logger = logging.getLogger('onlyoffice')
 
 
 def generate_onlyoffice_cache_key(repo_id, file_path):
@@ -33,6 +33,8 @@ def generate_onlyoffice_cache_key(repo_id, file_path):
 def get_onlyoffice_dict(request, username, repo_id, file_path, file_id='',
                         can_edit=False, can_download=True):
 
+    logger.info('{} open file {} in repo {}'.format(username, file_path, repo_id))
+
     repo = seafile_api.get_repo(repo_id)
     if repo.is_virtual:
         origin_repo_id = repo.origin_repo_id
@@ -65,9 +67,11 @@ def get_onlyoffice_dict(request, username, repo_id, file_path, file_id='',
     else:
         document_type = 'text'
 
-    cache_key = generate_onlyoffice_cache_key(repo_id, file_path)
+    cache_key = generate_onlyoffice_cache_key(origin_repo_id, origin_file_path)
     doc_key = cache.get(cache_key)
 
+    logger.info('get doc_key {} from cache by cache_key {}'.format(doc_key, cache_key))
+
     # temporary solution when failed to get data from cache(django_pylibmc)
     # when init process for the first time
     if not doc_key:
@@ -79,11 +83,13 @@ def get_onlyoffice_dict(request, username, repo_id, file_path, file_id='',
     if not doc_key:
         info_bytes = force_bytes(origin_repo_id + origin_file_path + file_id)
         doc_key = hashlib.md5(info_bytes).hexdigest()[:20]
+        logger.info('generate new doc_key {} by info {}'.format(doc_key, info_bytes))
 
-    doc_info = json.dumps({'repo_id': repo_id,
-                           'file_path': file_path,
+    doc_info = json.dumps({'repo_id': origin_repo_id,
+                           'file_path': origin_file_path,
                            'username': username})
     cache.set("ONLYOFFICE_%s" % doc_key, doc_info, None)
+    logger.info('set doc_key {} and doc_info {} to cache'.format(doc_key, doc_info))
 
     file_name = os.path.basename(file_path.rstrip('/'))
     doc_url = gen_file_get_url(dl_token, file_name)
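
Note: the hunks above (presumably seahub/onlyoffice/utils.py) add log lines around the document-key lookup in get_onlyoffice_dict and switch the cache key to the origin repo id and path, so a virtual (shared-folder) repo and its origin repo share one cache entry. The doc_key itself is derived from the origin repo id, origin file path and file id. A minimal standalone sketch of that derivation, using str.encode and a plain dict in place of Django's force_bytes and cache; the example ids and username are made up:

import hashlib
import json

fake_cache = {}  # stands in for django.core.cache.cache in this sketch


def derive_doc_key(origin_repo_id, origin_file_path, file_id):
    # Same recipe as the code above: md5 over repo id + path + file id,
    # truncated to the first 20 hex characters.
    info_bytes = (origin_repo_id + origin_file_path + file_id).encode('utf-8')
    return hashlib.md5(info_bytes).hexdigest()[:20]


doc_key = derive_doc_key('a1b2c3d4', '/docs/report.docx', 'f9e8d7c6')
doc_info = json.dumps({'repo_id': 'a1b2c3d4',
                       'file_path': '/docs/report.docx',
                       'username': 'alice@example.com'})
fake_cache['ONLYOFFICE_%s' % doc_key] = doc_info
print(doc_key, fake_cache)
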
@@ -16,7 +16,7 @@ from seahub.utils.file_op import if_locked_by_online_office
 
 
 # Get an instance of a logger
-logger = logging.getLogger(__name__)
+logger = logging.getLogger('onlyoffice')
 
 
 @csrf_exempt
@@ -94,6 +94,8 @@ def onlyoffice_editor_callback(request):
     file_path = doc_info['file_path']
     username = doc_info['username']
 
+    logger.info('status {}: get doc_info {} from cache by doc_key {}'.format(status, doc_info, doc_key))
+
     cache_key = generate_onlyoffice_cache_key(repo_id, file_path)
 
     # save file
@@ -131,6 +133,7 @@ def onlyoffice_editor_callback(request):
     # 2 - document is ready for saving,
     if status == 2:
 
+        logger.info('status {}: delete cache_key {} from cache'.format(status, cache_key))
        cache.delete(cache_key)
 
         if is_pro_version() and if_locked_by_online_office(repo_id, file_path):
@@ -139,11 +142,13 @@ def onlyoffice_editor_callback(request):
     # 6 - document is being edited, but the current document state is saved,
     if status == 6:
         # cache document key when forcesave
-        cache.set(cache_key, doc_key)
+        logger.info('status {}: set cache_key {} and doc_key {} to cache'.format(status, cache_key, doc_key))
+        cache.set(cache_key, doc_key, None)
 
     # 4 - document is closed with no changes,
     if status == 4:
 
+        logger.info('status {}: delete cache_key {} from cache'.format(status, cache_key))
        cache.delete(cache_key)
 
         if is_pro_version() and if_locked_by_online_office(repo_id, file_path):
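
Note: the callback hunks above (presumably seahub/onlyoffice/views.py) log what happens for each ONLYOFFICE callback status before the existing cache operations run. A compact sketch of the status handling being instrumented, with a plain dict standing in for Django's cache and a placeholder cache key; this is an illustration only, the real view also saves the file and handles file locks in the pro edition:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('onlyoffice')

fake_cache = {}  # stands in for django.core.cache.cache in this sketch


def handle_callback_status(status, cache_key, doc_key):
    # 2 - document is ready for saving, 4 - document is closed with no
    # changes: the cached cache_key -> doc_key mapping is no longer needed.
    if status in (2, 4):
        logger.info('status %s: delete cache_key %s from cache', status, cache_key)
        fake_cache.pop(cache_key, None)

    # 6 - document is being edited, but the current state was force-saved:
    # keep the doc_key cached; the diff passes timeout=None, i.e. never expire.
    if status == 6:
        logger.info('status %s: set cache_key %s and doc_key %s to cache',
                    status, cache_key, doc_key)
        fake_cache[cache_key] = doc_key


handle_callback_status(6, 'example-cache-key', 'example-doc-key')
handle_callback_status(2, 'example-cache-key', 'example-doc-key')
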
@@ -600,6 +600,14 @@ LOGGING = {
             'backupCount': 5,
             'formatter': 'standard',
         },
+        'onlyoffice_handler': {
+            'level': 'INFO',
+            'class': 'logging.handlers.RotatingFileHandler',
+            'filename': os.path.join(LOG_DIR, 'onlyoffice.log'),
+            'maxBytes': 1024*1024*100,  # 100 MB
+            'backupCount': 5,
+            'formatter': 'standard',
+        },
         'mail_admins': {
             'level': 'ERROR',
             'filters': ['require_debug_false'],
@@ -622,6 +630,11 @@ LOGGING = {
             'level': 'INFO',
             'propagate': False
         },
+        'onlyoffice': {
+            'handlers': ['onlyoffice_handler', ],
+            'level': 'INFO',
+            'propagate': False
+        },
     }
 }
 
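
Note: the settings hunks above (presumably seahub/settings.py) route a dedicated 'onlyoffice' logger to its own rotating onlyoffice.log, which is why both modules switch from logging.getLogger(__name__) to logging.getLogger('onlyoffice'). A trimmed, standalone dictConfig sketch of the same wiring; LOG_DIR and the 'standard' formatter string below are placeholders for values defined elsewhere in the real settings:

import logging
import logging.config
import os

LOG_DIR = '/tmp/seahub-logs'  # placeholder for the LOG_DIR used in settings.py
os.makedirs(LOG_DIR, exist_ok=True)

logging.config.dictConfig({
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        # Placeholder format; the real 'standard' formatter is defined in settings.py.
        'standard': {'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'},
    },
    'handlers': {
        'onlyoffice_handler': {
            'level': 'INFO',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': os.path.join(LOG_DIR, 'onlyoffice.log'),
            'maxBytes': 1024 * 1024 * 100,  # 100 MB per file
            'backupCount': 5,
            'formatter': 'standard',
        },
    },
    'loggers': {
        'onlyoffice': {
            'handlers': ['onlyoffice_handler'],
            'level': 'INFO',
            'propagate': False,
        },
    },
})

# Any module calling logging.getLogger('onlyoffice') now writes to onlyoffice.log.
logging.getLogger('onlyoffice').info('ONLYOFFICE cache logging configured')
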