Mirror of https://github.com/haiwen/seahub.git

Merge branch '7.1' into 8.0
@@ -1,5 +1,6 @@
 # Copyright (c) 2012-2016 Seafile Ltd.
 import os
+import time
 import logging
 import posixpath
 import requests
@@ -27,8 +28,7 @@ from seahub.constants import PERMISSION_READ_WRITE
 from seahub.utils.repo import parse_repo_perm, is_repo_admin, is_repo_owner
 from seahub.utils.file_types import MARKDOWN, TEXT

-from seahub.settings import MAX_UPLOAD_FILE_NAME_LEN, \
-    FILE_LOCK_EXPIRATION_DAYS, OFFICE_TEMPLATE_ROOT
+from seahub.settings import MAX_UPLOAD_FILE_NAME_LEN, OFFICE_TEMPLATE_ROOT

 from seahub.drafts.models import Draft
 from seahub.drafts.utils import is_draft_file, get_file_draft
@@ -569,10 +569,24 @@ class FileView(APIView):
                 error_msg = _("File is locked")
                 return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

-            # lock file
-            expire = request.data.get('expire', FILE_LOCK_EXPIRATION_DAYS)
+            expire = request.data.get('expire', 0)
+            try:
+                expire = int(expire)
+            except ValueError:
+                error_msg = 'expire invalid.'
+                return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
+
+            if expire < 0:
+                error_msg = 'expire invalid.'
+                return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
+
+            # lock file
             try:
-                seafile_api.lock_file(repo_id, path, username, expire)
+                if expire > 0:
+                    seafile_api.lock_file(repo_id, path, username,
+                                          int(time.time()) + expire)
+                else:
+                    seafile_api.lock_file(repo_id, path, username)
             except SearpcError as e:
                 logger.error(e)
                 error_msg = 'Internal Server Error'
@@ -604,9 +618,24 @@ class FileView(APIView):
                 error_msg = _("File is not locked.")
                 return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

+            expire = request.data.get('expire', 0)
+            try:
+                expire = int(expire)
+            except ValueError:
+                error_msg = 'expire invalid.'
+                return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
+
+            if expire < 0:
+                error_msg = 'expire invalid.'
+                return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
+
             if locked_by_me or locked_by_online_office:
                 # refresh lock file
                 try:
+                    if expire > 0:
+                        seafile_api.refresh_file_lock(repo_id, path,
+                                                      int(time.time()) + expire)
+                    else:
                         seafile_api.refresh_file_lock(repo_id, path)
                 except SearpcError as e:
                     logger.error(e)
@@ -117,7 +117,7 @@ class SearchUser(APIView):
             # search user from user's contacts
             email_list += search_user_when_global_address_book_disabled(request, q)

-        ## search finished, now filter out some users
+        # search finished, now filter out some users

         # remove duplicate emails
         # get_emailusers_in_list can only accept 20 users at most
@@ -129,6 +129,7 @@ class SearchUser(APIView):
         email_list_json = json.dumps(email_list)
         user_obj_list = ccnet_api.get_emailusers_in_list('DB', email_list_json) + \
             ccnet_api.get_emailusers_in_list('LDAP', email_list_json)
+
         for user_obj in user_obj_list:
             if user_obj.is_active:
                 email_result.append(user_obj.email)
@@ -136,7 +137,7 @@ class SearchUser(APIView):
         if django_settings.ENABLE_ADDRESSBOOK_OPT_IN:
             # get users who has setted to show in address book
             listed_users = Profile.objects.filter(list_in_address_book=True).values('user')
-            listed_user_list = [ u['user'] for u in listed_users ]
+            listed_user_list = [u['user'] for u in listed_users]

             email_result = list(set(email_result) & set(listed_user_list))

@@ -164,6 +165,7 @@ class SearchUser(APIView):

         return Response({"users": formated_result})

+
 def format_searched_user_result(request, users, size):
     results = []

@@ -178,6 +180,7 @@ def format_searched_user_result(request, users, size):

     return results

+
 def search_user_from_ccnet(q):
     """ Return 10 items at most.
     """
@@ -204,13 +207,15 @@ def search_user_from_ccnet(q):

     return email_list

+
 def search_user_from_profile(q):
     """ Return 10 items at most.
     """
     # 'nickname__icontains' for search by nickname
     # 'contact_email__icontains' for search by contact email
     users = Profile.objects.filter(Q(nickname__icontains=q) | \
-            Q(contact_email__icontains=q)).values('user')[:10]
+            Q(contact_email__icontains=q) | \
+            Q(login_id__icontains=q)).values('user')[:10]

     email_list = []
     for user in users:
@@ -218,12 +223,14 @@ def search_user_from_profile(q):

     return email_list

+
 def search_user_from_profile_with_limits(q, limited_emails):
     """ Return 10 items at most.
     """
     # search within limited_emails
-    users = Profile.objects.filter(Q(user__in=limited_emails) &
-            (Q(nickname__icontains=q) | Q(contact_email__icontains=q))).values('user')[:10]
+    users = Profile.objects.filter(Q(user__in=limited_emails) & (Q(nickname__icontains=q) | \
+            Q(contact_email__icontains=q) | \
+            Q(login_id__icontains=q))).values('user')[:10]

     email_list = []
     for user in users:
@@ -231,7 +238,9 @@ def search_user_from_profile_with_limits(q, limited_emails):

     return email_list

+
 def search_user_when_global_address_book_disabled(request, q):
+
     """ Return 10 items at most.
     """

@@ -242,6 +251,7 @@ def search_user_when_global_address_book_disabled(request, q):
     # get user's contact list
     contacts = Contact.objects.get_contacts_by_user(username)
     for contact in contacts:
+
         # search user from contact list
         if q in contact.contact_email:
             email_list.append(contact.contact_email)
@@ -254,8 +264,11 @@ def search_user_when_global_address_book_disabled(request, q):
     email_list += search_user_from_profile_with_limits(q, limited_emails)

     current_user = User.objects.get(email=username)
-    if is_valid_email(q) and current_user.role.lower() != 'guest':
-        # if `q` is a valid email and current is not a guest user
+    if current_user.role.lower() != 'guest':
+
+        if is_valid_email(q):
+
+            # if `q` is a valid email
             email_list.append(q)

             # get user whose `contact_email` is `q`
@@ -263,4 +276,9 @@ def search_user_when_global_address_book_disabled(request, q):
             for user in users:
                 email_list.append(user['user'])

+        # get user whose `login_id` is `q`
+        username_by_login_id = Profile.objects.get_username_by_login_id(q)
+        if username_by_login_id:
+            email_list.append(username_by_login_id)
+
     return email_list
@@ -2,6 +2,7 @@
 import os
 import stat
 import json
+import time
 import logging
 import posixpath
 from constance import config
@@ -112,6 +113,7 @@ def get_share_link_info(fileshare):

     return data

+
 def check_permissions_arg(request):

     permissions = request.data.get('permissions', '')
@@ -144,6 +146,7 @@ def check_permissions_arg(request):

     return perm

+
 class ShareLinks(APIView):

     authentication_classes = (TokenAuthentication, SessionAuthentication)
@@ -213,7 +216,8 @@ class ShareLinks(APIView):

                 try:
                     permission = seafile_api.check_permission_by_path(repo_id,
-                            folder_path, fileshare.username)
+                                                                      folder_path,
+                                                                      fileshare.username)
                 except Exception as e:
                     logger.error(e)
                     permission = ''
@@ -223,8 +227,7 @@ class ShareLinks(APIView):
         links_info = []
         for fs in fileshares:
             link_info = get_share_link_info(fs)
-            link_info['repo_folder_permission'] = \
-                    repo_folder_permission_dict.get(link_info['repo_id'], '')
+            link_info['repo_folder_permission'] = repo_folder_permission_dict.get(link_info['repo_id'], '')
             links_info.append(link_info)

         if len(links_info) == 1:
@@ -407,6 +410,7 @@ class ShareLinks(APIView):
         link_info = get_share_link_info(fs)
         return Response(link_info)

+
 class ShareLink(APIView):

     authentication_classes = (TokenAuthentication, SessionAuthentication)
@@ -477,7 +481,8 @@ class ShareLink(APIView):

         username = request.user.username
         repo_folder_permission = seafile_api.check_permission_by_path(repo_id,
-                folder_path, username)
+                                                                      folder_path,
+                                                                      username)
         if not repo_folder_permission:
             error_msg = 'Permission denied.'
             return api_error(status.HTTP_403_FORBIDDEN, error_msg)
@@ -561,6 +566,17 @@ class ShareLinkOnlineOfficeLock(APIView):
         3, File must have been locked by OnlineOffice.
         """

+        expire = request.data.get('expire', 0)
+        try:
+            expire = int(expire)
+        except ValueError:
+            error_msg = 'expire invalid.'
+            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
+
+        if expire < 0:
+            error_msg = 'expire invalid.'
+            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
+
         if SHARE_LINK_LOGIN_REQUIRED and \
                 not request.user.is_authenticated():
             error_msg = 'Permission denied.'
@@ -581,7 +597,8 @@ class ShareLinkOnlineOfficeLock(APIView):
         path = normalize_file_path(share_link.path)
         parent_dir = os.path.dirname(path)
         if seafile_api.check_permission_by_path(repo_id,
-                parent_dir, shared_by) != PERMISSION_READ_WRITE:
+                                                parent_dir,
+                                                shared_by) != PERMISSION_READ_WRITE:
             error_msg = 'Permission denied.'
             return api_error(status.HTTP_403_FORBIDDEN, error_msg)

@@ -593,9 +610,11 @@ class ShareLinkOnlineOfficeLock(APIView):

         locked_by_online_office = if_locked_by_online_office(repo_id, path)
         if locked_by_online_office:
+
             # refresh lock file
             try:
-                seafile_api.refresh_file_lock(repo_id, path)
+                seafile_api.refresh_file_lock(repo_id, path,
+                                              int(time.time()) + expire)
             except SearpcError as e:
                 logger.error(e)
                 error_msg = 'Internal Server Error'
@@ -681,7 +700,8 @@ class ShareLinkDirents(APIView):
         try:
             current_commit = seafile_api.get_commit_list(repo_id, 0, 1)[0]
             dirent_list = seafile_api.list_dir_by_commit_and_path(repo_id,
-                    current_commit.id, path, -1, -1)
+                                                                  current_commit.id,
+                                                                  path, -1, -1)
         except Exception as e:
             logger.error(e)
             error_msg = 'Internal Server Error'
@@ -7,6 +7,7 @@ from django.utils.http import int_to_base36
 from seaserv import ccnet_api

 from seahub.base.accounts import User
+from seahub.base.templatetags.seahub_tags import email2contact_email
 from seahub.auth import authenticate
 from seahub.auth.tokens import default_token_generator
 from seahub.options.models import UserOptions
@@ -152,7 +153,8 @@ class PasswordResetForm(forms.Form):
         }

         send_html_email(_("Reset Password on %s") % site_name,
-                  email_template_name, c, None, [user.username])
+                        email_template_name, c, None,
+                        [email2contact_email(user.username)])

 class SetPasswordForm(forms.Form):
     """
@@ -1,59 +0,0 @@
-{% load i18n %}
-<script type="text/javascript">
-var SEAFILE_GLOBAL = {
-    csrfCookieName: '{{ CSRF_COOKIE_NAME }}'
-};
-
-{% if can_edit %}
-var interval;
-{% if not share_link_token %}
-var refreshLock = function() {
-    $.ajax({
-        url: '{% url "api-v2.1-file-view" repo_id %}' + '?p={{path|urlencode}}',
-        type: 'PUT',
-        dataType: 'json',
-        data: {
-            'operation': 'refresh-lock'
-        },
-        cache: false,
-        beforeSend: prepareCSRFToken,
-        success: function(data) {
-        },
-        error: function(xhr) {
-            if (xhr.responseText) {
-                feedback(JSON.parse(xhr.responseText).error_msg, 'error');
-            } else {
-                feedback("{% trans "Failed. Please check the network." %}", 'error');
-            }
-            clearInterval(interval);
-        }
-    });
-};
-{% else %}
-var refreshLock = function() {
-    $.ajax({
-        url: '{% url "api-v2.1-share-link-online-office-lock" share_link_token %}',
-        type: 'PUT',
-        dataType: 'json',
-        cache: false,
-        beforeSend: prepareCSRFToken,
-        success: function(data) {
-        },
-        error: function(xhr) {
-            if (xhr.responseText) {
-                feedback(JSON.parse(xhr.responseText).error_msg, 'error');
-            } else {
-                feedback("{% trans "Failed. Please check the network." %}", 'error');
-            }
-            clearInterval(interval);
-        }
-    });
-};
-{% endif %}
-interval = setInterval(refreshLock, 60 * 1000);
-{% endif %}
-</script>
-{% if enable_watermark %}
-<script type="text/javascript" src="{{ MEDIA_URL }}js/watermark.js"></script>
-<script type="text/javascript">{% include "snippets/add_watermark.html" %}</script>
-{% endif %}
@@ -53,6 +53,67 @@ html, body { padding:0; margin:0; height:100%; }
     };
     var docEditor = new DocsAPI.DocEditor("placeholder", config);
 </script>
-{% include 'snippets/wopi_onlyoffice_js.html' %}
+<script type="text/javascript">
+var SEAFILE_GLOBAL = {
+    csrfCookieName: '{{ CSRF_COOKIE_NAME }}'
+};
+
+{% if can_edit %}
+var interval;
+{% if not share_link_token %}
+var refreshLock = function() {
+    $.ajax({
+        url: '{% url "api-v2.1-file-view" repo_id %}' + '?p={{path|urlencode}}',
+        type: 'PUT',
+        dataType: 'json',
+        data: {
+            'operation': 'refresh-lock',
+            'expire': 2400
+        },
+        cache: false,
+        beforeSend: prepareCSRFToken,
+        success: function(data) {
+        },
+        error: function(xhr) {
+            if (xhr.responseText) {
+                feedback(JSON.parse(xhr.responseText).error_msg, 'error');
+            } else {
+                feedback("{% trans "Failed. Please check the network." %}", 'error');
+            }
+            clearInterval(interval);
+        }
+    });
+};
+{% else %}
+var refreshLock = function() {
+    $.ajax({
+        url: '{% url "api-v2.1-share-link-online-office-lock" share_link_token %}',
+        type: 'PUT',
+        dataType: 'json',
+        data: {
+            'expire': 2400
+        },
+        cache: false,
+        beforeSend: prepareCSRFToken,
+        success: function(data) {
+        },
+        error: function(xhr) {
+            if (xhr.responseText) {
+                feedback(JSON.parse(xhr.responseText).error_msg, 'error');
+            } else {
+                feedback("{% trans "Failed. Please check the network." %}", 'error');
+            }
+            clearInterval(interval);
+        }
+    });
+};
+{% endif %}
+interval = setInterval(refreshLock, 40 * 60 * 1000);
+{% endif %}
+</script>
+{% if enable_watermark %}
+<script type="text/javascript" src="{{ MEDIA_URL }}js/watermark.js"></script>
+<script type="text/javascript">{% include "snippets/add_watermark.html" %}</script>
+{% endif %}
 </body>
 </html>
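
In both branches of the template the refresh request now sends 'expire': 2400 and the polling interval was raised from 60 * 1000 ms to 40 * 60 * 1000 ms, so the client asks for a 2400-second lock and refreshes it once per 40 minutes, matching the server-side int(time.time()) + 40 * 60 expiry used elsewhere in this commit. A quick sanity check of that arithmetic:

    # Client refresh cadence vs. requested lock lifetime (values from the template above).
    refresh_interval_ms = 40 * 60 * 1000      # setInterval(refreshLock, 40 * 60 * 1000)
    requested_expire_s = 2400                 # data: {'expire': 2400}
    assert refresh_interval_ms // 1000 == requested_expire_s == 40 * 60
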
@@ -45,6 +45,7 @@
     document.getElementById('office_form').submit();
     document.getElementById('office_frame').className = '';
 </script>
-{% include 'snippets/wopi_onlyoffice_js.html' %}
+<script type="text/javascript" src="{{ MEDIA_URL }}js/watermark.js"></script>
+<script type="text/javascript">{% include "snippets/add_watermark.html" %}</script>
 </body>
 </html>
@@ -7,6 +7,7 @@ view_snapshot_file, view_shared_file, etc.

 import os
 import json
+import time
 import stat
 import urllib.request, urllib.error, urllib.parse
 import chardet
@@ -790,9 +791,11 @@ def view_lib_file(request, repo_id, path):
         if is_pro_version() and can_edit:
             try:
                 if not is_locked:
-                    seafile_api.lock_file(repo_id, path, ONLINE_OFFICE_LOCK_OWNER, 0)
+                    seafile_api.lock_file(repo_id, path, ONLINE_OFFICE_LOCK_OWNER,
+                                          int(time.time()) + 40 * 60)
                 elif locked_by_online_office:
-                    seafile_api.refresh_file_lock(repo_id, path)
+                    seafile_api.refresh_file_lock(repo_id, path,
+                                                  int(time.time()) + 40 * 60)
             except Exception as e:
                 logger.error(e)

@@ -1163,9 +1166,11 @@ def view_shared_file(request, fileshare):
         locked_by_online_office = if_locked_by_online_office(repo_id, path)
         try:
             if not is_locked:
-                seafile_api.lock_file(repo_id, path, ONLINE_OFFICE_LOCK_OWNER, 0)
+                seafile_api.lock_file(repo_id, path, ONLINE_OFFICE_LOCK_OWNER,
+                                      int(time.time()) + 40 * 60)
             elif locked_by_online_office:
-                seafile_api.refresh_file_lock(repo_id, path)
+                seafile_api.refresh_file_lock(repo_id, path,
+                                              int(time.time()) + 40 * 60)
         except Exception as e:
             logger.error(e)

@@ -3,11 +3,13 @@

 import os
 import json
+import time
 import logging
-import urllib.request, urllib.error, urllib.parse
+import urllib.request
+import urllib.error
+import urllib.parse
 import requests
 import hashlib
-import urllib.parse
 import posixpath
 import datetime

@@ -21,8 +23,10 @@ from seaserv import seafile_api

 from seahub.base.accounts import User, ANONYMOUS_EMAIL
 from seahub.base.templatetags.seahub_tags import email2nickname
-from seahub.utils import gen_inner_file_get_url, \
-    gen_inner_file_upload_url, is_pro_version
+from seahub.utils import gen_inner_file_get_url, gen_inner_file_upload_url, \
+    is_pro_version
+from seahub.utils.file_op import ONLINE_OFFICE_LOCK_OWNER, \
+    if_locked_by_online_office
 from seahub.settings import SITE_ROOT

 from seahub.wopi.utils import get_file_info_by_token
@@ -32,13 +36,13 @@ json_content_type = 'application/json; charset=utf-8'

 WOPI_LOCK_EXPIRATION = 30 * 60

 def generate_file_lock_key_value(request):

     token = request.GET.get('access_token', None)

     info_dict = get_file_info_by_token(token)
     repo_id = info_dict['repo_id']
-    file_path= info_dict['file_path']
+    file_path = info_dict['file_path']

     repo = seafile_api.get_repo(repo_id)
     if repo.is_virtual:
@@ -55,25 +59,61 @@ def generate_file_lock_key_value(request):

     return lock_cache_key, x_wopi_lock

+
 def lock_file(request):

     key, value = generate_file_lock_key_value(request)
     cache.set(key, value, WOPI_LOCK_EXPIRATION)

+    token = request.GET.get('access_token', None)
+    info_dict = get_file_info_by_token(token)
+    repo_id = info_dict['repo_id']
+    file_path = info_dict['file_path']
+    seafile_api.lock_file(repo_id, file_path, ONLINE_OFFICE_LOCK_OWNER,
+                          int(time.time()) + 40 * 60)
+
+
 def unlock_file(request):

     key, value = generate_file_lock_key_value(request)
     cache.delete(key)

+    token = request.GET.get('access_token', None)
+    info_dict = get_file_info_by_token(token)
+    repo_id = info_dict['repo_id']
+    file_path = info_dict['file_path']
+    seafile_api.unlock_file(repo_id, file_path)
+
+
 def refresh_file_lock(request):
-    lock_file(request)
+
+    key, value = generate_file_lock_key_value(request)
+    cache.set(key, value, WOPI_LOCK_EXPIRATION)
+
+    token = request.GET.get('access_token', None)
+    info_dict = get_file_info_by_token(token)
+    repo_id = info_dict['repo_id']
+    file_path = info_dict['file_path']
+    seafile_api.refresh_file_lock(repo_id, file_path,
+                                  int(time.time()) + 40 * 60)
+
+
 def file_is_locked(request):
-    key, value = generate_file_lock_key_value(request)
-    return True if cache.get(key, '') else False
+
+    token = request.GET.get('access_token', None)
+    info_dict = get_file_info_by_token(token)
+    repo_id = info_dict['repo_id']
+    file_path = info_dict['file_path']
+
+    return if_locked_by_online_office(repo_id, file_path)
+

 def get_current_lock_id(request):

     key, value = generate_file_lock_key_value(request)
     return cache.get(key, '')

+
 def access_token_check(func):

     def _decorated(view, request, file_id, *args, **kwargs):
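
After this hunk the WOPI lock helpers no longer rely on Django's cache alone: lock_file and refresh_file_lock also place or refresh a real Seafile file lock owned by ONLINE_OFFICE_LOCK_OWNER for 40 minutes, and file_is_locked is answered from that Seafile lock via if_locked_by_online_office rather than from the cached WOPI lock id. A condensed sketch of the resulting lock_file behaviour (names taken from the diff; the value of ONLINE_OFFICE_LOCK_OWNER and the cache-key details are assumptions, not shown here):

    import time

    from django.core.cache import cache
    from seaserv import seafile_api

    WOPI_LOCK_EXPIRATION = 30 * 60

    def lock_file_sketch(lock_cache_key, x_wopi_lock, repo_id, file_path, lock_owner):
        # 1) remember the WOPI lock id so later lock-mismatch checks
        #    (409 + X-WOPI-Lock header) can compare against it
        cache.set(lock_cache_key, x_wopi_lock, WOPI_LOCK_EXPIRATION)
        # 2) additionally take a Seafile-level lock visible to other clients,
        #    expiring 40 minutes from now
        seafile_api.lock_file(repo_id, file_path, lock_owner,
                              int(time.time()) + 40 * 60)
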
@@ -87,17 +127,17 @@ def access_token_check(func):
         info_dict = get_file_info_by_token(token)
         if not info_dict:
             logger.error('Get wopi cache value failed: wopi_access_token_%s.' % token)
-            return HttpResponse(json.dumps({}), status=404,
-                                content_type=json_content_type)
+            return HttpResponse(json.dumps({}), status=404, content_type=json_content_type)

         request_user = info_dict['request_user']
         repo_id = info_dict['repo_id']
-        file_path= info_dict['file_path']
+        file_path = info_dict['file_path']
         obj_id = info_dict['obj_id']

         if not request_user or not repo_id or not file_path:
-            logger.error('File info invalid, user: %s, repo_id: %s, path: %s.' \
-                    % (request_user, repo_id, file_path))
+            logger.error('File info invalid, user: %s, repo_id: %s, path: %s.' % (request_user,
+                                                                                  repo_id,
+                                                                                  file_path))
             return HttpResponse(json.dumps({}), status=404,
                                 content_type=json_content_type)

@@ -141,7 +181,7 @@ class WOPIFilesView(APIView):
         info_dict = get_file_info_by_token(token)
         request_user = info_dict['request_user']
         repo_id = info_dict['repo_id']
-        file_path= info_dict['file_path']
+        file_path = info_dict['file_path']
         obj_id = info_dict['obj_id']
         can_edit = info_dict['can_edit']
         can_download = info_dict['can_download']
@@ -153,8 +193,7 @@ class WOPIFilesView(APIView):
             obj_id = seafile_api.get_file_id_by_path(repo_id, file_path)

         try:
-            file_size = seafile_api.get_file_size(repo.store_id,
-                    repo.version, obj_id)
+            file_size = seafile_api.get_file_size(repo.store_id, repo.version, obj_id)
         except SearpcError as e:
             logger.error(e)
             return HttpResponse(json.dumps({}), status=500,
@@ -222,7 +261,8 @@ class WOPIFilesView(APIView):
     def post(self, request, file_id, format=None):

         response_409 = HttpResponse(json.dumps({}),
-                status=409, content_type=json_content_type)
+                                    status=409,
+                                    content_type=json_content_type)

         x_wopi_override = request.META.get('HTTP_X_WOPI_OVERRIDE', None)
         x_wopi_lock = request.META.get('HTTP_X_WOPI_LOCK', None)
@@ -238,7 +278,9 @@ class WOPIFilesView(APIView):
             # If the file is currently locked
             # and the X-WOPI-OldLock value does NOT match the lock currently on the file
             # the host must return a “lock mismatch” response (409 Conflict)
-            # and include an X-WOPI-Lock response header containing the value of the current lock on the file
+            # and include an X-WOPI-Lock response header
+            # containing the value of the current lock on the file
+
             response_409['X-WOPI-Lock'] = current_lock_id
             return response_409
         else:
@@ -317,7 +359,9 @@ class WOPIFilesView(APIView):
             logger.info('HTTP_X_WOPI_LOCK: %s' % x_wopi_lock)
             logger.info('HTTP_X_WOPI_OLDLOCK: %s' % x_wopi_oldlock)
             return HttpResponse(json.dumps({'error_msg': 'HTTP_X_WOPI_OVERRIDE invalid'}),
-                    status=401, content_type=json_content_type)
+                                status=401,
+                                content_type=json_content_type)

+
 class WOPIFilesContentsView(APIView):

@@ -329,7 +373,7 @@ class WOPIFilesContentsView(APIView):
         token = request.GET.get('access_token', None)
         info_dict = get_file_info_by_token(token)
         repo_id = info_dict['repo_id']
-        file_path= info_dict['file_path']
+        file_path = info_dict['file_path']
         obj_id = info_dict['obj_id']

         if not obj_id:
@@ -339,7 +383,10 @@ class WOPIFilesContentsView(APIView):
         file_name = os.path.basename(file_path)
         try:
             fileserver_token = seafile_api.get_fileserver_access_token(repo_id,
-                    obj_id, 'view', '', use_onetime = False)
+                                                                        obj_id,
+                                                                        'view',
+                                                                        '',
+                                                                        use_onetime=False)
         except SearpcError as e:
             logger.error(e)
             return HttpResponse(json.dumps({}), status=500,
@@ -367,15 +414,17 @@ class WOPIFilesContentsView(APIView):
         info_dict = get_file_info_by_token(token)
         request_user = info_dict['request_user']
         repo_id = info_dict['repo_id']
-        file_path= info_dict['file_path']
+        file_path = info_dict['file_path']

         try:
             file_obj = request.read()

             # get file update url
-            fake_obj_id = {'online_office_update': True,}
+            fake_obj_id = {'online_office_update': True}
             token = seafile_api.get_fileserver_access_token(repo_id,
-                    json.dumps(fake_obj_id), 'update', request_user)
+                                                             json.dumps(fake_obj_id),
+                                                             'update',
+                                                             request_user)

             if not token:
                 return HttpResponse(json.dumps({}), status=500,