diff --git a/frontend/src/components/common/notice-item.js b/frontend/src/components/common/notice-item.js
index 384ebd9ee8..b63343d4dc 100644
--- a/frontend/src/components/common/notice-item.js
+++ b/frontend/src/components/common/notice-item.js
@@ -33,6 +33,7 @@ class NoticeItem extends React.Component {
let groupStaff = detail.group_staff_name;
+ // group names do not support special characters
let userHref = siteRoot + 'profile/' + detail.group_staff_email + '/';
let groupHref = siteRoot + 'group/' + detail.group_id + '/';
let groupName = detail.group_name;
@@ -58,15 +59,22 @@ class NoticeItem extends React.Component {
let path = detail.path;
let notice = '';
- let repoLink = '<a href=' + repoUrl + '>' + repoName + '</a>';
+ // 1. handle translation
if (path === '/') { // share repo
notice = gettext('{share_from} has shared a library named {repo_link} to you.');
} else { // share folder
notice = gettext('{share_from} has shared a folder named {repo_link} to you.');
}
+ // 2. handle XSS (cross-site scripting)
notice = notice.replace('{share_from}', shareFrom);
- notice = notice.replace('{repo_link}', repoLink);
+ notice = notice.replace('{repo_link}', `{tagA}${repoName}{/tagA}`);
+ notice = Utils.HTMLescape(notice);
+
+ // 3. add the jump link
+ notice = notice.replace('{tagA}', `<a href=${repoUrl}>`);
+ notice = notice.replace('{/tagA}', '</a>');
+
return {avatar_url, notice};
}
@@ -84,16 +92,24 @@ class NoticeItem extends React.Component {
let path = detail.path;
let notice = '';
- let repoLink = '<a href=' + repoUrl + '>' + repoName + '</a>';
- let groupLink = '<a href=' + groupUrl + '>' + groupName + '</a>';
+ // 1. handle translation
if (path === '/') {
notice = gettext('{share_from} has shared a library named {repo_link} to group {group_link}.');
} else {
notice = gettext('{share_from} has shared a folder named {repo_link} to group {group_link}.');
}
+
+ // 2. handle XSS (cross-site scripting)
notice = notice.replace('{share_from}', shareFrom);
- notice = notice.replace('{repo_link}', repoLink);
- notice = notice.replace('{group_link}', groupLink);
+ notice = notice.replace('{repo_link}', `{tagA}${repoName}{/tagA}`);
+ notice = notice.replace('{group_link}', `{tagB}${groupName}{/tagB}`);
+ notice = Utils.HTMLescape(notice);
+
+ // 3. add jump links
+ notice = notice.replace('{tagA}', `<a href=${repoUrl}>`);
+ notice = notice.replace('{/tagA}', '</a>');
+ notice = notice.replace('{tagB}', `<a href=${groupUrl}>`);
+ notice = notice.replace('{/tagB}', '</a>');
return {avatar_url, notice};
}
@@ -105,32 +121,50 @@ class NoticeItem extends React.Component {
let repoName = detail.repo_name;
let repoUrl = siteRoot + 'library/' + detail.repo_id + '/' + repoName + '/';
+ // 1. handle translation
let notice = gettext('{user} has transferred a library named {repo_link} to you.');
- let repoLink = '<a href=' + repoUrl + '>' + repoName + '</a>';
+
+ // 2. handle XSS (cross-site scripting)
notice = notice.replace('{user}', repoOwner);
- notice = notice.replace('{repo_link}', repoLink);
+ notice = notice.replace('{repo_link}', `{tagA}${repoName}{/tagA}`);
+ notice = Utils.HTMLescape(notice);
+
+ // 3. add the jump link
+ notice = notice.replace('{tagA}', `<a href=${repoUrl}>`);
+ notice = notice.replace('{/tagA}', '</a>');
return {avatar_url, notice};
}
if (noticeType === MSG_TYPE_FILE_UPLOADED) {
let avatar_url = detail.uploaded_user_avatar_url;
let fileName = detail.file_name;
- let fileLink = siteRoot + 'lib/' + detail.repo_id + '/' + 'file' + Utils.encodePath(detail.file_path);
+ let fileLink = siteRoot + 'lib/' + detail.repo_id + '/' + 'file' + detail.file_path;
let folderName = detail.folder_name;
- let folderLink = siteRoot + 'library/' + detail.repo_id + '/' + detail.repo_name + Utils.encodePath(detail.folder_path);
+ let folderLink = siteRoot + 'library/' + detail.repo_id + '/' + detail.repo_name + detail.folder_path;
let notice = '';
if (detail.repo_id) { // todo is repo exist ?
- let uploadFileLink = '<a href=' + fileLink + '>' + fileName + '</a>';
- let uploadedLink = '<a href=' + folderLink + '>' + folderName + '</a>';
+ // 1. handle translation
+ notice = gettext('A file named {upload_file_link} is uploaded to {uploaded_link}.');
- notice = gettext('A file named {upload_file_link} is uploaded to {uploaded_link}.');
- notice = notice.replace('{upload_file_link}', uploadFileLink);
- notice = notice.replace('{uploaded_link}', uploadedLink);
+ // 2. handle XSS (cross-site scripting)
+ notice = notice.replace('{upload_file_link}', `{tagA}${fileName}{/tagA}`);
+ notice = notice.replace('{uploaded_link}', `{tagB}${folderName}{/tagB}`);
+ notice = Utils.HTMLescape(notice);
+
+ // 3. add jump links
+ notice = notice.replace('{tagA}', `<a href=${fileLink}>`);
+ notice = notice.replace('{/tagA}', '</a>');
+ notice = notice.replace('{tagB}', `<a href=${folderLink}>`);
+ notice = notice.replace('{/tagB}', '</a>');
} else {
+ // 1. handle translation
notice = gettext('A file named {upload_file_link} is uploaded to {uploaded_link}.');
- notice = notice.replace('{upload_file_link}', fileName);
- notice = notice.replace('{uploaded_link}', 'Deleted Library');
+
+ // 2. handle XSS (cross-site scripting)
+ notice = notice.replace('{upload_file_link}', `${fileName}`);
+ notice = Utils.HTMLescape(notice);
+ notice = notice.replace('{uploaded_link}', `Deleted Library`);
}
return {avatar_url, notice};
}
@@ -144,10 +178,17 @@ class NoticeItem extends React.Component {
let fileName = detail.file_name;
let fileUrl = siteRoot + 'lib/' + detail.repo_id + '/' + 'file' + detail.file_path;
+ // 1. handle translation
let notice = gettext('File {file_link} has a new comment from user {author}.');
- let fileLink = '<a href=' + fileUrl + '>' + fileName + '</a>';
- notice = notice.replace('{file_link}', fileLink);
+
+ // 2. handle XSS (cross-site scripting)
+ notice = notice.replace('{file_link}', `{tagA}${fileName}{/tagA}`);
notice = notice.replace('{author}', author);
+ notice = Utils.HTMLescape(notice);
+
+ // 3. add the jump link
+ notice = notice.replace('{tagA}', `<a href=${fileUrl}>`);
+ notice = notice.replace('{/tagA}', '</a>');
return {avatar_url, notice};
}
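
The three-step pattern repeated in notice-item.js above (substitute the translated placeholders, HTML-escape the whole string, then swap the surviving {tag} tokens for real anchor markup) is what keeps user-controlled repo/file/group names from being rendered as HTML. A minimal Python sketch of the same ordering, using django.utils.html.escape and a hypothetical build_notice helper with made-up repo_name/repo_url arguments:

    from django.utils.html import escape

    def build_notice(share_from, repo_name, repo_url):
        # 1. translated template with placeholders
        notice = '{share_from} has shared a library named {repo_link} to you.'
        # 2. substitute user-controlled names, then escape everything
        notice = notice.replace('{share_from}', share_from)
        notice = notice.replace('{repo_link}', '{tagA}%s{/tagA}' % repo_name)
        notice = escape(notice)
        # 3. only now splice in the real markup
        notice = notice.replace('{tagA}', '<a href="%s">' % repo_url)
        notice = notice.replace('{/tagA}', '</a>')
        return notice

Because the escape happens before step 3, a repo name containing <script> tags ends up as escaped text rather than executable markup.
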
diff --git a/frontend/src/pages/wiki/main-panel.js b/frontend/src/pages/wiki/main-panel.js
index feee738f6f..48d4ad1125 100644
--- a/frontend/src/pages/wiki/main-panel.js
+++ b/frontend/src/pages/wiki/main-panel.js
@@ -84,6 +84,7 @@ class MainPanel extends Component {
const isViewingFile = this.props.pathExist && !this.props.isDataLoading && this.props.isViewFile;
return (
+
{this.props.content}
{!username &&
diff --git a/frontend/src/shared-dir-view.js b/frontend/src/shared-dir-view.js
index c3aca42854..8201ab2877 100644
--- a/frontend/src/shared-dir-view.js
+++ b/frontend/src/shared-dir-view.js
@@ -20,7 +20,7 @@ moment.locale(window.app.config.lang);
let loginUser = window.app.pageOptions.name;
const {
- token, dirName, sharedBy,
+ token, dirName, dirPath, sharedBy,
repoID, path,
mode, thumbnailSize, zipped,
trafficOverLimit, canDownload,
@@ -309,7 +309,7 @@ class SharedDirView extends React.Component {
ref={uploader => this.uploader = uploader}
dragAndDrop={false}
token={token}
- path={path}
+ path={dirPath}
repoID={repoID}
onFileUploadSuccess={this.onFileUploadSuccess}
/>
diff --git a/seahub/api2/endpoints/admin/address_book/groups.py b/seahub/api2/endpoints/admin/address_book/groups.py
index 4c0c28175b..c5bc412057 100644
--- a/seahub/api2/endpoints/admin/address_book/groups.py
+++ b/seahub/api2/endpoints/admin/address_book/groups.py
@@ -24,6 +24,7 @@ from seahub.api2.utils import to_python_boolean, api_error
from seahub.api2.throttling import UserRateThrottle
from seahub.api2.permissions import IsProVersion
from seahub.api2.authentication import TokenAuthentication
+from seahub.auth.models import ExternalDepartment
logger = logging.getLogger(__name__)
@@ -254,6 +255,12 @@ class AdminAddressBookGroup(APIView):
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
+ # delete the related external department record
+ try:
+ ExternalDepartment.objects.delete_by_group_id(group_id)
+ except Exception as e:
+ logger.error(e)
+
# send admin operation log signal
group_owner = group.creator_name
group_name = group.group_name
diff --git a/seahub/api2/endpoints/admin/dingtalk.py b/seahub/api2/endpoints/admin/dingtalk.py
index 54ab6a3bdf..11a9c9502f 100644
--- a/seahub/api2/endpoints/admin/dingtalk.py
+++ b/seahub/api2/endpoints/admin/dingtalk.py
@@ -26,13 +26,14 @@ from seahub.auth.models import SocialAuthUser
from seahub.profile.models import Profile
from seahub.avatar.models import Avatar
from seahub.group.utils import validate_group_name
+from seahub.auth.models import ExternalDepartment
from seahub.dingtalk.utils import dingtalk_get_access_token
from seahub.dingtalk.settings import ENABLE_DINGTALK, \
DINGTALK_DEPARTMENT_LIST_DEPARTMENT_URL, \
DINGTALK_DEPARTMENT_GET_DEPARTMENT_URL, \
DINGTALK_DEPARTMENT_GET_DEPARTMENT_USER_LIST_URL, \
- DINGTALK_DEPARTMENT_USER_SIZE
+ DINGTALK_DEPARTMENT_USER_SIZE, DINGTALK_PROVIDER
DEPARTMENT_OWNER = 'system admin'
@@ -232,15 +233,6 @@ class AdminDingtalkDepartmentsImport(APIView):
throttle_classes = (UserRateThrottle,)
permission_classes = (IsAdminUser, IsProVersion)
- def _admin_check_group_name_conflict(self, new_group_name):
- checked_groups = ccnet_api.search_groups(new_group_name, -1, -1)
-
- for g in checked_groups:
- if g.group_name == new_group_name:
- return True, g
-
- return False, None
-
def _api_department_success_msg(self, department_obj_id, department_obj_name, group_id):
return {
'type': 'department',
@@ -301,12 +293,14 @@ class AdminDingtalkDepartmentsImport(APIView):
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
# get department list
+ # https://developers.dingtalk.com/document/app/obtain-the-department-list
data = {'access_token': access_token, 'id': department_id}
current_department_resp_json = requests.get(DINGTALK_DEPARTMENT_GET_DEPARTMENT_URL, params=data).json()
current_department_list = [current_department_resp_json]
sub_department_resp_json = requests.get(DINGTALK_DEPARTMENT_LIST_DEPARTMENT_URL, params=data).json()
sub_department_list = sub_department_resp_json.get('department', [])
department_list = current_department_list + sub_department_list
+ department_list = sorted(department_list, key=lambda x:x['id'])
# get department user list
data = {
@@ -327,6 +321,7 @@ class AdminDingtalkDepartmentsImport(APIView):
# check department argument
new_group_name = department_obj.get('name')
department_obj_id = department_obj.get('id')
+ parent_department_id = department_obj.get('parentid', 0)
if department_obj_id is None or not new_group_name or not validate_group_name(new_group_name):
failed_msg = self._api_department_failed_msg(
department_obj_id, new_group_name, '部门参数错误')
@@ -337,7 +332,6 @@ class AdminDingtalkDepartmentsImport(APIView):
if index == 0:
parent_group_id = -1
else:
- parent_department_id = department_obj.get('parentid')
parent_group_id = department_map_to_group_dict.get(parent_department_id)
if parent_group_id is None:
@@ -346,10 +340,11 @@ class AdminDingtalkDepartmentsImport(APIView):
failed.append(failed_msg)
continue
- # check department exist by group name
- exist, exist_group = self._admin_check_group_name_conflict(new_group_name)
- if exist:
- department_map_to_group_dict[department_obj_id] = exist_group.id
+ # check department exist
+ exist_department = ExternalDepartment.objects.get_by_provider_and_outer_id(
+ DINGTALK_PROVIDER, department_obj_id)
+ if exist_department:
+ department_map_to_group_dict[department_obj_id] = exist_department.group_id
failed_msg = self._api_department_failed_msg(
department_obj_id, new_group_name, '部门已存在')
failed.append(failed_msg)
@@ -362,6 +357,12 @@ class AdminDingtalkDepartmentsImport(APIView):
seafile_api.set_group_quota(group_id, -2)
+ ExternalDepartment.objects.create(
+ group_id=group_id,
+ provider=DINGTALK_PROVIDER,
+ outer_id=department_obj_id,
+ )
+
department_map_to_group_dict[department_obj_id] = group_id
success_msg = self._api_department_success_msg(
department_obj_id, new_group_name, group_id)
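
Sorting department_list by id before iterating (here and in the work-weixin importer below) matters because department_map_to_group_dict is filled as the loop runs: a child department can only resolve parent_group_id if its parent was processed earlier, which the sort guarantees under the assumption that a parent department's id is always smaller than its children's. A toy illustration of that dependency (all data below is made up):

    # toy data: the child (id 12) appears before its parent (id 10) in the API response
    departments = [
        {'id': 12, 'name': 'backend', 'parentid': 10},
        {'id': 10, 'name': 'engineering', 'parentid': 1},
        {'id': 1, 'name': 'root', 'parentid': 0},
    ]

    department_map_to_group_dict = {}
    for index, dep in enumerate(sorted(departments, key=lambda x: x['id'])):
        parent_group_id = -1 if index == 0 else department_map_to_group_dict.get(dep['parentid'])
        assert parent_group_id is not None  # holds only because of the sort
        department_map_to_group_dict[dep['id']] = 'group-%d' % dep['id']  # stand-in for a real group id
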
diff --git a/seahub/api2/endpoints/admin/users.py b/seahub/api2/endpoints/admin/users.py
index 3da711ee79..4b0e960130 100644
--- a/seahub/api2/endpoints/admin/users.py
+++ b/seahub/api2/endpoints/admin/users.py
@@ -13,6 +13,7 @@ from rest_framework.views import APIView
from django.db.models import Q
from django.core.cache import cache
from django.utils.translation import ugettext as _
+from django.utils.timezone import make_naive, is_aware
from seaserv import seafile_api, ccnet_api
@@ -81,17 +82,32 @@ def get_user_last_access_time(email, last_login_time):
if update_events:
update_last_access = update_events[0].timestamp
+ # make_naive converts an aware datetime into a naive local one, e.g.
+ # before make_naive: 2021-04-09 05:32:30+00:00 (tzinfo: UTC)
+ # after make_naive:  2021-04-09 13:32:30 (tzinfo: None)
last_access_time_list = []
if last_login_time:
+ if is_aware(last_login_time):
+ last_login_time = make_naive(last_login_time)
last_access_time_list.append(last_login_time)
if device_last_access:
+ if is_aware(device_last_access):
+ device_last_access = make_naive(device_last_access)
last_access_time_list.append(device_last_access)
if audit_last_access:
+ if is_aware(audit_last_access):
+ audit_last_access = make_naive(audit_last_access)
last_access_time_list.append(utc_to_local(audit_last_access))
if update_last_access:
+ if is_aware(update_last_access):
+ update_last_access = make_naive(update_last_access)
last_access_time_list.append(utc_to_local(update_last_access))
if not last_access_time_list:
@@ -914,8 +930,15 @@ class AdminSearchUser(APIView):
user.institution = profile.institution
data = []
+ has_appended = []
+
for user in users:
+ if user.email in has_appended:
+ continue
+ else:
+ has_appended.append(user.email)
+
info = {}
info['email'] = user.email
info['name'] = email2nickname(user.email)
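
The has_appended list makes each duplicate check a linear scan over emails already seen; for large admin search results a set gives the same de-duplication with O(1) membership tests. A possible variant (not what the patch uses, just a sketch with fake data):

    class FakeUser:
        def __init__(self, email):
            self.email = email

    users = [FakeUser('a@example.com'), FakeUser('b@example.com'), FakeUser('a@example.com')]

    seen_emails = set()
    deduped_users = []
    for user in users:
        if user.email in seen_emails:
            continue
        seen_emails.add(user.email)
        deduped_users.append(user)

    assert [u.email for u in deduped_users] == ['a@example.com', 'b@example.com']
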
diff --git a/seahub/api2/endpoints/admin/work_weixin.py b/seahub/api2/endpoints/admin/work_weixin.py
index bdb0b9c49c..bcc51b0772 100644
--- a/seahub/api2/endpoints/admin/work_weixin.py
+++ b/seahub/api2/endpoints/admin/work_weixin.py
@@ -26,6 +26,7 @@ from seahub.base.accounts import User
from seahub.utils.auth import gen_user_virtual_id
from seahub.auth.models import SocialAuthUser
from seahub.group.utils import validate_group_name
+from seahub.auth.models import ExternalDepartment
logger = logging.getLogger(__name__)
WORK_WEIXIN_DEPARTMENT_FIELD = 'department'
@@ -226,6 +227,7 @@ class AdminWorkWeixinDepartmentsImport(APIView):
permission_classes = (IsAdminUser,)
def _list_departments_from_work_weixin(self, access_token, department_id):
+ # https://work.weixin.qq.com/api/doc/90000/90135/90208
data = {
'access_token': access_token,
'id': department_id,
@@ -264,15 +266,6 @@ class AdminWorkWeixinDepartmentsImport(APIView):
return api_response_dic[WORK_WEIXIN_DEPARTMENT_MEMBERS_FIELD]
- def _admin_check_group_name_conflict(self, new_group_name):
- checked_groups = ccnet_api.search_groups(new_group_name, -1, -1)
-
- for g in checked_groups:
- if g.group_name == new_group_name:
- return True, g
-
- return False, None
-
def _api_department_success_msg(self, department_obj_id, department_obj_name, group_id):
return {
'type': 'department',
@@ -341,6 +334,7 @@ class AdminWorkWeixinDepartmentsImport(APIView):
if api_department_list is None:
error_msg = '获取企业微信组织架构失败'
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
+ api_department_list = sorted(api_department_list, key=lambda x:x['id'])
# list department members from work weixin
api_user_list = self._list_department_members_from_work_weixin(access_token, department_id)
@@ -357,6 +351,7 @@ class AdminWorkWeixinDepartmentsImport(APIView):
# check department argument
new_group_name = department_obj.get('name')
department_obj_id = department_obj.get('id')
+ parent_department_id = department_obj.get('parentid', 0)
if department_obj_id is None or not new_group_name or not validate_group_name(new_group_name):
failed_msg = self._api_department_failed_msg(
department_obj_id, new_group_name, '部门参数错误')
@@ -367,7 +362,6 @@ class AdminWorkWeixinDepartmentsImport(APIView):
if index == 0:
parent_group_id = -1
else:
- parent_department_id = department_obj.get('parentid')
parent_group_id = department_map_to_group_dict.get(parent_department_id)
if parent_group_id is None:
@@ -376,10 +370,11 @@ class AdminWorkWeixinDepartmentsImport(APIView):
failed.append(failed_msg)
continue
- # check department exist by group name
- exist, exist_group = self._admin_check_group_name_conflict(new_group_name)
- if exist:
- department_map_to_group_dict[department_obj_id] = exist_group.id
+ # check department exist
+ exist_department = ExternalDepartment.objects.get_by_provider_and_outer_id(
+ WORK_WEIXIN_PROVIDER, department_obj_id)
+ if exist_department:
+ department_map_to_group_dict[department_obj_id] = exist_department.group_id
failed_msg = self._api_department_failed_msg(
department_obj_id, new_group_name, '部门已存在')
failed.append(failed_msg)
@@ -392,6 +387,12 @@ class AdminWorkWeixinDepartmentsImport(APIView):
seafile_api.set_group_quota(group_id, -2)
+ ExternalDepartment.objects.create(
+ group_id=group_id,
+ provider=WORK_WEIXIN_PROVIDER,
+ outer_id=department_obj_id,
+ )
+
department_map_to_group_dict[department_obj_id] = group_id
success_msg = self._api_department_success_msg(
department_obj_id, new_group_name, group_id)
diff --git a/seahub/api2/views.py b/seahub/api2/views.py
index 7af19ccae1..0aa0c00763 100644
--- a/seahub/api2/views.py
+++ b/seahub/api2/views.py
@@ -392,11 +392,8 @@ class AccountInfo(APIView):
username, file_updates_email_interval)
if collaborate_email_interval is not None:
- if collaborate_email_interval <= 0:
- UserOptions.objects.unset_collaborate_email_interval(username)
- else:
- UserOptions.objects.set_collaborate_email_interval(
- username, collaborate_email_interval)
+ UserOptions.objects.set_collaborate_email_interval(
+ username, collaborate_email_interval)
return Response(self._get_account_info(request))
diff --git a/seahub/auth/models.py b/seahub/auth/models.py
index 36e7222253..5fe2a87545 100644
--- a/seahub/auth/models.py
+++ b/seahub/auth/models.py
@@ -162,6 +162,28 @@ class SocialAuthUser(models.Model):
db_table = 'social_auth_usersocialauth'
+class ExternalDepartmentManager(models.Manager):
+ def get_by_provider_and_outer_id(self, provider, outer_id):
+ return self.filter(provider=provider, outer_id=outer_id).first()
+
+ def delete_by_group_id(self, group_id):
+ self.filter(group_id=group_id).delete()
+
+
+class ExternalDepartment(models.Model):
+ group_id = models.IntegerField(unique=True)
+ provider = models.CharField(max_length=32)
+ outer_id = models.BigIntegerField()
+
+ objects = ExternalDepartmentManager()
+
+ class Meta:
+ """Meta data"""
+ app_label = "base"
+ unique_together = ('provider', 'outer_id')
+ db_table = 'external_department'
+
+
# # handle signals
from django.dispatch import receiver
from registration.signals import user_deleted
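
The new model presumably also needs a Django schema migration under seahub/base/migrations/ (none is shown in this diff); a rough sketch of what it could look like — the dependency entry is a placeholder, not the real migration name:

    from django.db import migrations, models


    class Migration(migrations.Migration):

        dependencies = [
            ('base', '0001_initial'),  # placeholder; should point at the app's latest migration
        ]

        operations = [
            migrations.CreateModel(
                name='ExternalDepartment',
                fields=[
                    ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False)),
                    ('group_id', models.IntegerField(unique=True)),
                    ('provider', models.CharField(max_length=32)),
                    ('outer_id', models.BigIntegerField()),
                ],
                options={'db_table': 'external_department'},
            ),
            migrations.AlterUniqueTogether(
                name='externaldepartment',
                unique_together={('provider', 'outer_id')},
            ),
        ]

This mirrors the DDL added to sql/mysql.sql and sql/sqlite3.sql at the bottom of the patch.
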
diff --git a/seahub/dingtalk/management/__init__.py b/seahub/dingtalk/management/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/seahub/dingtalk/management/commands/__init__.py b/seahub/dingtalk/management/commands/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/seahub/dingtalk/management/commands/fix_dingtalk_departments_sync.py b/seahub/dingtalk/management/commands/fix_dingtalk_departments_sync.py
new file mode 100644
index 0000000000..747c3d384d
--- /dev/null
+++ b/seahub/dingtalk/management/commands/fix_dingtalk_departments_sync.py
@@ -0,0 +1,122 @@
+import logging
+import requests
+import json
+from datetime import datetime
+
+from django.core.management.base import BaseCommand
+
+from seaserv import ccnet_api
+from seahub.dingtalk.utils import dingtalk_get_access_token
+from seahub.dingtalk.settings import ENABLE_DINGTALK, \
+ DINGTALK_DEPARTMENT_LIST_DEPARTMENT_URL, DINGTALK_PROVIDER
+from seahub.auth.models import ExternalDepartment
+
+logger = logging.getLogger(__name__)
+
+
+class Command(BaseCommand):
+ help = "Fix the sync of imported dingtalk departments to the database."
+
+ def println(self, msg):
+ self.stdout.write('[%s] %s\n' % (str(datetime.now()), msg))
+
+ def log_error(self, msg):
+ logger.error(msg)
+ self.println(msg)
+
+ def log_info(self, msg):
+ logger.info(msg)
+ self.println(msg)
+
+ def log_debug(self, msg):
+ logger.debug(msg)
+ self.println(msg)
+
+ def handle(self, *args, **options):
+ self.log_debug('Start fix sync dingtalk departments...')
+ self.do_action()
+ self.log_debug('Finish fix sync dingtalk departments.\n')
+
+ def get_group_by_name(self, group_name):
+ checked_groups = ccnet_api.search_groups(group_name, -1, -1)
+
+ for g in checked_groups:
+ if g.group_name == group_name:
+ return g
+
+ return None
+
+ def list_departments_from_dingtalk(self, access_token):
+ # https://developers.dingtalk.com/document/app/obtain-the-department-list
+ data = {
+ 'access_token': access_token,
+ }
+ api_response = requests.get(
+ DINGTALK_DEPARTMENT_LIST_DEPARTMENT_URL, params=data)
+ api_response_dic = api_response.json()
+
+ if not api_response_dic:
+ self.log_error('can not get dingtalk departments response')
+ return None
+
+ if 'department' not in api_response_dic:
+ self.log_error(json.dumps(api_response_dic))
+ self.log_error(
+ 'can not get department list in dingtalk departments response')
+ return None
+
+ return api_response_dic['department']
+
+ def do_action(self):
+ # dingtalk check
+ if not ENABLE_DINGTALK:
+ self.log_error('Feature is not enabled.')
+ return
+
+ access_token = dingtalk_get_access_token()
+ if not access_token:
+ self.log_error('can not get dingtalk access_token')
+ return
+
+ # get department list
+ # https://developers.dingtalk.com/document/app/obtain-the-department-list-v2
+ api_department_list = self.list_departments_from_dingtalk(
+ access_token)
+ if api_department_list is None:
+ self.log_error('获取钉钉组织架构失败')
+ return
+ api_department_list = sorted(
+ api_department_list, key=lambda x: x['id'])
+
+ self.log_debug(
+ 'Total %d dingtalk departments.' % len(api_department_list))
+
+ # main
+ count = 0
+ exists_count = 0
+ for department_obj in api_department_list:
+ # check department argument
+ group_name = department_obj.get('name')
+ department_obj_id = department_obj.get('id')
+ if department_obj_id is None or not group_name:
+ continue
+
+ # check department exist
+ exist_department = ExternalDepartment.objects.get_by_provider_and_outer_id(
+ DINGTALK_PROVIDER, department_obj_id)
+ if exist_department:
+ exists_count += 1
+ continue
+
+ # sync to db
+ group = self.get_group_by_name(group_name)
+ if group:
+ ExternalDepartment.objects.create(
+ group_id=group.id,
+ provider=DINGTALK_PROVIDER,
+ outer_id=department_obj_id,
+ )
+ count += 1
+
+ self.log_debug('%d dingtalk departments already exist in db.' % exists_count)
+ self.log_debug('Sync %d dingtalk departments to db.' % count)
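
Because the new file lives under management/commands/, it is picked up as a regular Django management command: it can be run with python manage.py fix_dingtalk_departments_sync, or invoked programmatically:

    from django.core.management import call_command

    call_command('fix_dingtalk_departments_sync')

The work-weixin counterpart added further down (fix_work_weixin_departments_sync) works the same way.
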
diff --git a/seahub/dingtalk/settings.py b/seahub/dingtalk/settings.py
index 89eef07e75..504deb6d2e 100644
--- a/seahub/dingtalk/settings.py
+++ b/seahub/dingtalk/settings.py
@@ -28,3 +28,6 @@ DINGTALK_DEPARTMENT_USER_SIZE = 100
DINGTALK_MESSAGE_SEND_TO_CONVERSATION_URL = getattr(settings, 'DINGTALK_MESSAGE_SEND_TO_CONVERSATION_URL', 'https://oapi.dingtalk.com/topapi/message/corpconversation/asyncsend_v2')
DINGTALK_GET_DETAILED_USER_INFO_URL = getattr(settings, 'DINGTALK_GET_DETAILED_USER_INFO_URL', 'https://oapi.dingtalk.com/user/get')
+
+# constants
+DINGTALK_PROVIDER = 'dingtalk'
diff --git a/seahub/notifications/management/commands/send_notices.py b/seahub/notifications/management/commands/send_notices.py
index a4e37217ac..5996030f93 100644
--- a/seahub/notifications/management/commands/send_notices.py
+++ b/seahub/notifications/management/commands/send_notices.py
@@ -23,7 +23,8 @@ from seahub.invitations.models import Invitation
from seahub.profile.models import Profile
from seahub.constants import HASH_URLS
from seahub.utils import get_site_name
-from seahub.options.models import UserOptions, KEY_COLLABORATE_EMAIL_INTERVAL, KEY_COLLABORATE_LAST_EMAILED_TIME
+from seahub.options.models import UserOptions, KEY_COLLABORATE_EMAIL_INTERVAL, \
+ KEY_COLLABORATE_LAST_EMAILED_TIME, DEFAULT_COLLABORATE_EMAIL_INTERVAL
# Get an instance of a logger
logger = logging.getLogger(__name__)
@@ -208,34 +209,49 @@ class Command(BaseCommand):
def get_user_language(self, username):
return Profile.objects.get_user_language(username)
- def do_action(self):
- emails = []
- user_file_updates_email_intervals = []
- for ele in UserOptions.objects.filter(
- option_key=KEY_COLLABORATE_EMAIL_INTERVAL):
- try:
- user_file_updates_email_intervals.append(
- (ele.email, int(ele.option_val))
- )
- emails.append(ele.email)
- except Exception as e:
- logger.error(e)
- self.stderr.write('[%s]: %s' % (str(datetime.datetime.now()), e))
- continue
+ def get_user_intervals_and_notices(self):
+ """
+ Filter users who have collaborate notices within the last (longest) interval.
+ Right now, the longest interval is DEFAULT_COLLABORATE_EMAIL_INTERVAL.
+ """
+ last_longest_interval_time = datetime.datetime.now() - datetime.timedelta(
+ seconds=DEFAULT_COLLABORATE_EMAIL_INTERVAL)
- user_last_emailed_time_dict = {}
- for ele in UserOptions.objects.filter(option_key=KEY_COLLABORATE_LAST_EMAILED_TIME).filter(email__in=emails):
+ all_unseen_notices = UserNotification.objects.get_all_notifications(
+ seen=False, time_since=last_longest_interval_time).order_by('-timestamp')
+
+ results = {}
+ for notice in all_unseen_notices:
+ if notice.to_user not in results:
+ results[notice.to_user] = {'notices': [notice], 'interval': DEFAULT_COLLABORATE_EMAIL_INTERVAL}
+ else:
+ results[notice.to_user]['notices'].append(notice)
+
+ user_options = UserOptions.objects.filter(
+ email__in=results.keys(), option_key=KEY_COLLABORATE_EMAIL_INTERVAL)
+ for option in user_options:
+ email, interval = option.email, option.option_val
try:
- user_last_emailed_time_dict[ele.email] = datetime.datetime.strptime(
- ele.option_val, "%Y-%m-%d %H:%M:%S")
- except Exception as e:
- logger.error(e)
- self.stderr.write('[%s]: %s' % (str(datetime.datetime.now()), e))
- continue
+ interval = int(interval)
+ except ValueError:
+ logger.warning('user: %s, %s invalid, val: %s', email, KEY_COLLABORATE_EMAIL_INTERVAL, interval)
+ interval = DEFAULT_COLLABORATE_EMAIL_INTERVAL
+ if interval <= 0:
+ del results[email]
+ else:
+ results[email]['interval'] = interval
+
+ return [(key, value['interval'], value['notices']) for key, value in results.items()]
+
+
+ def do_action(self):
+ user_interval_notices = self.get_user_intervals_and_notices()
+ last_emailed_list = UserOptions.objects.filter(option_key=KEY_COLLABORATE_LAST_EMAILED_TIME).values_list('email', 'option_val')
+ user_last_emailed_time_dict = {le[0]: datetime.datetime.strptime(le[1], "%Y-%m-%d %H:%M:%S") for le in last_emailed_list}
# save current language
cur_language = translation.get_language()
- for (to_user, interval_val) in user_file_updates_email_intervals:
+ for (to_user, interval_val, notices) in user_interval_notices:
# get last_emailed_time if any, defaults to today 00:00:00.0
last_emailed_time = user_last_emailed_time_dict.get(to_user, None)
now = datetime.datetime.now().replace(microsecond=0)
@@ -246,10 +262,8 @@ class Command(BaseCommand):
if (now - last_emailed_time).total_seconds() < interval_val:
continue
- # get notices
- user_notices_qs = UserNotification.objects.get_all_notifications(seen=False, time_since=last_emailed_time)
- user_notices, count = list(user_notices_qs), user_notices_qs.count()
- if not count:
+ user_notices = list(filter(lambda notice: notice.timestamp > last_emailed_time, notices))
+ if not user_notices:
continue
# get and active user language
@@ -312,7 +326,7 @@ class Command(BaseCommand):
to_user = contact_email # use contact email if any
c = {
'to_user': to_user,
- 'notice_count': count,
+ 'notice_count': len(notices),
'notices': notices,
'user_name': user_name,
}
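
The interval handling in get_user_intervals_and_notices boils down to: users with unseen notices but no collaborate_email_interval option keep the 3600-second default, an unparsable stored value falls back to the default, and a non-positive value removes the user from the send list entirely. A small self-contained restatement of that rule (effective_interval is a hypothetical helper, not part of the patch):

    DEFAULT_COLLABORATE_EMAIL_INTERVAL = 3600  # seconds, mirrors seahub/options/models.py

    def effective_interval(option_val):
        # None means "do not email this user at all"
        try:
            interval = int(option_val)
        except ValueError:
            interval = DEFAULT_COLLABORATE_EMAIL_INTERVAL
        return None if interval <= 0 else interval

    assert effective_interval('120') == 120
    assert effective_interval('abc') == 3600
    assert effective_interval('0') is None
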
diff --git a/seahub/options/models.py b/seahub/options/models.py
index 320bfb96ad..19ff6d1fa9 100644
--- a/seahub/options/models.py
+++ b/seahub/options/models.py
@@ -39,6 +39,8 @@ KEY_FILE_UPDATES_LAST_EMAILED_TIME = "file_updates_last_emailed_time"
KEY_COLLABORATE_EMAIL_INTERVAL = 'collaborate_email_interval'
KEY_COLLABORATE_LAST_EMAILED_TIME = 'collaborate_last_emailed_time'
+DEFAULT_COLLABORATE_EMAIL_INTERVAL = 3600
+
class CryptoOptionNotSetError(Exception):
pass
diff --git a/seahub/profile/views.py b/seahub/profile/views.py
index 333b7206cf..f2bcc73a8a 100644
--- a/seahub/profile/views.py
+++ b/seahub/profile/views.py
@@ -18,7 +18,7 @@ from seahub.utils import is_org_context, is_pro_version, is_valid_username
from seahub.base.accounts import User, UNUSABLE_PASSWORD
from seahub.base.templatetags.seahub_tags import email2nickname
from seahub.contacts.models import Contact
-from seahub.options.models import UserOptions, CryptoOptionNotSetError
+from seahub.options.models import UserOptions, CryptoOptionNotSetError, DEFAULT_COLLABORATE_EMAIL_INTERVAL
from seahub.utils import is_ldap_user
from seahub.utils.two_factor_auth import has_two_factor_auth
from seahub.views import get_owned_repo_list
@@ -87,7 +87,7 @@ def edit_profile(request):
file_updates_email_interval = UserOptions.objects.get_file_updates_email_interval(username)
file_updates_email_interval = file_updates_email_interval if file_updates_email_interval is not None else 0
collaborate_email_interval = UserOptions.objects.get_collaborate_email_interval(username)
- collaborate_email_interval = collaborate_email_interval if collaborate_email_interval is not None else 0
+ collaborate_email_interval = collaborate_email_interval if collaborate_email_interval is not None else DEFAULT_COLLABORATE_EMAIL_INTERVAL
if work_weixin_oauth_check():
enable_wechat_work = True
diff --git a/seahub/settings.py b/seahub/settings.py
index a559f4a8ab..e21f4afa51 100644
--- a/seahub/settings.py
+++ b/seahub/settings.py
@@ -248,6 +248,7 @@ INSTALLED_APPS = [
'seahub.file_tags',
'seahub.related_files',
'seahub.work_weixin',
+ 'seahub.dingtalk',
'seahub.file_participants',
'seahub.repo_api_tokens',
'seahub.abuse_reports',
diff --git a/seahub/templates/view_shared_dir_react.html b/seahub/templates/view_shared_dir_react.html
index 1b9d7cc819..85cb6f7575 100644
--- a/seahub/templates/view_shared_dir_react.html
+++ b/seahub/templates/view_shared_dir_react.html
@@ -20,6 +20,7 @@
window.shared = {
pageOptions: {
dirName: '{{ dir_name|escapejs }}',
+ dirPath: '{{ dir_path|escapejs }}',
sharedBy: '{{ username|email2nickname|escapejs }}',
repoID: '{{repo.id}}',
path: '{{ path|escapejs }}',
diff --git a/seahub/utils/__init__.py b/seahub/utils/__init__.py
index 00d4b791b4..f5ba1566f2 100644
--- a/seahub/utils/__init__.py
+++ b/seahub/utils/__init__.py
@@ -29,6 +29,7 @@ from django.utils.translation import ugettext as _
from django.http import HttpResponseRedirect, HttpResponse, HttpResponseNotModified
from django.utils.http import urlquote
from django.utils.html import escape
+from django.utils.timezone import make_naive, is_aware
from django.views.static import serve as django_static_serve
from seahub.auth import REDIRECT_FIELD_NAME
@@ -1332,6 +1333,12 @@ def get_origin_repo_info(repo_id):
def within_time_range(d1, d2, maxdiff_seconds):
'''Return true if two datetime.datetime object differs less than the given seconds'''
+ if is_aware(d1):
+ d1 = make_naive(d1)
+
+ if is_aware(d2):
+ d2 = make_naive(d2)
+
delta = d2 - d1 if d2 > d1 else d1 - d2
# delta.total_seconds() is only available in python 2.7+
diff = (delta.microseconds + (delta.seconds + delta.days*24*3600) * 1e6) / 1e6
diff --git a/seahub/utils/timeutils.py b/seahub/utils/timeutils.py
index 86b9684b6d..47d9c66c5b 100644
--- a/seahub/utils/timeutils.py
+++ b/seahub/utils/timeutils.py
@@ -60,6 +60,11 @@ def timestamp_to_isoformat_timestr(timestamp):
# https://pypi.org/project/pytz/
def datetime_to_isoformat_timestr(datetime):
+
+ from django.utils.timezone import make_naive, is_aware
+ if is_aware(datetime):
+ datetime = make_naive(datetime)
+
try:
# This library only supports two ways of building a localized time.
# The first is to use the localize() method provided by the pytz library.
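
The is_aware/make_naive guards added in admin/users.py, utils/__init__.py and here all exist because Python refuses to compare or subtract an offset-aware datetime and a naive one. A minimal illustration (Asia/Shanghai is only an assumption, chosen because it matches the +8 hour shift in the admin/users.py comment):

    import datetime
    import pytz
    from django.utils.timezone import is_aware, make_naive

    aware = datetime.datetime(2021, 4, 9, 5, 32, 30, tzinfo=pytz.utc)
    naive = datetime.datetime(2021, 4, 9, 14, 0, 0)

    # naive - aware would raise:
    # TypeError: can't subtract offset-naive and offset-aware datetimes
    if is_aware(aware):
        aware = make_naive(aware, pytz.timezone('Asia/Shanghai'))  # 2021-04-09 13:32:30, tzinfo=None
    print(naive - aware)  # 0:27:30, a plain timedelta
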
diff --git a/seahub/views/repo.py b/seahub/views/repo.py
index b1eb8bb9b2..ded1e6cac3 100644
--- a/seahub/views/repo.py
+++ b/seahub/views/repo.py
@@ -344,6 +344,7 @@ def view_shared_dir(request, fileshare):
'path': req_path,
'username': username,
'dir_name': dir_name,
+ 'dir_path': real_path,
'file_list': file_list,
'dir_list': dir_list,
'zipped': zipped,
diff --git a/seahub/work_weixin/management/__init__.py b/seahub/work_weixin/management/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/seahub/work_weixin/management/commands/__init__.py b/seahub/work_weixin/management/commands/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/seahub/work_weixin/management/commands/fix_work_weixin_departments_sync.py b/seahub/work_weixin/management/commands/fix_work_weixin_departments_sync.py
new file mode 100644
index 0000000000..0d7c08763f
--- /dev/null
+++ b/seahub/work_weixin/management/commands/fix_work_weixin_departments_sync.py
@@ -0,0 +1,121 @@
+import logging
+import requests
+import json
+from datetime import datetime
+
+from django.core.management.base import BaseCommand
+
+from seaserv import ccnet_api
+from seahub.work_weixin.utils import handler_work_weixin_api_response, \
+ get_work_weixin_access_token, admin_work_weixin_departments_check
+from seahub.work_weixin.settings import WORK_WEIXIN_DEPARTMENTS_URL, \
+ WORK_WEIXIN_PROVIDER
+from seahub.auth.models import ExternalDepartment
+
+logger = logging.getLogger(__name__)
+
+
+class Command(BaseCommand):
+ help = "Fix the sync of imported work-weixin departments to the database."
+
+ def println(self, msg):
+ self.stdout.write('[%s] %s\n' % (str(datetime.now()), msg))
+
+ def log_error(self, msg):
+ logger.error(msg)
+ self.println(msg)
+
+ def log_info(self, msg):
+ logger.info(msg)
+ self.println(msg)
+
+ def log_debug(self, msg):
+ logger.debug(msg)
+ self.println(msg)
+
+ def handle(self, *args, **options):
+ self.log_debug('Start fix sync work-weixin departments...')
+ self.do_action()
+ self.log_debug('Finish fix sync work-weixin departments.\n')
+
+ def get_group_by_name(self, group_name):
+ checked_groups = ccnet_api.search_groups(group_name, -1, -1)
+
+ for g in checked_groups:
+ if g.group_name == group_name:
+ return g
+
+ return None
+
+ def list_departments_from_work_weixin(self, access_token):
+ # https://work.weixin.qq.com/api/doc/90000/90135/90208
+ data = {
+ 'access_token': access_token,
+ }
+ api_response = requests.get(WORK_WEIXIN_DEPARTMENTS_URL, params=data)
+ api_response_dic = handler_work_weixin_api_response(api_response)
+
+ if not api_response_dic:
+ self.log_error('can not get work weixin departments response')
+ return None
+
+ if 'department' not in api_response_dic:
+ self.log_error(json.dumps(api_response_dic))
+ self.log_error(
+ 'can not get department list in work weixin departments response')
+ return None
+
+ return api_response_dic['department']
+
+ def do_action(self):
+ # work weixin check
+ if not admin_work_weixin_departments_check():
+ self.log_error('Feature is not enabled.')
+ return
+
+ access_token = get_work_weixin_access_token()
+ if not access_token:
+ self.log_error('can not get work weixin access_token')
+ return
+
+ # list departments from work weixin
+ api_department_list = self.list_departments_from_work_weixin(
+ access_token)
+ if api_department_list is None:
+ self.log_error('获取企业微信组织架构失败')
+ return
+ api_department_list = sorted(
+ api_department_list, key=lambda x: x['id'])
+
+ self.log_debug(
+ 'Total %d work-weixin departments.' % len(api_department_list))
+
+ # main
+ count = 0
+ exists_count = 0
+ for department_obj in api_department_list:
+ # check department argument
+ group_name = department_obj.get('name')
+ department_obj_id = department_obj.get('id')
+ if department_obj_id is None or not group_name:
+ continue
+
+ # check department exist
+ exist_department = ExternalDepartment.objects.get_by_provider_and_outer_id(
+ WORK_WEIXIN_PROVIDER, department_obj_id)
+ if exist_department:
+ exists_count += 1
+ continue
+
+ # sync to db
+ group = self.get_group_by_name(group_name)
+ if group:
+ ExternalDepartment.objects.create(
+ group_id=group.id,
+ provider=WORK_WEIXIN_PROVIDER,
+ outer_id=department_obj_id,
+ )
+ count += 1
+
+ self.log_debug('%d work-weixin departments already exist in db.' % exists_count)
+ self.log_debug('Sync %d work-weixin departments to db.' % count)
diff --git a/sql/mysql.sql b/sql/mysql.sql
index 50a6166b16..cde8a349b8 100644
--- a/sql/mysql.sql
+++ b/sql/mysql.sql
@@ -1282,3 +1282,13 @@ CREATE TABLE `repo_auto_delete` (
PRIMARY KEY (`id`),
UNIQUE KEY `repo_id` (`repo_id`)
) ENGINE = InnoDB DEFAULT CHARSET=utf8;
+
+CREATE TABLE `external_department` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `group_id` int(11) NOT NULL,
+ `provider` varchar(32) NOT NULL,
+ `outer_id` bigint(20) NOT NULL,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `group_id` (`group_id`),
+ UNIQUE KEY `external_department_provider_outer_id_8dns6vkw_uniq` (`provider`,`outer_id`)
+) ENGINE = InnoDB DEFAULT CHARSET=utf8;
diff --git a/sql/sqlite3.sql b/sql/sqlite3.sql
index 4763f91262..5bd25b1add 100644
--- a/sql/sqlite3.sql
+++ b/sql/sqlite3.sql
@@ -600,4 +600,6 @@ CREATE INDEX "ocm_share_received_from_server_url_10527b80" ON "ocm_share_receive
CREATE INDEX "ocm_share_received_repo_id_9e77a1b9" ON "ocm_share_received" ("repo_id");
CREATE INDEX "ocm_share_received_provider_id_60c873e0" ON "ocm_share_received" ("provider_id");
CREATE TABLE "repo_auto_delete" ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT, "repo_id" varchar(36) NOT NULL UNIQUE, "days" integer NOT NULL);
+CREATE TABLE "external_department" ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT, "group_id" integer NOT NULL UNIQUE, "provider" varchar(32) NOT NULL, "outer_id" bigint NOT NULL);
+CREATE UNIQUE INDEX "external_department_provider_outer_id_8dns6vkw_uniq" ON "external_department" ("provider", "outer_id");
COMMIT;