Mirror of https://github.com/haiwen/seahub.git (synced 2025-04-26 18:40:53 +00:00)
Python3 master (#4076)
* delete thirdpart/social_django
* delete social_django in seahub/urls.py
* delete social_django in seahub/settings.py
* delete seahub/notifications/management/commands/send_wxwork_notices.py
* delete social_django in code annotation
* delete seahub/social_core
* delete tests/seahub/social_core
* delete social_core in seahub/urls.py
* delete social_core in seahub/settings.py
* change app_label to auth in SocialAuthUser model
* 2to3 asserts
* 2to3 basestring
* 2to3 dict
* 2to3 except
* 2to3 filter
* 2to3 future
* 2to3 has_key
* 2to3 idioms
* 2to3 import
* 2to3 imports
* 2to3 long
* 2to3 map
* 2to3 next
* 2to3 numliterals
* 2to3 print
* 2to3 raise
* 2to3 raw_input
* 2to3 reduce
* 2to3 reload
* 2to3 set_literal
* 2to3 unicode
* 2to3 urllib
* 2to3 ws_comma
* 2to3 xrange
* 2to3 zip
* add pymysql in __init__.py
* fix encode and decode in seahub/cconvert.py
* fix seafserv_rpc.is_passwd_set in seahub/views/__init__.py
* fix smart_unicode to smart_text
* fix force_unicode to force_text
* delete seaserv.get_session_info
* delete seaserv.ccnet_rpc
* fix indent error in seahub/auth/middleware.py
* update dev-requirements
* update test-requirements
* update requirements
* fix StringIO to BytesIO in thumbnail
* fix seaserv.list_inner_pub_repos to seafile_api.get_inner_pub_repo_list
* fix seaserv.list_org_inner_pub_repos to seafile_api.list_org_inner_pub_repos
* add logger in seahub/utils/__init__.py
* fix sort cmp in seahub/views/__init__.py
* fix sort cmp in seahub/base/management/commands/export_file_access_log.py
* fix sort cmp in seahub/api2/endpoints/repo_trash.py
* fix sort cmp in seahub/api2/endpoints/shared_repos.py
* fix sort cmp in seahub/api2/endpoints/shared_folders.py
* fix sort cmp in seahub/wiki/views.py
* fix sort cmp in seahub/api2/endpoints/wiki_pages.py
* fix sort cmp in seahub/api2/endpoints/group_libraries.py
* fix sort cmp in seahub/base/models.py
* fix sort cmp in seahub/api2/endpoints/upload_links.py
* fix sort cmp in seahub/views/ajax.py
* fix sort cmp in seahub/api2/views.py
* fix sort cmp in seahub/views/wiki.py
* fix sort cmp in seahub/api2/endpoints/repos.py
* fix sort cmp in seahub/api2/endpoints/starred_items.py
* fix sort cmp in seahub/views/file.py
* fix sort cmp in seahub/api2/endpoints/dir.py
* fix sort cmp in seahub/api2/endpoints/share_links.py
* fix cmp to cmp_to_key in seahub/api2/endpoints/admin/device_trusted_ip.py
* fix cmp to cmp_to_key in tests/api/endpoints/admin/test_device_trusted_ip.py
* delete encode('utf-8') in seafile_api.list_dir_by_commit_and_path
* delete encode('utf-8') in is_file_starred
* delete encode('utf-8') in seafile_api.list_dir_by_path
* delete path.encode('utf-8') in seahub/views/file.py
* fix os.write to add encode('utf-8')
* add encode('utf-8') for hashlib
* add encode('utf-8') for hmac
* fix with open(file, 'wb') for binary file
* fix encode and decode in seahub/utils/hasher.py
* fix next in thirdpart/shibboleth/views.py
* fix next in seahub/profile/views.py
* fix next in seahub/notifications/views.py
* fix next in seahub/institutions/views.py
* fix next in seahub/options/views.py
* fix next in seahub/share/views.py
* fix next in seahub/avatar/views.py
* fix next in seahub/views/__init__.py
* fix next in seahub/group/views.py
* fix next in seahub/views/wiki.py
* fix next in seahub/views/sysadmin.py
* fix next in seahub/views/file.py
* fix string.lowercase to string.ascii_lowercase in test
* fix open file add 'rb' in test
* fix self.user.username in test
* add migrations in file_participants
* fix list_org_inner_pub_repos to list_org_inner_pub_repos_by_owner
* fix from seaserv import is_passwd_set to seafile_api.is_password_set
* fix assert bytes resp.content in test
* fix seafile_api.get_inner_pub_repo_list to seafile_api.list_inner_pub_repos_by_owner
* fix seafile_api.is_passwd_set to seafile_api.is_password_set
* fix AccountsApiTest assert length
* rewrite sort_devices cmp to operator.lt
* fix bytes + str in seahub/api2/views.py
* fix assert bytes resp.content in test
* fix hashlib encode in seahub/thirdpart/registration/models.py
* change app_label to base in SocialAuthUser
* fix base64 encode in seahub/base/database_storage/database_storage.py
* fix assert bytes resp.content
* remove path.decode in def mkstemp()
* remove path.decode in FpathToLinkTest
* remove str decode in FileTagTest
* remove mock_write_xls.assert_called_once() in SysUserAdminExportExcelTest
* fix urllib assert in FilesApiTest
* fix link fields in FileCommentsTest
* fix get_related_users_by_repo()
* fix assert list in GetRepoSharedUsersTest
* fix create user in AccountTest
* fix repeated key in dict seahub/api2/views.py
* add drone.yml
* update nginx conf in test
* update test conf in test
* update dist and push after test success
* update drone conf to dist and push
* fix assert in BeSharedReposTest
* fix seafile_api.list_org_inner_pub_repos_by_owner(org_id, username) to seafile_api.list_org_inner_pub_repos(org_id)
* fix seafile_api.list_inner_pub_repos_by_owner(username) to seafile_api.get_inner_pub_repo_list()
* update pyjwt requirement
* update dist branch in drone
* add SKIP in dist and push
* fix StringIO to BytesIO in seahub/avatar/models.py
* fix if org_id > 0 to if org_id and org_id > 0
* remove payment
* fix StringIO to BytesIO in seahub/base/database_storage/database_storage.py
* fix send_message to seafile_api.publish_event in seahub/drafts/utils.py
* fix send_message to seafile_api.publish_event in seahub/api2/views.py
* fix send_message to seafile_api.publish_event in seahub/api2/endpoints/repos.py
* fix send_message to seafile_api.publish_event in seahub/views/file.py
* fix send_message to seafile_api.publish_event in seahub/utils/__init__.py
* fix image_file.read encode in seahub/base/database_storage/database_storage.py
* fix DatabaseStorageTest
* remove .travis.yml
* drone branch include master
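The recurring "fix sort cmp" entries above all describe the same Python 3 change: list.sort() and sorted() no longer accept a comparator, so two-argument cmp lambdas become key functions. A minimal sketch of the pattern, using hypothetical repo objects rather than seahub's real types:

    from types import SimpleNamespace

    repos = [SimpleNamespace(last_modify=1), SimpleNamespace(last_modify=3)]

    # Python 2 (removed): repos.sort(lambda x, y: cmp(y.last_modify, x.last_modify))
    # Python 3 (added): sort by a key function, newest first
    repos.sort(key=lambda r: r.last_modify, reverse=True)
    assert [r.last_modify for r in repos] == [3, 1]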
Parent: 835015c3a8
Commit: 99a9ece04e
.drone.yml (new file, +21 lines)
@@ -0,0 +1,21 @@
+pipeline:
+  build:
+    image: docker.seafile.top/drone/seafile-pro-builder:v4
+    pull: true
+    secrets: [ github_token, npm_token, travis ]
+    when:
+      branch:
+        include: [ master, python3-master ]
+        exclude: []
+      event: [ push, pull_request ]
+
+    commands:
+      - cd /tmp/seafile-test-deploy && git fetch origin python3:python3 && git checkout python3
+      - ./bootstrap.sh && cd -
+      - export CCNET_CONF_DIR=/tmp/ccnet SEAFILE_CONF_DIR=/tmp/seafile-data
+      - echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" > ~/.npmrc
+      - cd /drone/src/github.com/haiwen/seahub
+      - ./tests/install-deps.sh
+      - pip install -r test-requirements.txt
+      - .travis/test_seahub_changes.sh; rc=$?; if test "$rc" -eq 0; then ./tests/seahubtests.sh init && ./tests/seahubtests.sh runserver && ./tests/seahubtests.sh test; else true; fi
+      - if test $DRONE_COMMIT_BRANCH = "master"; then .travis/dist_and_push.sh; else true; fi
@@ -1,5 +1,5 @@
 #!/bin/bash
-: ${PYTHON=python}
+: ${PYTHON=python3}

 set -e
 if [[ ${TRAVIS} != "" ]]; then
@@ -7,23 +7,25 @@ if [[ ${TRAVIS} != "" ]]; then
 fi

 set -x
-SEAHUB_TESTSDIR=$(python -c "import os; print os.path.dirname(os.path.realpath('$0'))")
+SEAHUB_TESTSDIR=$(python -c "import os; print(os.path.dirname(os.path.realpath('$0')))")
 SEAHUB_SRCDIR=$(dirname "${SEAHUB_TESTSDIR}")

-export PYTHONPATH="/usr/local/lib/python2.7/site-packages:/usr/lib/python2.7/site-packages:${SEAHUB_SRCDIR}/thirdpart:${PYTHONPATH}"
+export PYTHONPATH="/usr/local/lib/python3.7/site-packages:/usr/local/lib/python3.7/dist-packages:/usr/lib/python3.7/site-packages:/usr/lib/python3.7/dist-packages:${SEAHUB_SRCDIR}/thirdpart:${PYTHONPATH}"
 cd "$SEAHUB_SRCDIR"
 set +x

 function commit_dist_files() {
-    git checkout -b dist-$TRAVIS_BRANCH
+    git checkout -b dist-$DRONE_COMMIT_BRANCH
     git add -u . && git add -A media/assets && git add -A static/scripts && git add -A frontend && git add -A locale
-    git commit -m "[dist] Travis build: #$TRAVIS_BUILD_NUMBER, based on commit $TRAVIS_COMMIT." -m "$TRAVIS_COMMIT_MESSAGE"
+    git config --global user.email "drone@seafile.com"
+    git config --global user.name "Drone CI"
+    git commit -m "[dist][CI SKIP] Drone CI build: #$DRONE_BUILD_NUMBER, based on commit $DRONE_COMMIT." -m "$DRONE_COMMIT_MESSAGE"
 }

 function upload_files() {
     echo 'push dist to seahub'
-    git remote add token-origin https://imwhatiam:${GITHUB_PERSONAL_ACCESS_TOKEN}@github.com/haiwen/seahub.git
-    git push -f token-origin dist-$TRAVIS_BRANCH
+    git remote add token-origin https://$GITHUB_TOKEN@github.com/haiwen/seahub.git
+    git push -f token-origin dist-$DRONE_COMMIT_BRANCH
 }

 function make_dist() {
@@ -5,5 +5,5 @@ transifex-client
 raven==5.0.0

 mysqlclient==1.3.12
-pycryptodome==3.7.2
+pycryptodome
 psd-tools==1.4
@@ -19,15 +19,15 @@ def check(path):

 def do_update(filename):
     if 'migrations' in filename:
-        print 'skip migration file: %s' % filename
+        print('skip migration file: %s' % filename)
         return

     with open(filename) as f:
         # try read first line of file
         try:
-            head = [next(f) for x in xrange(1)]
+            head = [next(f) for x in range(1)]
         except StopIteration:
-            print '%s is empty, skip' % filename
+            print('%s is empty, skip' % filename)
             return

     copy_str = '# Copyright (c) 2012-2016 Seafile Ltd.'
@@ -39,11 +39,11 @@ def do_update(filename):
     need_update = False

     if not need_update:
-        print '%s is ok.' % filename
+        print('%s is ok.' % filename)
         return

     line_prepender(filename, copy_str)
-    print '%s Done.' % filename
+    print('%s Done.' % filename)

 def path_to_pyfile_list(path):
     is_dir = False
@@ -78,7 +78,7 @@ def do_check(filename):
     with open(filename) as f:
         # try read first line of file
         try:
-            head = [next(f) for x in xrange(1)]
+            head = [next(f) for x in range(1)]
         except StopIteration:
             return

@@ -89,4 +89,4 @@ def do_check(filename):
     need_update = False

     if need_update:
-        print 'No copyright info in %s.' % filename
+        print('No copyright info in %s.' % filename)
@@ -18,7 +18,7 @@ def make(default=True, lang='en'):
     b2 = f.readline()

     if b1 != b2:
-        print 'Error: inconsistent Git branch names.'
+        print('Error: inconsistent Git branch names.')
         return

@@ -63,7 +63,7 @@ def compile():
 def _inplace_change(filename, old_string, new_string):
     s = open(filename).read()
     if old_string in s:
-        print(green('Changing "{old_string}" to "{new_string}" in "{filename}"'.format(**locals())))
+        print((green('Changing "{old_string}" to "{new_string}" in "{filename}"'.format(**locals()))))
         s = s.replace(old_string, new_string)
         f = open(filename, 'w')
         f.write(s)
@@ -71,4 +71,4 @@ def _inplace_change(filename, old_string, new_string):
     f.close()

 def _debug(msg):
-    print(red('Running: {msg}'.format(**locals())))
+    print((red('Running: {msg}'.format(**locals()))))
@@ -1,23 +1,20 @@
-python-dateutil==2.7.0
-python-memcached==1.57
-chardet==3.0.4
-six==1.11.0
-Pillow==4.3.0
-Django==1.11.15
-django-compressor==2.2
-git+git://github.com/haiwen/django-post_office.git@2312cf240363721f737b5ac8eb86ab8cb255938f#egg=django-post_office
-django-statici18n==1.7.0
-djangorestframework==3.3.3
-git+git://github.com/haiwen/django-constance.git@8508ff29141732190faff51d5c2b5474da297732#egg=django-constance[database]
-openpyxl==2.3.0
-pytz==2015.7
-django-formtools==2.1
-qrcode==5.3
-requests==2.20.1
-requests_oauthlib==0.8.0
-django-simple-captcha==0.5.6
-gunicorn==19.8.1
-django-webpack-loader==0.6.0
-git+git://github.com/haiwen/python-cas.git@ffc49235fd7cc32c4fdda5acfa3707e1405881df#egg=python_cas
-futures==3.2.0
-social-auth-core==1.7.0
+Django==1.11.23
+future
+captcha
+django-compressor
+django-statici18n
+django-constance
+django-post_office
+django-webpack_loader
+gunicorn
+pymysql
+django-picklefield
+openpyxl
+qrcode
+django-formtools
+django-simple-captcha
+djangorestframework
+python-dateutil
+requests
+pillow
+pyjwt
@@ -1,7 +1,9 @@
 # Copyright (c) 2012-2016 Seafile Ltd.
-from signals import repo_created, repo_deleted, clean_up_repo_trash, \
+import pymysql
+pymysql.install_as_MySQLdb()
+from .signals import repo_created, repo_deleted, clean_up_repo_trash, \
     repo_restored
-from handlers import repo_created_cb, repo_deleted_cb, clean_up_repo_trash_cb, \
+from .handlers import repo_created_cb, repo_deleted_cb, clean_up_repo_trash_cb, \
     repo_restored_cb

 repo_created.connect(repo_created_cb)
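The two lines added at the top of this package are the standard PyMySQL shim. A short sketch of what it does (assuming PyMySQL is installed): after install_as_MySQLdb(), any later "import MySQLdb" resolves to pymysql, which is how Django's MySQLdb-based MySQL backend keeps working on Python 3 without the C MySQLdb module.

    import pymysql

    pymysql.install_as_MySQLdb()   # registers pymysql in sys.modules under the name MySQLdb

    import MySQLdb                 # now actually pymysql
    assert MySQLdb is pymysql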
@@ -1,5 +1,5 @@
 # -*- coding: utf-8 -*-
-from __future__ import unicode_literals
+

 from django.db import migrations, models
 import datetime
@@ -52,7 +52,7 @@ class AddressBookGroupsSearchMember(APIView):
         email_list += search_user_from_ccnet(q)
         email_list += search_user_from_profile(q)
         # remove duplicate emails
-        email_list = {}.fromkeys(email_list).keys()
+        email_list = list({}.fromkeys(email_list).keys())

         try:
             # get all members in current group and its sub groups
@@ -64,7 +64,7 @@ class AddressBookGroupsSearchMember(APIView):

             group_email_list = [m.user_name for m in all_members]
             # remove duplicate emails
-            group_email_list = {}.fromkeys(group_email_list).keys()
+            group_email_list = list({}.fromkeys(group_email_list).keys())

             email_result = []
             for email in group_email_list:
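The list() wrapper is needed because dict.keys() returns a view object in Python 3 rather than a list; {}.fromkeys(seq).keys() is the commit's order-preserving dedup idiom. An illustrative example:

    emails = ['a@x.com', 'b@x.com', 'a@x.com']
    deduped = list({}.fromkeys(emails).keys())  # same as list(dict.fromkeys(emails))
    assert deduped == ['a@x.com', 'b@x.com']    # first-seen order kept, duplicates dropped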
@@ -192,11 +192,11 @@ class Account(APIView):
         if name is not None:
             if len(name) > 64:
                 return api_error(status.HTTP_400_BAD_REQUEST,
-                                 _(u'Name is too long (maximum is 64 characters)'))
+                                 _('Name is too long (maximum is 64 characters)'))

             if "/" in name:
                 return api_error(status.HTTP_400_BAD_REQUEST,
-                                 _(u"Name should not include '/'."))
+                                 _("Name should not include '/'."))

         # argument check for list_in_address_book
         list_in_address_book = request.data.get("list_in_address_book", None)
@@ -211,18 +211,18 @@ class Account(APIView):
             loginid = loginid.strip()
             if loginid == "":
                 return api_error(status.HTTP_400_BAD_REQUEST,
-                                 _(u"Login id can't be empty"))
+                                 _("Login id can't be empty"))
             usernamebyloginid = Profile.objects.get_username_by_login_id(loginid)
             if usernamebyloginid is not None:
                 return api_error(status.HTTP_400_BAD_REQUEST,
-                                 _(u"Login id %s already exists." % loginid))
+                                 _("Login id %s already exists." % loginid))

         # argument check for department
         department = request.data.get("department", None)
         if department is not None:
             if len(department) > 512:
                 return api_error(status.HTTP_400_BAD_REQUEST,
-                                 _(u'Department is too long (maximum is 512 characters)'))
+                                 _('Department is too long (maximum is 512 characters)'))

         # argument check for institution
         institution = request.data.get("institution", None)
@@ -256,7 +256,7 @@ class Account(APIView):
                 get_file_size_unit('MB')
             if space_quota_mb > org_quota_mb:
                 return api_error(status.HTTP_400_BAD_REQUEST, \
-                        _(u'Failed to set quota: maximum quota is %d MB' % org_quota_mb))
+                        _('Failed to set quota: maximum quota is %d MB' % org_quota_mb))

         # argument check for is_trial
         is_trial = request.data.get("is_trial", None)
@@ -77,14 +77,14 @@ class AdminAddressBookGroups(APIView):

         # Check whether group name is validate.
         if not validate_group_name(group_name):
-            error_msg = _(u'Name can only contain letters, numbers, blank, hyphen or underscore.')
+            error_msg = _('Name can only contain letters, numbers, blank, hyphen or underscore.')
             return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

         # Check whether group name is duplicated.
         pattern_matched_groups = ccnet_api.search_groups(group_name, -1, -1)
         for group in pattern_matched_groups:
             if group.group_name == group_name:
-                error_msg = _(u'There is already a group with that name.')
+                error_msg = _('There is already a group with that name.')
                 return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

         # Group owner is 'system admin'
@@ -228,11 +228,11 @@ class AdminAddressBookGroup(APIView):
             return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

         if has_repo:
-            error_msg = _(u'There are libraries in this department.')
+            error_msg = _('There are libraries in this department.')
             return api_error(status.HTTP_403_FORBIDDEN, error_msg)

         if len(child_groups) > 0:
-            error_msg = _(u'There are sub-departments in this department.')
+            error_msg = _('There are sub-departments in this department.')
             return api_error(status.HTTP_403_FORBIDDEN, error_msg)

         try:
@@ -1,4 +1,5 @@
 # Copyright (c) 2012-2016 Seafile Ltd.
+from functools import cmp_to_key

 from rest_framework.authentication import SessionAuthentication
 from rest_framework.permissions import IsAdminUser
@@ -83,7 +84,7 @@ class AdminDeviceTrustedIP(APIView):
     @check_parameter
     def get(self, request, format=None):
         ip_list = [ip.to_dict() for ip in TrustedIP.objects.all()]
-        ip_list = sorted(ip_list, cmp=cmp_ip)
+        ip_list = sorted(ip_list, key=cmp_to_key(cmp_ip))
         return Response(ip_list)

     @check_parameter
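Where a genuine two-argument comparator survives, Python 3 drops the cmp= keyword entirely, so functools.cmp_to_key wraps it into a key function. A hedged sketch with a stand-in comparator over dotted-quad strings, not seahub's actual cmp_ip over IP dicts:

    from functools import cmp_to_key

    def cmp_ip(a, b):
        pa = [int(x) for x in a.split('.')]
        pb = [int(x) for x in b.split('.')]
        return (pa > pb) - (pa < pb)   # Python 3 replacement for the removed cmp()

    ips = ['10.0.0.2', '2.0.0.1', '10.0.0.10']
    assert sorted(ips, key=cmp_to_key(cmp_ip)) == ['2.0.0.1', '10.0.0.2', '10.0.0.10']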
@@ -56,7 +56,7 @@ class AdminFavicon(APIView):
                                            CUSTOM_FAVICON_PATH)

         # save favicon file to custom dir
-        with open(custom_favicon_file, 'w') as fd:
+        with open(custom_favicon_file, 'wb') as fd:
             fd.write(favicon_file.read())

         custom_symlink = os.path.join(MEDIA_ROOT,
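The 'w' to 'wb' changes here and below follow from Python 3's strict text/bytes split: an uploaded file's read() yields bytes, and writing bytes to a file opened in text mode raises TypeError. A minimal sketch with a hypothetical path and payload:

    data = b'\x89PNG\r\n'                        # what favicon_file.read() returns: bytes

    with open('/tmp/favicon.ico', 'wb') as fd:   # text mode 'w' would raise TypeError
        fd.write(data)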
@@ -55,7 +55,7 @@ class AdminFileScanRecords(APIView):
             else:
                 record["repo_name"] = repo.name
             record_detail = json.loads(record['detail'])
-            detail_dict = record_detail.values()[0]
+            detail_dict = list(record_detail.values())[0]
             detail = dict()
             detail["suggestion"] = detail_dict["suggestion"]
             detail["label"] = detail_dict["label"]
@@ -100,7 +100,7 @@ class AdminGroupOwnedLibraries(APIView):
                     storage_id=storage_id)
             else:
                 # STORAGE_CLASS_MAPPING_POLICY == 'REPO_ID_MAPPING'
-                if org_id > 0:
+                if org_id and org_id > 0:
                     repo_id = seafile_api.org_add_group_owned_repo(
                         org_id, group_id, repo_name, permission, password,
                         ENCRYPTED_LIBRARY_VERSION)
@@ -109,7 +109,7 @@ class AdminGroupOwnedLibraries(APIView):
                         group_id, repo_name, permission, password,
                         ENCRYPTED_LIBRARY_VERSION)
         else:
-            if org_id > 0:
+            if org_id and org_id > 0:
                 repo_id = seafile_api.org_add_group_owned_repo(
                     org_id, group_id, repo_name, permission, password,
                     ENCRYPTED_LIBRARY_VERSION)
@@ -117,14 +117,14 @@ class AdminGroups(APIView):
         group_name = group_name.strip()
         # Check whether group name is validate.
         if not validate_group_name(group_name):
-            error_msg = _(u'Group name can only contain letters, numbers, blank, hyphen, dot, single quote or underscore')
+            error_msg = _('Group name can only contain letters, numbers, blank, hyphen, dot, single quote or underscore')
             return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

         # Check whether group name is duplicated.
         pattern_matched_groups = ccnet_api.search_groups(group_name, -1, -1)
         for group in pattern_matched_groups:
             if group.group_name == group_name:
-                error_msg = _(u'There is already a group with that name.')
+                error_msg = _('There is already a group with that name.')
                 return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

         group_owner = request.data.get('group_owner', '')
@@ -200,7 +200,7 @@ class AdminGroup(APIView):

         old_owner = group.creator_name
         if new_owner == old_owner:
-            error_msg = _(u'User %s is already group owner.') % new_owner
+            error_msg = _('User %s is already group owner.') % new_owner
             return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

         # transfer a group
@@ -138,8 +138,8 @@ class AdminLibraries(APIView):
             has_next_page = False

         default_repo_id = get_system_default_repo_id()
-        repos_all = filter(lambda r: not r.is_virtual, repos_all)
-        repos_all = filter(lambda r: r.repo_id != default_repo_id, repos_all)
+        repos_all = [r for r in repos_all if not r.is_virtual]
+        repos_all = [r for r in repos_all if r.repo_id != default_repo_id]

         return_results = []
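These filter() calls became list comprehensions because Python 3's filter() returns a one-shot iterator: it has no len(), cannot be sliced for pagination, and is silently empty on a second pass. An illustrative sketch:

    repos_all = list(range(10))
    lazy = filter(lambda r: r % 2 == 0, repos_all)
    assert list(lazy) == [0, 2, 4, 6, 8]
    assert list(lazy) == []                       # exhausted; the Python 2 code assumed a list

    evens = [r for r in repos_all if r % 2 == 0]  # materialized, safe to reuse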
@@ -253,7 +253,7 @@ class AdminLibrary(APIView):
         try:
             org_id = seafile_api.get_org_id_by_repo_id(repo_id)
             related_usernames = get_related_users_by_repo(repo_id,
-                    org_id if org_id > 0 else None)
+                    org_id if org_id and org_id > 0 else None)
         except Exception as e:
             logger.error(e)
             org_id = -1
@@ -342,7 +342,7 @@ class AdminLibrary(APIView):
             return api_error(status.HTTP_404_NOT_FOUND, error_msg)

         if not new_owner_obj.permissions.can_add_repo():
-            error_msg = _(u'Transfer failed: role of %s is %s, can not add library.') % \
+            error_msg = _('Transfer failed: role of %s is %s, can not add library.') % \
                     (new_owner, new_owner_obj.role)
             return api_error(status.HTTP_403_FORBIDDEN, error_msg)

@@ -363,7 +363,7 @@ class AdminLibrary(APIView):
         repo_owner = seafile_api.get_repo_owner(repo_id)

         if new_owner == repo_owner:
-            error_msg = _(u"Library can not be transferred to owner.")
+            error_msg = _("Library can not be transferred to owner.")
             return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

         # get repo shared to user/group list
@@ -46,7 +46,7 @@ class AdminLicense(APIView):
             error_msg = 'path %s invalid.' % LICENSE_PATH
             return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

-        with open(LICENSE_PATH, 'w') as fd:
+        with open(LICENSE_PATH, 'wb') as fd:
             fd.write(license_file.read())

         ccnet_api.reload_license()
@@ -56,7 +56,7 @@ class AdminLoginBgImage(APIView):
         custom_login_bg_image_file = os.path.join(SEAHUB_DATA_ROOT,
                                                   custom_login_bg_image_path)
         # save login background image file to custom dir
-        with open(custom_login_bg_image_file, 'w') as fd:
+        with open(custom_login_bg_image_file, 'wb') as fd:
             fd.write(image_file.read())

         custom_symlink = os.path.join(MEDIA_ROOT,
@@ -27,7 +27,7 @@ class AdminOrgStatsTraffic(APIView):
                         'link-file-upload', 'link-file-download']
         init_count = [0] * 6
         init_data = get_init_data(start_time, end_time,
-                                  dict(zip(op_type_list, init_count)))
+                                  dict(list(zip(op_type_list, init_count))))

         for e in get_org_traffic_by_day(org_id, start_time, end_time,
                                         get_time_offset()):
@@ -35,7 +35,7 @@ class AdminOrgStatsTraffic(APIView):
             init_data[dt].update({op_type: count})

         res_data = []
-        for k, v in init_data.items():
+        for k, v in list(init_data.items()):
             res = {'datetime': datetime_to_isoformat_timestr(k)}
             res.update(v)
             res_data.append(res)
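zip() is likewise lazy in Python 3, which is why 2to3 conservatively wraps it in list() before passing it to dict(); strictly, dict() accepts the bare iterator too, so both spellings build the same mapping. For example:

    op_type_list = ['web-file-upload', 'web-file-download']
    init_count = [0] * 2
    assert dict(zip(op_type_list, init_count)) == dict(list(zip(op_type_list, init_count)))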
@@ -177,7 +177,7 @@ class AdminShares(APIView):
             if repo_owner == email:
                 result['failed'].append({
                     'user_email': email,
-                    'error_msg': _(u'User %s is already library owner.') % email
+                    'error_msg': _('User %s is already library owner.') % email
                 })

                 continue
@@ -203,7 +203,7 @@ class AdminShares(APIView):
             if has_shared_to_user(repo.repo_id, path, email):
                 result['failed'].append({
                     'email': email,
-                    'error_msg': _(u'This item has been shared to %s.') % email
+                    'error_msg': _('This item has been shared to %s.') % email
                 })
                 continue

@@ -260,7 +260,7 @@ class AdminShares(APIView):
             if has_shared_to_group(repo.repo_id, path, group_id):
                 result['failed'].append({
                     'group_name': group.group_name,
-                    'error_msg': _(u'This item has been shared to %s.') % group.group_name
+                    'error_msg': _('This item has been shared to %s.') % group.group_name
                 })
                 continue
@@ -98,7 +98,7 @@ class FileOperationsView(APIView):
             ops_modified_dict[e[0]] = e[2]

         res_data = []
-        for k, v in ops_added_dict.items():
+        for k, v in list(ops_added_dict.items()):
             res_data.append({'datetime': datetime_to_isoformat_timestr(k),
                              'added': v,
                              'visited': ops_visited_dict[k],
@@ -120,7 +120,7 @@ class TotalStorageView(APIView):
         init_data = get_init_data(start_time, end_time)
         for e in data:
             init_data[e[0]] = e[1]
-        for k, v in init_data.items():
+        for k, v in list(init_data.items()):
             res_data.append({'datetime': datetime_to_isoformat_timestr(k), 'total_storage': v})

         return Response(sorted(res_data, key=lambda x: x['datetime']))
@@ -139,7 +139,7 @@ class ActiveUsersView(APIView):
         init_data = get_init_data(start_time, end_time)
         for e in data:
             init_data[e[0]] = e[1]
-        for k, v in init_data.items():
+        for k, v in list(init_data.items()):
             res_data.append({'datetime': datetime_to_isoformat_timestr(k), 'count': v})

         return Response(sorted(res_data, key=lambda x: x['datetime']))
@@ -157,7 +157,7 @@ class SystemTrafficView(APIView):
                         'link-file-upload', 'link-file-download']
         init_count = [0] * 6
         init_data = get_init_data(start_time, end_time,
-                                  dict(zip(op_type_list, init_count)))
+                                  dict(list(zip(op_type_list, init_count))))

         for e in get_system_traffic_by_day(start_time, end_time,
                                            get_time_offset()):
@@ -165,7 +165,7 @@ class SystemTrafficView(APIView):
             init_data[dt].update({op_type: count})

         res_data = []
-        for k, v in init_data.items():
+        for k, v in list(init_data.items()):
             res = {'datetime': datetime_to_isoformat_timestr(k)}
             res.update(v)
             res_data.append(res)
@@ -272,7 +272,7 @@ class AdminUsers(APIView):
         if IS_EMAIL_CONFIGURED and SEND_EMAIL_ON_ADDING_SYSTEM_MEMBER:
             c = {'user': request.user.username, 'email': email, 'password': password}
             try:
-                send_html_email(_(u'You are invited to join %s') % get_site_name(),
+                send_html_email(_('You are invited to join %s') % get_site_name(),
                                 'sysadmin/user_add_email.html', c, None, [email])
             except Exception as e:
                 logger.error(str(e))
@@ -343,7 +343,7 @@ class AdminUser(APIView):
             username_by_login_id = Profile.objects.get_username_by_login_id(login_id)
             if username_by_login_id is not None:
                 return api_error(status.HTTP_400_BAD_REQUEST,
-                                 _(u"Login id %s already exists." % login_id))
+                                 _("Login id %s already exists." % login_id))

         contact_email = request.data.get("contact_email", None)
         if contact_email is not None and contact_email.strip() != '':
@@ -33,7 +33,6 @@ DEPARTMENT_OWNER = 'system admin'


 # # uid = corpid + '_' + userid
-# from social_django.models import UserSocialAuth
 # get departments: https://work.weixin.qq.com/api/doc#90000/90135/90208
 # get members: https://work.weixin.qq.com/api/doc#90000/90135/90200

@@ -147,7 +147,7 @@ class CopyMoveTaskView(APIView):

         # check if above quota for dst repo
         if seafile_api.check_quota(dst_repo_id, current_size) < 0:
-            return api_error(HTTP_443_ABOVE_QUOTA, _(u"Out of quota."))
+            return api_error(HTTP_443_ABOVE_QUOTA, _("Out of quota."))

         new_dirent_name = check_filename_with_rename(dst_repo_id,
                                                      dst_parent_dir, src_dirent_name)
@@ -162,7 +162,7 @@ class CopyMoveTaskView(APIView):
         if dirent_type == 'dir' and src_repo_id == dst_repo_id and \
                 dst_parent_dir.startswith(src_dirent_path + '/'):

-            error_msg = _(u'Can not move directory %(src)s to its subdirectory %(des)s') \
+            error_msg = _('Can not move directory %(src)s to its subdirectory %(des)s') \
                 % {'src': escape(src_dirent_path), 'des': escape(dst_parent_dir)}
             return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

@@ -83,8 +83,8 @@ def get_dir_file_info_list(username, request_type, repo_obj, parent_dir,
     # Use dict to reduce memcache fetch cost in large for-loop.
     nickname_dict = {}
     contact_email_dict = {}
-    modifier_set = set([x.modifier for x in file_list])
-    lock_owner_set = set([x.lock_owner for x in file_list])
+    modifier_set = {x.modifier for x in file_list}
+    lock_owner_set = {x.lock_owner for x in file_list}
     for e in modifier_set | lock_owner_set:
         if e not in nickname_dict:
             nickname_dict[e] = email2nickname(e)
@@ -167,8 +167,8 @@ def get_dir_file_info_list(username, request_type, repo_obj, parent_dir,

         file_info_list.append(file_info)

-    dir_info_list.sort(lambda x, y: cmp(x['name'].lower(), y['name'].lower()))
-    file_info_list.sort(lambda x, y: cmp(x['name'].lower(), y['name'].lower()))
+    dir_info_list.sort(key=lambda x: x['name'].lower())
+    file_info_list.sort(key=lambda x: x['name'].lower())

     return dir_info_list, file_info_list

@@ -445,7 +445,7 @@ class DirView(APIView):
                 dir_info = self.get_dir_info(repo_id, new_dir_path)
                 resp = Response(dir_info)
                 return resp
-            except SearpcError, e:
+            except SearpcError as e:
                 logger.error(e)
                 error_msg = 'Internal Server Error'
                 return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
@@ -313,7 +313,7 @@ class DirSharedItemsEndpoint(APIView):
                 if not is_valid_username(to_user):
                     result['failed'].append({
                         'email': to_user,
-                        'error_msg': _(u'username invalid.')
+                        'error_msg': _('username invalid.')
                     })
                     continue

@@ -322,14 +322,14 @@ class DirSharedItemsEndpoint(APIView):
                 except User.DoesNotExist:
                     result['failed'].append({
                         'email': to_user,
-                        'error_msg': _(u'User %s not found.') % to_user
+                        'error_msg': _('User %s not found.') % to_user
                     })
                     continue

                 if self.has_shared_to_user(request, repo_id, path, to_user):
                     result['failed'].append({
                         'email': to_user,
-                        'error_msg': _(u'This item has been shared to %s.') % email2nickname(to_user)
+                        'error_msg': _('This item has been shared to %s.') % email2nickname(to_user)
                     })
                     continue

@@ -428,7 +428,7 @@ class DirSharedItemsEndpoint(APIView):
                 if self.has_shared_to_group(request, repo_id, path, gid):
                     result['failed'].append({
                         'group_name': group.group_name,
-                        'error_msg': _(u'This item has been shared to %s.') % group.group_name
+                        'error_msg': _('This item has been shared to %s.') % group.group_name
                     })
                     continue

@@ -67,7 +67,7 @@ class DraftReviewerView(APIView):
             if not is_valid_username(reviewer):
                 result['failed'].append({
                     'email': reviewer,
-                    'error_msg': _(u'username invalid.')
+                    'error_msg': _('username invalid.')
                 })
                 continue

@@ -76,7 +76,7 @@ class DraftReviewerView(APIView):
             except User.DoesNotExist:
                 result['failed'].append({
                     'email': reviewer,
-                    'error_msg': _(u'User %s not found.') % reviewer
+                    'error_msg': _('User %s not found.') % reviewer
                 })
                 continue

@@ -93,7 +93,7 @@ class DraftReviewerView(APIView):
             origin_file_path = posixpath.join(uuid.parent_path, uuid.filename)
             # check perm
             if seafile_api.check_permission_by_path(d.origin_repo_id, origin_file_path, reviewer) != 'rw':
-                error_msg = _(u'Permission denied.')
+                error_msg = _('Permission denied.')
                 result['failed'].append({
                     'email': reviewer,
                     'error_msg': error_msg
@@ -101,7 +101,7 @@ class DraftReviewerView(APIView):
                 continue

             if DraftReviewer.objects.filter(draft=d, reviewer=reviewer):
-                error_msg = u'Reviewer %s has existed.' % reviewer
+                error_msg = 'Reviewer %s has existed.' % reviewer
                 result['failed'].append({
                     'email': reviewer,
                     'error_msg': error_msg
@@ -50,7 +50,7 @@ class WorkspacesView(APIView):
         if is_org_context(request):
             org_id = request.user.org.org_id

-        if org_id > 0:
+        if org_id and org_id > 0:
             groups = ccnet_api.get_org_groups_by_user(org_id, username)
         else:
             groups = ccnet_api.get_groups(username, return_ancestors=True)
@@ -80,7 +80,7 @@ class WorkspacesView(APIView):
             return api_error(status.HTTP_403_FORBIDDEN, error_msg)

         try:
-            if org_id > 0:
+            if org_id and org_id > 0:
                 repo_id = seafile_api.create_org_repo(
                     _("My Workspace"),
                     _("My Workspace"),
@@ -166,7 +166,7 @@ class DTablesView(APIView):
             org_id = request.user.org.org_id

         try:
-            if org_id > 0:
+            if org_id and org_id > 0:
                 repo_id = seafile_api.create_org_repo(
                     _("My Workspace"),
                     _("My Workspace"),
@@ -199,7 +199,7 @@ class FileView(APIView):

             try:
                 seafile_api.post_empty_file(repo_id, parent_dir, new_file_name, username)
-            except SearpcError, e:
+            except SearpcError as e:
                 logger.error(e)
                 error_msg = 'Internal Server Error'
                 return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
@@ -572,7 +572,7 @@ class FileView(APIView):
             expire = request.data.get('expire', FILE_LOCK_EXPIRATION_DAYS)
             try:
                 seafile_api.lock_file(repo_id, path, username, expire)
-            except SearpcError, e:
+            except SearpcError as e:
                 logger.error(e)
                 error_msg = 'Internal Server Error'
                 return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
@@ -587,7 +587,7 @@ class FileView(APIView):
             # unlock file
             try:
                 seafile_api.unlock_file(repo_id, path)
-            except SearpcError, e:
+            except SearpcError as e:
                 logger.error(e)
                 error_msg = 'Internal Server Error'
                 return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
@@ -605,7 +605,7 @@ class FileView(APIView):
             # refresh lock file
             try:
                 seafile_api.refresh_file_lock(repo_id, path)
-            except SearpcError, e:
+            except SearpcError as e:
                 logger.error(e)
                 error_msg = 'Internal Server Error'
                 return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
@@ -253,7 +253,7 @@ class FileTagsView(APIView):
         name_list = [name.strip() for name in names.split(",")]
         for name in name_list:
             if not check_tagname(name):
-                error_msg = _(u'Tag can only contain letters, numbers, dot, hyphen or underscore.')
+                error_msg = _('Tag can only contain letters, numbers, dot, hyphen or underscore.')
                 return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

         FileTag.objects.delete_all_filetag_by_path(repo_id, parent_path,
@@ -277,7 +277,7 @@ class FileTagsView(APIView):
         name_list = [name.strip() for name in names.split(",")]
         for name in name_list:
             if not check_tagname(name):
-                error_msg = _(u'Tag can only contain letters, numbers, dot, hyphen or underscore.')
+                error_msg = _('Tag can only contain letters, numbers, dot, hyphen or underscore.')
                 return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

         res_tag_list = []
@@ -297,10 +297,10 @@ class FileTagView(APIView):
     @check_parameter
     def delete(self, request, repo_id, parent_path, filename, name, is_dir):
         if not name or not check_tagname(name):
-            error_msg = _(u'Tag can only contain letters, numbers, dot, hyphen or underscore.')
+            error_msg = _('Tag can only contain letters, numbers, dot, hyphen or underscore.')
             return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
         if FileTag.objects.delete_file_tag_by_path(repo_id,
-                parent_path,filename,is_dir,name):
+                parent_path, filename, is_dir, name):
             return Response({"success": True}, status=status.HTTP_200_OK)
         else:
             return Response({"success": True}, status=status.HTTP_202_ACCEPTED)
@@ -82,7 +82,7 @@ class GroupLibraries(APIView):
         else:
             group_repos = seafile_api.get_repos_by_group(group_id)

-        group_repos.sort(lambda x, y: cmp(y.last_modified, x.last_modified))
+        group_repos.sort(key=lambda x: x.last_modified, reverse=True)

         # get repo id owner dict
         all_repo_owner = []
@@ -91,13 +91,13 @@ class GroupMembers(APIView):

         try:
             if is_group_member(group_id, email):
-                error_msg = _(u'User %s is already a group member.') % email2nickname(email)
+                error_msg = _('User %s is already a group member.') % email2nickname(email)
                 return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

             if is_org_context(request):
                 org_id = request.user.org.org_id
                 if not ccnet_api.org_user_exists(org_id, email):
-                    error_msg = _(u'User %s not found in organization.') % email2nickname(email)
+                    error_msg = _('User %s not found in organization.') % email2nickname(email)
                     return api_error(status.HTTP_404_NOT_FOUND, error_msg)

             ccnet_api.group_add_member(group_id, username, email)
@@ -290,7 +290,7 @@ class GroupMembersBulk(APIView):
                 result['failed'].append({
                     'email': email,
                     'email_name': email_name,
-                    'error_msg': _(u'User %s is already a group member.') % email_name
+                    'error_msg': _('User %s is already a group member.') % email_name
                 })
                 continue

@@ -300,7 +300,7 @@ class GroupMembersBulk(APIView):
                 result['failed'].append({
                     'email': email,
                     'email_name': email_name,
-                    'error_msg': _(u'User %s not found in organization.') % email_name
+                    'error_msg': _('User %s not found in organization.') % email_name
                 })
                 continue

@@ -369,7 +369,7 @@ class GroupOwnedLibraryUserFolderPermission(APIView):
             if permission:
                 result['failed'].append({
                     'user_email': user,
-                    'error_msg': _(u'Permission already exists.')
+                    'error_msg': _('Permission already exists.')
                 })
                 continue

@@ -641,7 +641,7 @@ class GroupOwnedLibraryGroupFolderPermission(APIView):
             if permission:
                 result['failed'].append({
                     'group_id': group_id,
-                    'error_msg': _(u'Permission already exists.')
+                    'error_msg': _('Permission already exists.')
                 })
                 continue

@@ -914,7 +914,7 @@ class GroupOwnedLibraryUserShare(APIView):
             if not is_valid_username(to_user):
                 result['failed'].append({
                     'email': to_user,
-                    'error_msg': _(u'username invalid.')
+                    'error_msg': _('username invalid.')
                 })
                 continue

@@ -923,14 +923,14 @@ class GroupOwnedLibraryUserShare(APIView):
             except User.DoesNotExist:
                 result['failed'].append({
                     'email': to_user,
-                    'error_msg': _(u'User %s not found.') % to_user
+                    'error_msg': _('User %s not found.') % to_user
                 })
                 continue

             if self.has_shared_to_user(request, repo_id, path, to_user):
                 result['failed'].append({
                     'email': to_user,
-                    'error_msg': _(u'This item has been shared to %s.') % to_user
+                    'error_msg': _('This item has been shared to %s.') % to_user
                 })
                 continue

@@ -1198,7 +1198,7 @@ class GroupOwnedLibraryGroupShare(APIView):
             if self.has_shared_to_group(request, repo_id, path, gid):
                 result['failed'].append({
                     'group_name': group.group_name,
-                    'error_msg': _(u'This item has been shared to %s.') % group.group_name
+                    'error_msg': _('This item has been shared to %s.') % group.group_name
                 })
                 continue

@@ -41,7 +41,7 @@ logger = logging.getLogger(__name__)

 def get_group_admins(group_id):
     members = seaserv.get_group_members(group_id)
-    admin_members = filter(lambda m: m.is_staff, members)
+    admin_members = [m for m in members if m.is_staff]

     admins = []
     for u in admin_members:
@@ -208,12 +208,12 @@ class Groups(APIView):

         # Check whether group name is validate.
         if not validate_group_name(group_name):
-            error_msg = _(u'Group name can only contain letters, numbers, blank, hyphen, dot, single quote or underscore')
+            error_msg = _('Group name can only contain letters, numbers, blank, hyphen, dot, single quote or underscore')
             return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

         # Check whether group name is duplicated.
         if check_group_name_conflict(request, group_name):
-            error_msg = _(u'There is already a group with that name.')
+            error_msg = _('There is already a group with that name.')
             return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

         # create group.
@@ -284,12 +284,12 @@ class Group(APIView):

         # Check whether group name is validate.
         if not validate_group_name(new_group_name):
-            error_msg = _(u'Group name can only contain letters, numbers, blank, hyphen or underscore')
+            error_msg = _('Group name can only contain letters, numbers, blank, hyphen or underscore')
             return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

         # Check whether group name is duplicated.
         if check_group_name_conflict(request, new_group_name):
-            error_msg = _(u'There is already a group with that name.')
+            error_msg = _('There is already a group with that name.')
             return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

         seaserv.ccnet_threaded_rpc.set_group_name(group_id, new_group_name)
@@ -314,7 +314,7 @@ class Group(APIView):
             return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

         if is_group_owner(group_id, new_owner):
-            error_msg = _(u'User %s is already group owner.') % new_owner
+            error_msg = _('User %s is already group owner.') % new_owner
             return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

         # transfer a group
@@ -169,7 +169,7 @@ class MoveFolderMergeView(APIView):
                                                    src_repo.version, dir_id)

         if seafile_api.check_quota(dst_repo_id, current_size) < 0:
-            return api_error(HTTP_443_ABOVE_QUOTA, _(u"Out of quota."))
+            return api_error(HTTP_443_ABOVE_QUOTA, _("Out of quota."))

         username = request.user.username
         move_folder_with_merge(username,
@@ -44,7 +44,7 @@ class QueryCopyMoveProgressView(APIView):

         # res can be None
         if not res:
-            error_msg = _(u'Error')
+            error_msg = _('Error')
             return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

         result = {}
@@ -215,7 +215,7 @@ class RelatedFileView(APIView):

         # permission check
         if check_folder_permission(request, repo_id, '/') != PERMISSION_READ_WRITE:
-            print check_folder_permission(request, repo_id, file_path)
+            print(check_folder_permission(request, repo_id, file_path))
             error_msg = 'Permission denied.'
             return api_error(status.HTTP_403_FORBIDDEN, error_msg)

@@ -129,14 +129,14 @@ class RepoHistory(APIView):
             revision_tags = []

         for tag in revision_tags:
-            if commit_tag_dict.has_key(tag.revision_id):
+            if tag.revision_id in commit_tag_dict:
                 commit_tag_dict[tag.revision_id].append(tag.tag.name)
             else:
                 commit_tag_dict[tag.revision_id] = [tag.tag.name]

         for item in items:
             item['tags'] = []
-            for commit_id, tags in commit_tag_dict.items():
+            for commit_id, tags in list(commit_tag_dict.items()):
                 if commit_id == item['commit_id']:
                     item['tags'] = tags

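dict.has_key() was removed in Python 3; membership is spelled with the in operator, as this hunk does. For example:

    commit_tag_dict = {'rev1': ['v1.0']}
    assert 'rev1' in commit_tag_dict
    # commit_tag_dict.has_key('rev1')  -> AttributeError on Python 3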
@@ -41,7 +41,7 @@ class RepoSendNewPassword(APIView):

         if not ENABLE_RESET_ENCRYPTED_REPO_PASSWORD or \
                 not IS_EMAIL_CONFIGURED:
-            error_msg = _(u'Feature disabled.')
+            error_msg = _('Feature disabled.')
             return api_error(status.HTTP_403_FORBIDDEN, error_msg)

         # resource check
@@ -62,14 +62,14 @@ class RepoSendNewPassword(APIView):

         secret_key = RepoSecretKey.objects.get_secret_key(repo_id)
         if not secret_key:
-            error_msg = _(u"Can not reset this library's password.")
+            error_msg = _("Can not reset this library's password.")
             return api_error(HTTP_520_OPERATION_FAILED, error_msg)

         new_password = get_random_string(10)
         try:
             seafile_api.reset_repo_passwd(repo_id, username, secret_key, new_password)
             content = {'repo_name': repo.name, 'password': new_password,}
-            send_html_email(_(u'New password of library %s') % repo.name,
+            send_html_email(_('New password of library %s') % repo.name,
                             'snippets/reset_repo_password.html', content,
                             None, [email2contact_email(username)])
         except Exception as e:
@@ -64,13 +64,13 @@ class RepoSetPassword(APIView):
                 error_msg = 'Bad arguments'
                 return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
             elif e.msg == 'Incorrect password':
-                error_msg = _(u'Wrong password')
+                error_msg = _('Wrong password')
                 return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
             elif e.msg == 'Internal server error':
-                error_msg = _(u'Internal server error')
+                error_msg = _('Internal server error')
                 return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
             else:
-                error_msg = _(u'Decrypt library error')
+                error_msg = _('Decrypt library error')
                 return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

         if ENABLE_RESET_ENCRYPTED_REPO_PASSWORD:
@@ -124,7 +124,7 @@ class RepoSetPassword(APIView):
             seafile_api.change_repo_passwd(repo_id, old_password, new_password, username)
         except Exception as e:
             if e.msg == 'Incorrect password':
-                error_msg = _(u'Wrong old password')
+                error_msg = _('Wrong old password')
                 return api_error(status.HTTP_403_FORBIDDEN, error_msg)
             else:
                 logger.error(e)
@@ -157,7 +157,7 @@ class RepoSetPassword(APIView):

         secret_key = RepoSecretKey.objects.get_secret_key(repo_id)
         if not secret_key:
-            error_msg = _(u"Can not reset this library's password.")
+            error_msg = _("Can not reset this library's password.")
             return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

         try:
@@ -70,7 +70,7 @@ class RepoTagsView(APIView):
         for repo_tag in repo_tag_list:
             res = repo_tag.to_dict()
             repo_tag_id = res["repo_tag_id"]
-            if files_count.has_key(repo_tag_id):
+            if repo_tag_id in files_count:
                 res["files_count"] = files_count[repo_tag_id]
             else:
                 res["files_count"] = 0
@@ -112,8 +112,8 @@ class RepoTrash(APIView):
             entries_without_scan_stat = deleted_entries[0:-1]

             # sort entry by delete time
-            entries_without_scan_stat.sort(lambda x, y : cmp(y.delete_time,
-                x.delete_time))
+            entries_without_scan_stat.sort(
+                key=lambda x: x.delete_time, reverse=True)

             for item in entries_without_scan_stat:
                 item_info = self.get_item_info(item)
@@ -28,7 +28,7 @@ from seahub.utils.repo import get_repo_owner, is_repo_admin, \

 from seahub.settings import ENABLE_STORAGE_CLASSES

-from seaserv import seafile_api, send_message
+from seaserv import seafile_api

 logger = logging.getLogger(__name__)

@@ -56,7 +56,7 @@ class ReposView(APIView):
         request_type_list = request.GET.getlist('type', "")
         if not request_type_list:
             # set all to True, no filter applied
-            filter_by = filter_by.fromkeys(filter_by.iterkeys(), True)
+            filter_by = filter_by.fromkeys(iter(filter_by.keys()), True)

         for request_type in request_type_list:
             request_type = request_type.strip()
@@ -90,14 +90,14 @@ class ReposView(APIView):
                     ret_corrupted=True)

             # Reduce memcache fetch ops.
-            modifiers_set = set([x.last_modifier for x in owned_repos])
+            modifiers_set = {x.last_modifier for x in owned_repos}
             for e in modifiers_set:
                 if e not in contact_email_dict:
                     contact_email_dict[e] = email2contact_email(e)
                 if e not in nickname_dict:
                     nickname_dict[e] = email2nickname(e)

-            owned_repos.sort(lambda x, y: cmp(y.last_modify, x.last_modify))
+            owned_repos.sort(key=lambda x: x.last_modify, reverse=True)
             for r in owned_repos:

                 # do not return virtual repos
@@ -141,15 +141,15 @@ class ReposView(APIView):
                     get_repos_with_admin_permission(email)

             # Reduce memcache fetch ops.
-            owners_set = set([x.user for x in shared_repos])
-            modifiers_set = set([x.last_modifier for x in shared_repos])
+            owners_set = {x.user for x in shared_repos}
+            modifiers_set = {x.last_modifier for x in shared_repos}
             for e in owners_set | modifiers_set:
                 if e not in contact_email_dict:
                     contact_email_dict[e] = email2contact_email(e)
                 if e not in nickname_dict:
                     nickname_dict[e] = email2nickname(e)

-            shared_repos.sort(lambda x, y: cmp(y.last_modify, x.last_modify))
+            shared_repos.sort(key=lambda x: x.last_modify, reverse=True)
             for r in shared_repos:

                 owner_email = r.user
@@ -198,11 +198,11 @@ class ReposView(APIView):
             else:
                 group_repos = seafile_api.get_group_repos_by_user(email)

-            group_repos.sort(lambda x, y: cmp(y.last_modify, x.last_modify))
+            group_repos.sort(key=lambda x: x.last_modify, reverse=True)

             # Reduce memcache fetch ops.
-            share_from_set = set([x.user for x in group_repos])
-            modifiers_set = set([x.last_modifier for x in group_repos])
+            share_from_set = {x.user for x in group_repos}
+            modifiers_set = {x.last_modifier for x in group_repos}
             for e in modifiers_set | share_from_set:
                 if e not in contact_email_dict:
                     contact_email_dict[e] = email2contact_email(e)
@@ -243,8 +243,8 @@ class ReposView(APIView):

             # Reduce memcache fetch ops.
             owner_set = set(all_repo_owner)
-            share_from_set = set([x.user for x in public_repos])
-            modifiers_set = set([x.last_modifier for x in public_repos])
+            share_from_set = {x.user for x in public_repos}
+            modifiers_set = {x.last_modifier for x in public_repos}
             for e in modifiers_set | share_from_set | owner_set:
                 if e not in contact_email_dict:
                     contact_email_dict[e] = email2contact_email(e)
@@ -276,7 +276,7 @@ class ReposView(APIView):
             timestamp = utc_dt.strftime('%Y-%m-%d %H:%M:%S')
             org_id = request.user.org.org_id if is_org_context(request) else -1
             try:
-                send_message('seahub.stats', 'user-login\t%s\t%s\t%s' % (email, timestamp, org_id))
+                seafile_api.publish_event('seahub.stats', 'user-login\t%s\t%s\t%s' % (email, timestamp, org_id))
             except Exception as e:
                 logger.error('Error when sending user-login message: %s' % str(e))

@@ -488,7 +488,7 @@ class ReposBatchCopyDirView(APIView):

         # check if above quota for dst repo
         if seafile_api.check_quota(dst_repo_id, total_size) < 0:
-            return api_error(HTTP_443_ABOVE_QUOTA, _(u"Out of quota."))
+            return api_error(HTTP_443_ABOVE_QUOTA, _("Out of quota."))

         result = {}
         result['failed'] = []
@@ -1249,18 +1249,18 @@ class ReposAsyncBatchMoveItemView(APIView):
         locked_files = get_locked_files_by_dir(request, src_repo_id, src_parent_dir)
         for dirent in src_dirents:
             # file is locked and lock owner is not current user
-            if dirent in locked_files.keys() and \
+            if dirent in list(locked_files.keys()) and \
                     locked_files[dirent] != username:
-                error_msg = _(u'File %s is locked.') % dirent
+                error_msg = _('File %s is locked.') % dirent
                 return api_error(status.HTTP_403_FORBIDDEN, error_msg)

         # check sub folder permission
         folder_permission_dict = get_sub_folder_permission_by_dir(request,
                 src_repo_id, src_parent_dir)
         for dirent in src_dirents:
-            if dirent in folder_permission_dict.keys() and \
+            if dirent in list(folder_permission_dict.keys()) and \
                     folder_permission_dict[dirent] != 'rw':
-                error_msg = _(u"Can't move folder %s, please check its permission.") % dirent
+                error_msg = _("Can't move folder %s, please check its permission.") % dirent
                 return api_error(status.HTTP_403_FORBIDDEN, error_msg)

         # move file
@@ -1468,18 +1468,18 @@ class ReposSyncBatchMoveItemView(APIView):
         locked_files = get_locked_files_by_dir(request, src_repo_id, src_parent_dir)
         for dirent in src_dirents:
             # file is locked and lock owner is not current user
-            if dirent in locked_files.keys() and \
+            if dirent in list(locked_files.keys()) and \
                     locked_files[dirent] != username:
-                error_msg = _(u'File %s is locked.') % dirent
+                error_msg = _('File %s is locked.') % dirent
                 return api_error(status.HTTP_403_FORBIDDEN, error_msg)

         # check sub folder permission
         folder_permission_dict = get_sub_folder_permission_by_dir(request,
                 src_repo_id, src_parent_dir)
         for dirent in src_dirents:
-            if dirent in folder_permission_dict.keys() and \
+            if dirent in list(folder_permission_dict.keys()) and \
                     folder_permission_dict[dirent] != 'rw':
-                error_msg = _(u"Can't move folder %s, please check its permission.") % dirent
+                error_msg = _("Can't move folder %s, please check its permission.") % dirent
                 return api_error(status.HTTP_403_FORBIDDEN, error_msg)

         # move file
@@ -1557,17 +1557,17 @@ class ReposBatchDeleteItemView(APIView):
         locked_files = get_locked_files_by_dir(request, repo_id, parent_dir)
         for dirent in dirents:
             # file is locked and lock owner is not current user
-            if dirent in locked_files.keys() and \
+            if dirent in list(locked_files.keys()) and \
                     locked_files[dirent] != username:
-                error_msg = _(u'File %s is locked.') % dirent
+                error_msg = _('File %s is locked.') % dirent
                 return api_error(status.HTTP_403_FORBIDDEN, error_msg)

         # check sub folder permission
         folder_permission_dict = get_sub_folder_permission_by_dir(request, repo_id, parent_dir)
         for dirent in dirents:
-            if dirent in folder_permission_dict.keys() and \
+            if dirent in list(folder_permission_dict.keys()) and \
                     folder_permission_dict[dirent] != 'rw':
-                error_msg = _(u"Can't delete folder %s, please check its permission.") % dirent
+                error_msg = _("Can't delete folder %s, please check its permission.") % dirent
                 return api_error(status.HTTP_403_FORBIDDEN, error_msg)

         # delete file
@@ -120,7 +120,7 @@ class SearchUser(APIView):
         ## search finished, now filter out some users

         # remove duplicate emails
-        email_list = {}.fromkeys(email_list).keys()
+        email_list = list({}.fromkeys(email_list).keys())

         email_result = []

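Note: `{}.fromkeys(email_list)` de-duplicates while keeping first-seen order (guaranteed for dicts since Python 3.7), and the `list()` call is what actually changed: `keys()` now returns a view rather than a list. Sketch:

    emails = ['a@x.com', 'b@x.com', 'a@x.com']
    deduped = list(dict.fromkeys(emails))
    print(deduped)   # ['a@x.com', 'b@x.com']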
@@ -30,7 +30,7 @@ class SendShareLinkView(APIView):
     def post(self, request):

         if not IS_EMAIL_CONFIGURED:
-            error_msg = _(u'Sending shared link failed. Email service is not properly configured, please contact administrator.')
+            error_msg = _('Sending shared link failed. Email service is not properly configured, please contact administrator.')
             return api_error(status.HTTP_403_FORBIDDEN, error_msg)

         # check args
@@ -97,11 +97,11 @@ class SendShareLinkView(APIView):
             template = 'shared_link_email.html'

             if link.s_type == 'f':
-                c['file_shared_type'] = _(u"file")
-                title = _(u'A file is shared to you on %s') % get_site_name()
+                c['file_shared_type'] = _("file")
+                title = _('A file is shared to you on %s') % get_site_name()
             else:
-                c['file_shared_type'] = _(u"directory")
-                title = _(u'A directory is shared to you on %s') % get_site_name()
+                c['file_shared_type'] = _("directory")
+                title = _('A directory is shared to you on %s') % get_site_name()

             # send email
             try:

@@ -29,7 +29,7 @@ class SendUploadLinkView(APIView):
     def post(self, request):

         if not IS_EMAIL_CONFIGURED:
-            error_msg = _(u'Sending shared link failed. Email service is not properly configured, please contact administrator.')
+            error_msg = _('Sending shared link failed. Email service is not properly configured, please contact administrator.')
             return api_error(status.HTTP_403_FORBIDDEN, error_msg)

         # check args
@@ -70,7 +70,7 @@ class SendUploadLinkView(APIView):

             if not is_valid_email(to_email):
                 failed_info['email'] = to_email
-                failed_info['error_msg'] = _(u'email invalid.')
+                failed_info['error_msg'] = _('email invalid.')
                 result['failed'].append(failed_info)
                 continue

@@ -92,7 +92,7 @@ class SendUploadLinkView(APIView):
             reply_to = None

             c['shared_upload_link'] = gen_shared_upload_link(token)
-            title = _(u'An upload link is shared to you on %s') % get_site_name()
+            title = _('An upload link is shared to you on %s') % get_site_name()
             template = 'shared_upload_link_email.html'

             # send email
@@ -102,7 +102,7 @@ class SendUploadLinkView(APIView):
             except Exception as e:
                 logger.error(e)
                 failed_info['email'] = to_email
-                failed_info['error_msg'] = _(u'Internal Server Error')
+                failed_info['error_msg'] = _('Internal Server Error')
                 result['failed'].append(failed_info)

         return Response(result)

@@ -93,7 +93,7 @@ def check_permissions_arg(request):
     if permissions is not None:
         if isinstance(permissions, dict):
             perm_dict = permissions
-        elif isinstance(permissions, basestring):
+        elif isinstance(permissions, str):
             import json
             try:
                 perm_dict = json.loads(permissions)
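Note: `basestring` no longer exists on Python 3, where all text is `str`. If a caller could still pass raw `bytes`, that case has to be handled explicitly; a hedged sketch with a hypothetical input:

    import json

    permissions = b'{"can_edit": true}'   # hypothetical raw input
    if isinstance(permissions, bytes):    # bytes must be decoded first
        permissions = permissions.decode('utf-8')
    if isinstance(permissions, str):      # Python 3 replacement for basestring
        perm_dict = json.loads(permissions)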
@@ -164,7 +164,7 @@ class ShareLinks(APIView):
                 return api_error(status.HTTP_404_NOT_FOUND, error_msg)

             # filter share links by repo
-            fileshares = filter(lambda fs: fs.repo_id == repo_id, fileshares)
+            fileshares = [fs for fs in fileshares if fs.repo_id == repo_id]

             path = request.GET.get('path', None)
             if path:
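Note: on Python 3 `filter()` returns a one-shot lazy iterator, so code that later calls `len()`, indexes, or filters the result again would silently misbehave; the list comprehension restores the old eager list. Sketch:

    links = [{'repo_id': 'r1'}, {'repo_id': 'r2'}]
    eager = [fs for fs in links if fs['repo_id'] == 'r1']   # a real, reusable list
    assert len(eager) == 1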
@@ -189,7 +189,7 @@ class ShareLinks(APIView):
             if s_type == 'd' and path[-1] != '/':
                 path = path + '/'

-            fileshares = filter(lambda fs: fs.path == path, fileshares)
+            fileshares = [fs for fs in fileshares if fs.path == path]

         links_info = []
         for fs in fileshares:
@@ -199,11 +199,11 @@ class ShareLinks(APIView):
         if len(links_info) == 1:
             result = links_info
         else:
-            dir_list = filter(lambda x: x['is_dir'], links_info)
-            file_list = filter(lambda x: not x['is_dir'], links_info)
+            dir_list = [x for x in links_info if x['is_dir']]
+            file_list = [x for x in links_info if not x['is_dir']]

-            dir_list.sort(lambda x, y: cmp(x['obj_name'], y['obj_name']))
-            file_list.sort(lambda x, y: cmp(x['obj_name'], y['obj_name']))
+            dir_list.sort(key=lambda x: x['obj_name'])
+            file_list.sort(key=lambda x: x['obj_name'])

             result = dir_list + file_list

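Note: `list.sort()` dropped its `cmp` argument on Python 3. A `key` function is the replacement and is also cheaper: it is evaluated once per element rather than once per comparison. Sketch:

    items = [{'obj_name': 'b'}, {'obj_name': 'a'}]
    items.sort(key=lambda x: x['obj_name'])   # replaces cmp(x['obj_name'], y['obj_name'])
    print([x['obj_name'] for x in items])     # ['a', 'b']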
@@ -297,7 +297,7 @@ class ShareLinks(APIView):
         if s_type == 'f':
             fs = FileShare.objects.get_file_link_by_path(username, repo_id, path)
             if fs:
-                error_msg = _(u'Share link %s already exists.' % fs.token)
+                error_msg = _('Share link %s already exists.' % fs.token)
                 return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
             fs = FileShare.objects.create_file_link(username, repo_id, path,
                                                     password, expire_date,
@@ -306,7 +306,7 @@ class ShareLinks(APIView):
         elif s_type == 'd':
             fs = FileShare.objects.get_dir_link_by_path(username, repo_id, path)
             if fs:
-                error_msg = _(u'Share link %s already exists.' % fs.token)
+                error_msg = _('Share link %s already exists.' % fs.token)
                 return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
             fs = FileShare.objects.create_dir_link(username, repo_id, path,
                                                    password, expire_date,
@@ -454,7 +454,7 @@ class ShareLinkOnlineOfficeLock(APIView):
         # refresh lock file
         try:
             seafile_api.refresh_file_lock(repo_id, path)
-        except SearpcError, e:
+        except SearpcError as e:
             logger.error(e)
             error_msg = 'Internal Server Error'
             return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

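Note: the comma form `except SearpcError, e:` is a hard syntax error on Python 3; `except ... as e:` is the only accepted spelling (valid since Python 2.6, so the change is backward compatible). Sketch:

    def risky():
        raise ValueError('bad input')

    try:
        risky()
    except ValueError as e:   # 'except ValueError, e:' would not even parse on py3
        print(e)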
@@ -49,7 +49,7 @@ class SharedFolders(APIView):
             return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

         returned_result = []
-        shared_repos.sort(lambda x, y: cmp(x.repo_name, y.repo_name))
+        shared_repos.sort(key=lambda x: x.repo_name)
         for repo in shared_repos:
             if not repo.is_virtual:
                 continue

@@ -55,7 +55,7 @@ class SharedRepos(APIView):
             return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

         returned_result = []
-        shared_repos.sort(lambda x, y: cmp(x.repo_name, y.repo_name))
+        shared_repos.sort(key=lambda x: x.repo_name)
         usernames = []
         gids = []
         for repo in shared_repos:
@@ -309,10 +309,10 @@ class SharedRepo(APIView):
         if share_type == 'public':
             pub_repos = []
             if org_id:
-                pub_repos = seaserv.list_org_inner_pub_repos(org_id, username)
+                pub_repos = seafile_api.list_org_inner_pub_repos(org_id)

             if not request.cloud_mode:
-                pub_repos = seaserv.list_inner_pub_repos(username)
+                pub_repos = seafile_api.get_inner_pub_repo_list()

             try:
                 if org_id:

@@ -105,9 +105,9 @@ class StarredItems(APIView):
             else:
                 starred_files.append(item_info)

-        starred_repos.sort(lambda x, y: cmp(y['mtime'], x['mtime']))
-        starred_folders.sort(lambda x, y: cmp(y['mtime'], x['mtime']))
-        starred_files.sort(lambda x, y: cmp(y['mtime'], x['mtime']))
+        starred_repos.sort(key=lambda x: x['mtime'], reverse=True)
+        starred_folders.sort(key=lambda x: x['mtime'], reverse=True)
+        starred_files.sort(key=lambda x: x['mtime'], reverse=True)

         return Response({'starred_item_list': starred_repos + \
                 starred_folders + starred_files})

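Note: the old swapped-argument idiom `cmp(y[...], x[...])` encoded "descending"; with a key function that intent moves into `reverse=True`, which states it directly. Sketch:

    repos = [{'mtime': 10}, {'mtime': 30}]
    repos.sort(key=lambda x: x['mtime'], reverse=True)   # newest first
    assert repos[0]['mtime'] == 30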
@@ -95,7 +95,7 @@ class UploadLinks(APIView):
                 return api_error(status.HTTP_404_NOT_FOUND, error_msg)

             # filter share links by repo
-            upload_link_shares = filter(lambda ufs: ufs.repo_id==repo_id, upload_link_shares)
+            upload_link_shares = [ufs for ufs in upload_link_shares if ufs.repo_id==repo_id]

             path = request.GET.get('path', None)
             if path:
@@ -114,7 +114,7 @@ class UploadLinks(APIView):
                 path = path + '/'

             # filter share links by path
-            upload_link_shares = filter(lambda ufs: ufs.path==path, upload_link_shares)
+            upload_link_shares = [ufs for ufs in upload_link_shares if ufs.path==path]

         result = []
         for uls in upload_link_shares:
@@ -124,7 +124,7 @@ class UploadLinks(APIView):
         if len(result) == 1:
             result = result
         else:
-            result.sort(lambda x, y: cmp(x['obj_name'], y['obj_name']))
+            result.sort(key=lambda x: x['obj_name'])

         return Response(result)

@@ -54,7 +54,7 @@ class User(APIView):

         # update account telephone
         if info_dict['telephone']:
-            DetailedProfile.objects.add_or_update(email, department=None , telephone=info_dict['telephone'])
+            DetailedProfile.objects.add_or_update(email, department=None, telephone=info_dict['telephone'])

         # update user list_in_address_book
         if info_dict['list_in_address_book']:
@@ -70,7 +70,7 @@ class User(APIView):
         email = request.user.username

         if not ENABLE_UPDATE_USER_INFO:
-            error_msg = _(u'Feature disabled.')
+            error_msg = _('Feature disabled.')
             return api_error(status.HTTP_403_FORBIDDEN, error_msg)

         # argument check for name
@@ -78,11 +78,11 @@ class User(APIView):
         if name:
             name = name.strip()
             if len(name) > 64:
-                error_msg = _(u'Name is too long (maximum is 64 characters)')
+                error_msg = _('Name is too long (maximum is 64 characters)')
                 return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

             if "/" in name:
-                error_msg = _(u"Name should not include '/'.")
+                error_msg = _("Name should not include '/'.")
                 return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

         # argument check for contact_email
@@ -90,7 +90,7 @@ class User(APIView):
         if contact_email:

             if not ENABLE_USER_SET_CONTACT_EMAIL:
-                error_msg = _(u'Feature disabled.')
+                error_msg = _('Feature disabled.')
                 return api_error(status.HTTP_403_FORBIDDEN, error_msg)

             profile = Profile.objects.get_profile_by_contact_email(contact_email)

@@ -46,17 +46,17 @@ class UserAvatarView(APIView):

         (root, ext) = os.path.splitext(image_file.name.lower())
         if AVATAR_ALLOWED_FILE_EXTS and ext not in AVATAR_ALLOWED_FILE_EXTS:
-            error_msg = _(u"%(ext)s is an invalid file extension. Authorized extensions are : %(valid_exts_list)s") % {'ext' : ext, 'valid_exts_list' : ", ".join(AVATAR_ALLOWED_FILE_EXTS)}
+            error_msg = _("%(ext)s is an invalid file extension. Authorized extensions are : %(valid_exts_list)s") % {'ext' : ext, 'valid_exts_list' : ", ".join(AVATAR_ALLOWED_FILE_EXTS)}
             return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

         if image_file.size > AVATAR_MAX_SIZE:
-            error_msg = _(u"Your file is too big (%(size)s), the maximum allowed size is %(max_valid_size)s") % { 'size' : filesizeformat(image_file.size), 'max_valid_size' : filesizeformat(AVATAR_MAX_SIZE)}
+            error_msg = _("Your file is too big (%(size)s), the maximum allowed size is %(max_valid_size)s") % { 'size' : filesizeformat(image_file.size), 'max_valid_size' : filesizeformat(AVATAR_MAX_SIZE)}
             return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

         username = request.user.username
         count = Avatar.objects.filter(emailuser=username).count()
         if AVATAR_MAX_AVATARS_PER_USER > 1 and count >= AVATAR_MAX_AVATARS_PER_USER:
-            error_msg = _(u"You already have %(nb_avatars)d avatars, and the maximum allowed is %(nb_max_avatars)d.") % { 'nb_avatars' : count, 'nb_max_avatars' : AVATAR_MAX_AVATARS_PER_USER}
+            error_msg = _("You already have %(nb_avatars)d avatars, and the maximum allowed is %(nb_max_avatars)d.") % { 'nb_avatars' : count, 'nb_max_avatars' : AVATAR_MAX_AVATARS_PER_USER}
             return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

         try:

@@ -2,7 +2,7 @@
 import re
 import datetime
 import time
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import logging

 from rest_framework import status
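Note: Python 3 split the old `urllib`/`urllib2`/`urlparse` modules into `urllib.request`, `urllib.parse`, and `urllib.error`; 2to3 imports all three so every former call site still resolves. The common mappings, as a sketch:

    from urllib.parse import urlencode, quote, urlparse   # was urllib.urlencode / urllib.quote / urlparse
    from urllib.request import urlopen                    # was urllib2.urlopen

    print(urlencode({'page': 1}))   # page=1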
@@ -79,7 +79,7 @@ def get_user_contact_email_dict(email_list):
     email_list = set(email_list)
     user_contact_email_dict = {}
     for email in email_list:
-        if not user_contact_email_dict.has_key(email):
+        if email not in user_contact_email_dict:
             user_contact_email_dict[email] = email2contact_email(email)

     return user_contact_email_dict
@@ -88,7 +88,7 @@ def get_user_name_dict(email_list):
     email_list = set(email_list)
     user_name_dict = {}
     for email in email_list:
-        if not user_name_dict.has_key(email):
+        if email not in user_name_dict:
             user_name_dict[email] = email2nickname(email)

     return user_name_dict
@@ -97,7 +97,7 @@ def get_repo_dict(repo_id_list):
     repo_id_list = set(repo_id_list)
     repo_dict = {}
     for repo_id in repo_id_list:
-        if not repo_dict.has_key(repo_id):
+        if repo_id not in repo_dict:
             repo_dict[repo_id] = ''
             repo = seafile_api.get_repo(repo_id)
             if repo:
@@ -110,10 +110,10 @@ def get_group_dict(group_id_list):
     group_id_list = set(group_id_list)
     group_dict = {}
     for group_id in group_id_list:
-        if not group_dict.has_key(group_id):
+        if group_id not in group_dict:
             group_dict[group_id] = ''
             group = ccnet_api.get_group(int(group_id))
-            print group
+            print(group)
             if group:
                 group_dict[group_id] = group

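Note: `dict.has_key()` was removed in Python 3; the `in` operator is the replacement (and has been the preferred spelling since Python 2.2). Sketch:

    cache = {}
    if 'k' not in cache:        # replaces: if not cache.has_key('k')
        cache['k'] = 'value'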
@@ -154,29 +154,29 @@ def generate_links_header_for_paginator(base_url, page, per_page, total_count, option_dict):
         else:
             return False

-    if type(option_dict) is not dict:
+    if not isinstance(option_dict, dict):
         return ''

     query_dict = {'page': 1, 'per_page': per_page}
     query_dict.update(option_dict)

     # generate first page url
-    first_page_url = base_url + '?' + urllib.urlencode(query_dict)
+    first_page_url = base_url + '?' + urllib.parse.urlencode(query_dict)

     # generate last page url
     last_page_query_dict = {'page': (total_count / per_page) + 1}
     query_dict.update(last_page_query_dict)
-    last_page_url = base_url + '?' + urllib.urlencode(query_dict)
+    last_page_url = base_url + '?' + urllib.parse.urlencode(query_dict)

     # generate next page url
     next_page_query_dict = {'page': page + 1}
     query_dict.update(next_page_query_dict)
-    next_page_url = base_url + '?' + urllib.urlencode(query_dict)
+    next_page_url = base_url + '?' + urllib.parse.urlencode(query_dict)

     # generate prev page url
     prev_page_query_dict = {'page': page - 1}
     query_dict.update(prev_page_query_dict)
-    prev_page_url = base_url + '?' + urllib.urlencode(query_dict)
+    prev_page_url = base_url + '?' + urllib.parse.urlencode(query_dict)

     # generate `Links` header
     links_header = ''

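Note: one hazard this hunk leaves untouched: `/` is true division on Python 3, so `(total_count / per_page) + 1` now produces a float page number. If an integer is required, `//` is the operator to reach for; a hedged sketch of the usual ceiling form:

    total_count, per_page = 25, 10
    last_page = (total_count + per_page - 1) // per_page
    print(last_page)   # 3, an int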
@@ -2,7 +2,7 @@

 import os
 import logging
-import urllib2
+import urllib.request, urllib.error, urllib.parse
 import posixpath

 from rest_framework import status
@@ -61,13 +61,13 @@ class WikiPagesView(APIView):

         pages = get_wiki_pages(repo)
         wiki_pages_object = []
-        for _, page_name in pages.iteritems():
+        for _, page_name in pages.items():
             wiki_page_object = get_wiki_page_object(wiki, page_name)
             wiki_pages_object.append(wiki_page_object)

         # sort pages by name
-        wiki_pages_object.sort(lambda x, y: cmp(x['name'].lower(),
-                                                y['name'].lower()))
+        wiki_pages_object.sort(
+            key=lambda x: x['name'].lower())

         return Response({
             "data": wiki_pages_object
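Note: `iteritems()`/`iterkeys()`/`itervalues()` are gone on Python 3; plain `items()`, `keys()` and `values()` now return lazy views, so the memory profile is the same. Sketch:

    pages = {'id1': 'Home', 'id2': 'FAQ'}
    for page_id, name in pages.items():   # replaces pages.iteritems()
        print(page_id, name)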
@@ -161,7 +161,7 @@ class WikiPageView(APIView):
             return api_error(status.HTTP_404_NOT_FOUND, error_msg)

         url = get_inner_file_url(repo, wiki_dirent.obj_id, wiki_dirent.obj_name)
-        file_response = urllib2.urlopen(url)
+        file_response = urllib.request.urlopen(url)
         content = file_response.read()

         wiki_page_object = get_wiki_page_object(wiki, page_name)
@@ -332,7 +332,7 @@ class WikiPageContentView(APIView):
             return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

         url = gen_inner_file_get_url(token, file_name)
-        file_response = urllib2.urlopen(url)
+        file_response = urllib.request.urlopen(url)
         content = file_response.read()

         try:

@@ -47,7 +47,7 @@ class WikisView(APIView):
         rtype = request.GET.get('type', "")
         if not rtype:
             # set all to True, no filter applied
-            filter_by = filter_by.fromkeys(filter_by.iterkeys(), True)
+            filter_by = filter_by.fromkeys(iter(filter_by.keys()), True)

         for f in rtype.split(','):
             f = f.strip()

@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.11 on 2018-03-21 08:41
-from __future__ import unicode_literals
+

 from django.db import migrations, models
 import django.utils.timezone
@@ -41,6 +41,6 @@ class Migration(migrations.Migration):
         ),
         migrations.AlterUniqueTogether(
             name='tokenv2',
-            unique_together=set([('user', 'platform', 'device_id')]),
+            unique_together={('user', 'platform', 'device_id')},
         ),
     ]

@@ -4,6 +4,9 @@ import hmac
 import datetime
 from hashlib import sha1

+import operator
+from functools import cmp_to_key
+
 from django.db import models
 from django.utils import timezone

@@ -27,7 +30,7 @@ class Token(models.Model):

     def generate_key(self):
         unique = str(uuid.uuid4())
-        return hmac.new(unique, digestmod=sha1).hexdigest()
+        return hmac.new(unique.encode('utf-8'), digestmod=sha1).hexdigest()

     def __unicode__(self):
         return self.key
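Note: on Python 3 `hmac.new()` and the `hashlib` constructors accept only bytes, hence the `.encode('utf-8')` on the generated key material. Sketch:

    import hmac
    import uuid
    from hashlib import sha1

    unique = str(uuid.uuid4())
    digest = hmac.new(unique.encode('utf-8'), digestmod=sha1).hexdigest()
    print(len(digest))   # 40 hex characters for SHA-1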
@@ -72,13 +75,12 @@ class TokenV2Manager(models.Manager):
            the same category are listed by most recently used first

            '''
-            ret = cmp(platform_priorities[d1.platform], platform_priorities[d2.platform])
-            if ret != 0:
-                return ret
-
-            return cmp(d2.last_accessed, d1.last_accessed)
+            if operator.eq(platform_priorities[d1.platform], platform_priorities[d2.platform]):
+                return operator.lt(d2.last_accessed, d1.last_accessed)
+            else:
+                return operator.lt(platform_priorities[d1.platform], platform_priorities[d2.platform])

-        return [ d.as_dict() for d in sorted(devices, sort_devices) ]
+        return [ d.as_dict() for d in sorted(devices, key=cmp_to_key(sort_devices)) ]

     def _get_token_by_user_device(self, username, platform, device_id):
         try:
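Note: `functools.cmp_to_key` expects the wrapped comparator to return a negative, zero, or positive number, while `operator.lt` returns a bool (never negative), so the converted comparator above no longer expresses the original ordering exactly. A hedged sketch of a faithful old-style comparator, with stand-in data:

    from collections import namedtuple
    from functools import cmp_to_key

    Device = namedtuple('Device', ['platform', 'last_accessed'])
    platform_priorities = {'windows': 0, 'android': 1}

    def sort_devices_cmp(d1, d2):
        # old cmp() contract: negative / zero / positive
        pri = platform_priorities[d1.platform] - platform_priorities[d2.platform]
        if pri != 0:
            return pri
        # tie on platform: most recently used first
        return (d2.last_accessed > d1.last_accessed) - (d2.last_accessed < d1.last_accessed)

    devices = [Device('android', 5), Device('windows', 1), Device('windows', 9)]
    print(sorted(devices, key=cmp_to_key(sort_devices_cmp)))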
@@ -172,11 +174,11 @@ class TokenV2(models.Model):

     def generate_key(self):
         unique = str(uuid.uuid4())
-        return hmac.new(unique, digestmod=sha1).hexdigest()
+        return hmac.new(unique.encode('utf-8'), digestmod=sha1).hexdigest()

     def __unicode__(self):
         return "TokenV2{user=%(user)s,device=%(device_name)s}" % \
-            dict(user=self.user,device_name=self.device_name)
+            dict(user=self.user, device_name=self.device_name)

     def is_desktop_client(self):
         return str(self.platform) in ('windows', 'linux', 'mac')

@@ -2,7 +2,7 @@
 """
 Provides various throttling policies.
 """
-from __future__ import unicode_literals
+
 from django.conf import settings
 from django.core.cache import cache as default_cache
 from django.core.exceptions import ImproperlyConfigured

@@ -104,12 +104,12 @@ def get_groups(email):
         if len(msg) >= 1:
             mtime = get_timestamp(msg[0].timestamp)
         group = {
-            "id":g.id,
-            "name":g.group_name,
-            "creator":g.creator_name,
-            "ctime":g.timestamp,
-            "mtime":mtime,
-            "msgnum":grpmsgs[g.id],
+            "id": g.id,
+            "name": g.group_name,
+            "creator": g.creator_name,
+            "ctime": g.timestamp,
+            "mtime": mtime,
+            "msgnum": grpmsgs[g.id],
             }
         group_json.append(group)

@@ -227,11 +227,11 @@ def group_msg_to_json(msg, get_all_replies):
     replies = []
     for reply in msg.replies:
         r = {
-            'from_email' : reply.from_email,
-            'nickname' : email2nickname(reply.from_email),
-            'timestamp' : get_timestamp(reply.timestamp),
-            'msg' : reply.message,
-            'msgid' : reply.id,
+            'from_email': reply.from_email,
+            'nickname': email2nickname(reply.from_email),
+            'timestamp': get_timestamp(reply.timestamp),
+            'msg': reply.message,
+            'msgid': reply.id,
             }
         replies.append(r)

@@ -9,7 +9,7 @@ import datetime
 import posixpath
 import re
 from dateutil.relativedelta import relativedelta
-from urllib2 import quote
+from urllib.parse import quote

 from rest_framework import parsers
 from rest_framework import status
@@ -129,13 +129,13 @@ except ImportError:
 from pysearpc import SearpcError, SearpcObjEncoder
 import seaserv
 from seaserv import seafserv_threaded_rpc, \
-    get_personal_groups_by_user, get_session_info, is_personal_repo, \
-    get_repo, check_permission, get_commits, is_passwd_set,\
+    get_personal_groups_by_user, is_personal_repo, \
+    get_repo, check_permission, get_commits,\
     check_quota, list_share_repos, get_group_repos_by_owner, get_group_repoids, \
     remove_share, get_group, \
     get_commit, get_file_id_by_path, MAX_DOWNLOAD_DIR_SIZE, edit_repo, \
     ccnet_threaded_rpc, get_personal_groups, seafile_api, \
-    create_org, ccnet_api, send_message
+    create_org, ccnet_api

 from constance import config

@@ -345,11 +345,11 @@ class AccountInfo(APIView):
         if name is not None:
             if len(name) > 64:
                 return api_error(status.HTTP_400_BAD_REQUEST,
-                        _(u'Name is too long (maximum is 64 characters)'))
+                        _('Name is too long (maximum is 64 characters)'))

             if "/" in name:
                 return api_error(status.HTTP_400_BAD_REQUEST,
-                        _(u"Name should not include '/'."))
+                        _("Name should not include '/'."))

         email_interval = request.data.get("email_notification_interval", None)
         if email_interval is not None:
@@ -524,7 +524,7 @@ class Search(APIView):
         suffixes = []
         if len(custom_ftypes) > 0:
             for ftp in custom_ftypes:
-                if SEARCH_FILEEXT.has_key(ftp):
+                if ftp in SEARCH_FILEEXT:
                     for ext in SEARCH_FILEEXT[ftp]:
                         suffixes.append(ext)

@@ -590,7 +590,7 @@ class Search(APIView):
             e['permission'] = permission

             # get repo type
-            if repo_type_map.has_key(repo_id):
+            if repo_id in repo_type_map:
                 e['repo_type'] = repo_type_map[repo_id]
             else:
                 e['repo_type'] = ''
@@ -605,7 +605,7 @@ def repo_download_info(request, repo_id, gen_sync_token=True):
         return api_error(status.HTTP_404_NOT_FOUND, 'Library not found.')

     # generate download url for client
-    relay_id = get_session_info().id
+    relay_id = 0
     addr, port = get_ccnet_server_addr_port()
     email = request.user.username
     if gen_sync_token:
@@ -670,7 +670,7 @@ class Repos(APIView):
         rtype = request.GET.get('type', "")
         if not rtype:
             # set all to True, no filter applied
-            filter_by = filter_by.fromkeys(filter_by.iterkeys(), True)
+            filter_by = filter_by.fromkeys(iter(filter_by.keys()), True)

         for f in rtype.split(','):
             f = f.strip()
@@ -695,14 +695,14 @@ class Repos(APIView):
                     ret_corrupted=True)

             # Reduce memcache fetch ops.
-            modifiers_set = set([x.last_modifier for x in owned_repos])
+            modifiers_set = {x.last_modifier for x in owned_repos}
             for e in modifiers_set:
                 if e not in contact_email_dict:
                     contact_email_dict[e] = email2contact_email(e)
                 if e not in nickname_dict:
                     nickname_dict[e] = email2nickname(e)

-            owned_repos.sort(lambda x, y: cmp(y.last_modify, x.last_modify))
+            owned_repos.sort(key=lambda x: x.last_modify, reverse=True)
             for r in owned_repos:
                 # do not return virtual repos
                 if r.is_virtual:
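Note: `set([x ... for x in ...])` builds a throwaway list before the set; the set comprehension (2to3's `set_literal` fixer) skips that intermediate step. Sketch:

    repos = [{'last_modifier': 'a@x.com'}, {'last_modifier': 'a@x.com'}]
    modifiers = {r['last_modifier'] for r in repos}
    print(modifiers)   # {'a@x.com'}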
@@ -753,15 +753,15 @@ class Repos(APIView):
                     get_repos_with_admin_permission(email)

             # Reduce memcache fetch ops.
-            owners_set = set([x.user for x in shared_repos])
-            modifiers_set = set([x.last_modifier for x in shared_repos])
+            owners_set = {x.user for x in shared_repos}
+            modifiers_set = {x.last_modifier for x in shared_repos}
             for e in owners_set | modifiers_set:
                 if e not in contact_email_dict:
                     contact_email_dict[e] = email2contact_email(e)
                 if e not in nickname_dict:
                     nickname_dict[e] = email2nickname(e)

-            shared_repos.sort(lambda x, y: cmp(y.last_modify, x.last_modify))
+            shared_repos.sort(key=lambda x: x.last_modify, reverse=True)
             for r in shared_repos:
                 if q and q.lower() not in r.name.lower():
                     continue
@@ -775,7 +775,7 @@ class Repos(APIView):
                 if not is_web_request(request):
                     continue

-                r.password_need = is_passwd_set(r.repo_id, email)
+                r.password_need = seafile_api.is_password_set(r.repo_id, email)
                 repo = {
                     "type": "srepo",
                     "id": r.repo_id,
@@ -784,7 +784,6 @@ class Repos(APIView):
                     "owner_contact_email": contact_email_dict.get(r.user, ''),
                     "name": r.repo_name,
                     "owner_nickname": nickname_dict.get(r.user, ''),
-                    "owner_name": nickname_dict.get(r.user, ''),
                     "mtime": r.last_modify,
                     "mtime_relative": translate_seahub_time(r.last_modify),
                     "modifier_email": r.last_modifier,
@@ -816,11 +815,11 @@ class Repos(APIView):
             else:
                 group_repos = seafile_api.get_group_repos_by_user(email)

-            group_repos.sort(lambda x, y: cmp(y.last_modify, x.last_modify))
+            group_repos.sort(key=lambda x: x.last_modify, reverse=True)

             # Reduce memcache fetch ops.
-            share_from_set = set([x.user for x in group_repos])
-            modifiers_set = set([x.last_modifier for x in group_repos])
+            share_from_set = {x.user for x in group_repos}
+            modifiers_set = {x.last_modifier for x in group_repos}
             for e in modifiers_set | share_from_set:
                 if e not in contact_email_dict:
                     contact_email_dict[e] = email2contact_email(e)
@@ -863,8 +862,8 @@ class Repos(APIView):
             public_repos = list_inner_pub_repos(request)

             # Reduce memcache fetch ops.
-            share_from_set = set([x.user for x in public_repos])
-            modifiers_set = set([x.last_modifier for x in public_repos])
+            share_from_set = {x.user for x in public_repos}
+            modifiers_set = {x.last_modifier for x in public_repos}
             for e in modifiers_set | share_from_set:
                 if e not in contact_email_dict:
                     contact_email_dict[e] = email2contact_email(e)
@@ -906,7 +905,7 @@ class Repos(APIView):
             org_id = request.user.org.org_id

         try:
-            send_message('seahub.stats', 'user-login\t%s\t%s\t%s' % (email, timestamp, org_id))
+            seafile_api.publish_event('seahub.stats', 'user-login\t%s\t%s\t%s' % (email, timestamp, org_id))
         except Exception as e:
             logger.error('Error when sending user-login message: %s' % str(e))
         response = HttpResponse(json.dumps(repos_json), status=200,
@@ -985,7 +984,7 @@ class Repos(APIView):
             return None, api_error(status.HTTP_403_FORBIDDEN,
                                    'NOT allow to create encrypted library.')

-        if org_id > 0:
+        if org_id and org_id > 0:
             repo_id = seafile_api.create_org_repo(repo_name,
                     repo_desc, username, org_id, passwd,
                     enc_version=settings.ENCRYPTED_LIBRARY_VERSION)
@@ -1049,7 +1048,7 @@ class Repos(APIView):
             return None, api_error(status.HTTP_400_BAD_REQUEST,
                                    'You must provide magic, random_key and enc_version.')

-        if org_id > 0:
+        if org_id and org_id > 0:
             repo_id = seafile_api.create_org_enc_repo(repo_id, repo_name, repo_desc,
                                                       username, magic, random_key,
                                                       salt, enc_version, org_id)
@@ -1196,7 +1195,7 @@ def set_repo_password(request, repo, password):
         if ENABLE_RESET_ENCRYPTED_REPO_PASSWORD:
             add_encrypted_repo_secret_key_to_database(repo_id, password)

-    except SearpcError, e:
+    except SearpcError as e:
         if e.msg == 'Bad arguments':
             return api_error(status.HTTP_400_BAD_REQUEST, e.msg)
         elif e.msg == 'Repo is not encrypted':
@@ -1254,14 +1253,14 @@ class Repo(APIView):
         root_id = current_commit.root_id if current_commit else None

         repo_json = {
-            "type":"repo",
-            "id":repo.id,
-            "owner":owner,
-            "name":repo.name,
-            "mtime":repo.latest_modify,
-            "size":repo.size,
-            "encrypted":repo.encrypted,
-            "root":root_id,
+            "type": "repo",
+            "id": repo.id,
+            "owner": owner,
+            "name": repo.name,
+            "mtime": repo.latest_modify,
+            "size": repo.size,
+            "encrypted": repo.encrypted,
+            "root": root_id,
             "permission": check_permission(repo.id, username),
             "modifier_email": repo.last_modifier,
             "modifier_contact_email": email2contact_email(repo.last_modifier),
@@ -1583,7 +1582,7 @@ class RepoOwner(APIView):
             return api_error(status.HTTP_404_NOT_FOUND, error_msg)

         if org_id and not ccnet_api.org_user_exists(org_id, new_owner):
-            error_msg = _(u'User %s not found in organization.') % new_owner
+            error_msg = _('User %s not found in organization.') % new_owner
             return api_error(status.HTTP_404_NOT_FOUND, error_msg)

         # permission check
@@ -1598,12 +1597,12 @@ class RepoOwner(APIView):
             return api_error(status.HTTP_403_FORBIDDEN, error_msg)

         if not new_owner_obj.permissions.can_add_repo():
-            error_msg = _(u'Transfer failed: role of %s is %s, can not add library.') % \
+            error_msg = _('Transfer failed: role of %s is %s, can not add library.') % \
                     (new_owner, new_owner_obj.role)
             return api_error(status.HTTP_403_FORBIDDEN, error_msg)

         if new_owner == repo_owner:
-            error_msg = _(u"Library can not be transferred to owner.")
+            error_msg = _("Library can not be transferred to owner.")
             return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

         pub_repos = []
@@ -1741,7 +1740,7 @@ class FileBlockDownloadLinkView(APIView):
                 'You do not have permission to access this repo.')

         if check_quota(repo_id) < 0:
-            return api_error(HTTP_443_ABOVE_QUOTA, _(u"Out of quota."))
+            return api_error(HTTP_443_ABOVE_QUOTA, _("Out of quota."))

         token = seafile_api.get_fileserver_access_token(
                 repo_id, file_id, 'downloadblks', request.user.username)
@@ -1777,7 +1776,7 @@ class UploadLinkView(APIView):
                 'You do not have permission to access this folder.')

         if check_quota(repo_id) < 0:
-            return api_error(HTTP_443_ABOVE_QUOTA, _(u"Out of quota."))
+            return api_error(HTTP_443_ABOVE_QUOTA, _("Out of quota."))

         token = seafile_api.get_fileserver_access_token(repo_id,
                 'dummy', 'upload', request.user.username, use_onetime=False)
@@ -1825,7 +1824,7 @@ class UpdateLinkView(APIView):
                 'You do not have permission to access this folder.')

         if check_quota(repo_id) < 0:
-            return api_error(HTTP_443_ABOVE_QUOTA, _(u"Out of quota."))
+            return api_error(HTTP_443_ABOVE_QUOTA, _("Out of quota."))

         token = seafile_api.get_fileserver_access_token(repo_id,
                 'dummy', 'update', request.user.username, use_onetime=False)
@@ -1869,7 +1868,7 @@ class UploadBlksLinkView(APIView):
                 'You do not have permission to access this folder.')

         if check_quota(repo_id) < 0:
-            return api_error(HTTP_443_ABOVE_QUOTA, _(u"Out of quota."))
+            return api_error(HTTP_443_ABOVE_QUOTA, _("Out of quota."))

         token = seafile_api.get_fileserver_access_token(repo_id,
                 'dummy', 'upload-blks-api', request.user.username, use_onetime=False)
@@ -1914,7 +1913,7 @@ class UploadBlksLinkView(APIView):
                 'You do not have permission to access this folder.')

         if check_quota(repo_id) < 0:
-            return api_error(HTTP_443_ABOVE_QUOTA, _(u"Out of quota."))
+            return api_error(HTTP_443_ABOVE_QUOTA, _("Out of quota."))

         token = seafile_api.get_fileserver_access_token(repo_id,
                 'dummy', 'upload', request.user.username, use_onetime=False)
@@ -1962,7 +1961,7 @@ class UpdateBlksLinkView(APIView):
                 'You do not have permission to access this folder.')

         if check_quota(repo_id) < 0:
-            return api_error(HTTP_443_ABOVE_QUOTA, _(u"Out of quota."))
+            return api_error(HTTP_443_ABOVE_QUOTA, _("Out of quota."))

         token = seafile_api.get_fileserver_access_token(repo_id,
                 'dummy', 'update-blks-api', request.user.username, use_onetime=False)
@@ -2012,7 +2011,7 @@ def get_dir_file_recursively(username, repo_id, path, all_dirs):
     file_list = [item for item in all_dirs if item['type'] == 'file']
     contact_email_dict = {}
     nickname_dict = {}
-    modifiers_set = set([x['modifier_email'] for x in file_list])
+    modifiers_set = {x['modifier_email'] for x in file_list}
     for e in modifiers_set:
         if e not in contact_email_dict:
             contact_email_dict[e] = email2contact_email(e)
@@ -2042,7 +2041,7 @@ def get_dir_entrys_by_id(request, repo, path, dir_id, request_type=None):
         dirs = seafile_api.list_dir_with_perm(repo.id, path, dir_id,
                 username, -1, -1)
         dirs = dirs if dirs else []
-    except SearpcError, e:
+    except SearpcError as e:
         logger.error(e)
         return api_error(HTTP_520_OPERATION_FAILED,
                          "Failed to list dir.")
@@ -2084,7 +2083,7 @@ def get_dir_entrys_by_id(request, repo, path, dir_id, request_type=None):
     # Use dict to reduce memcache fetch cost in large for-loop.
     contact_email_dict = {}
     nickname_dict = {}
-    modifiers_set = set([x['modifier_email'] for x in file_list])
+    modifiers_set = {x['modifier_email'] for x in file_list}
     for e in modifiers_set:
         if e not in contact_email_dict:
             contact_email_dict[e] = email2contact_email(e)
@@ -2108,8 +2107,8 @@ def get_dir_entrys_by_id(request, repo, path, dir_id, request_type=None):
         if normalize_file_path(file_path) in starred_files:
             e['starred'] = True

-    dir_list.sort(lambda x, y: cmp(x['name'].lower(), y['name'].lower()))
-    file_list.sort(lambda x, y: cmp(x['name'].lower(), y['name'].lower()))
+    dir_list.sort(key=lambda x: x['name'].lower())
+    file_list.sort(key=lambda x: x['name'].lower())

     if request_type == 'f':
         dentrys = file_list
@@ -2142,7 +2141,7 @@ def get_shared_link(request, repo_id, path):

         try:
             fs.save()
-        except IntegrityError, e:
+        except IntegrityError as e:
             return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, e.msg)

     http_or_https = request.is_secure() and 'https' or 'http'
@@ -2207,7 +2206,7 @@ def get_repo_file(request, repo_id, file_id, file_name, op,
 def reloaddir(request, repo, parent_dir):
     try:
         dir_id = seafile_api.get_dir_id_by_path(repo.id, parent_dir)
-    except SearpcError, e:
+    except SearpcError as e:
         logger.error(e)
         return api_error(HTTP_520_OPERATION_FAILED,
                          "Failed to get dir id by path")
@@ -2260,7 +2259,7 @@ class OpDeleteView(APIView):
         allowed_file_names = []
         locked_files = get_locked_files_by_dir(request, repo_id, parent_dir)
         for file_name in file_names.split(':'):
-            if file_name not in locked_files.keys():
+            if file_name not in list(locked_files.keys()):
                 # file is not locked
                 allowed_file_names.append(file_name)
             elif locked_files[file_name] == username:
@@ -2333,7 +2332,7 @@ class OpMoveView(APIView):
         allowed_obj_names = []
         locked_files = get_locked_files_by_dir(request, repo_id, parent_dir)
        for file_name in obj_names.split(':'):
-            if file_name not in locked_files.keys():
+            if file_name not in list(locked_files.keys()):
                 # file is not locked
                 allowed_obj_names.append(file_name)
             elif locked_files[file_name] == username:
@@ -2373,7 +2372,7 @@ class OpMoveView(APIView):

         # check if above quota for dst repo
         if seafile_api.check_quota(dst_repo, total_size) < 0:
-            return api_error(HTTP_443_ABOVE_QUOTA, _(u"Out of quota."))
+            return api_error(HTTP_443_ABOVE_QUOTA, _("Out of quota."))

         # make new name
         dst_dirents = seafile_api.list_dir_by_path(dst_repo, dst_dir)
@@ -2487,7 +2486,7 @@ class OpCopyView(APIView):

         # check if above quota for dst repo
         if seafile_api.check_quota(dst_repo, total_size) < 0:
-            return api_error(HTTP_443_ABOVE_QUOTA, _(u"Out of quota."))
+            return api_error(HTTP_443_ABOVE_QUOTA, _("Out of quota."))

         # make new name
         dst_dirents = seafile_api.list_dir_by_path(dst_repo, dst_dir)
@@ -2849,7 +2848,7 @@ class FileView(APIView):
             try:
                 seafile_api.rename_file(repo_id, parent_dir, oldname, newname,
                                         username)
-            except SearpcError,e:
+            except SearpcError as e:
                 return api_error(HTTP_520_OPERATION_FAILED,
                                  "Failed to rename file: %s" % e)

@@ -2892,7 +2891,7 @@ class FileView(APIView):
                                            dst_dir, new_filename,
                                            replace=False, username=username,
                                            need_progress=0, synchronous=1)
-            except SearpcError, e:
+            except SearpcError as e:
                 return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR,
                                  "SearpcError:" + e.msg)

@@ -2984,7 +2983,7 @@ class FileView(APIView):
             try:
                 seafile_api.post_empty_file(repo_id, parent_dir,
                                             new_file_name, username)
-            except SearpcError, e:
+            except SearpcError as e:
                 return api_error(HTTP_520_OPERATION_FAILED,
                                  'Failed to create file.')

@@ -3038,7 +3037,7 @@ class FileView(APIView):
             try:
                 seafile_api.lock_file(repo_id, path.lstrip('/'), username, expire)
                 return Response('success', status=status.HTTP_200_OK)
-            except SearpcError, e:
+            except SearpcError as e:
                 logger.error(e)
                 return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal error')

@@ -3052,7 +3051,7 @@ class FileView(APIView):
             try:
                 seafile_api.unlock_file(repo_id, path.lstrip('/'))
                 return Response('success', status=status.HTTP_200_OK)
-            except SearpcError, e:
+            except SearpcError as e:
                 logger.error(e)
                 return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal error')
         else:
@@ -3617,7 +3616,7 @@ class DirView(APIView):
             resp = Response('success', status=status.HTTP_201_CREATED)
             uri = reverse('DirView', args=[repo_id], request=request)
             resp['Location'] = uri + '?p=' + quote(
-                parent_dir.encode('utf-8') + '/' + new_dir_name.encode('utf-8'))
+                parent_dir.encode('utf-8') + '/'.encode('utf-8') + new_dir_name.encode('utf-8'))
             return resp

         elif operation.lower() == 'rename':
@@ -3646,7 +3645,7 @@ class DirView(APIView):
                 seafile_api.rename_file(repo_id, parent_dir, old_dir_name,
                                         checked_newname, username)
                 return Response('success', status=status.HTTP_200_OK)
-            except SearpcError, e:
+            except SearpcError as e:
                 logger.error(e)
                 return api_error(HTTP_520_OPERATION_FAILED,
                                  'Failed to rename folder.')
@@ -3786,13 +3785,13 @@ class DirSubRepoView(APIView):
                     error_msg = 'Bad arguments'
                     return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
                 elif e.msg == 'Incorrect password':
-                    error_msg = _(u'Wrong password')
+                    error_msg = _('Wrong password')
                     return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
                 elif e.msg == 'Internal server error':
-                    error_msg = _(u'Internal server error')
+                    error_msg = _('Internal server error')
                     return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
                 else:
-                    error_msg = _(u'Decrypt library error')
+                    error_msg = _('Decrypt library error')
                     return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

         # create sub-lib for encrypted repo
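Note: Python 3 refuses to concatenate `str` and `bytes`, which is why the bare `'/'` also has to be encoded above. Joining first and encoding once is the simpler shape; a sketch with hypothetical values:

    parent_dir, new_dir_name = '/docs', 'reports'
    location = (parent_dir + '/' + new_dir_name).encode('utf-8')
    print(location)   # b'/docs/reports'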
@@ -3883,7 +3882,7 @@ class BeSharedRepos(APIView):
             shared_repos.append(r)

         if not CLOUD_MODE:
-            shared_repos += seaserv.list_inner_pub_repos(username)
+            shared_repos += seafile_api.get_inner_pub_repo_list()

         return HttpResponse(json.dumps(shared_repos, cls=SearpcObjEncoder),
                             status=200, content_type=json_content_type)
@@ -3960,7 +3959,7 @@ class SharedFileDetailView(APIView):
             file_id = seafile_api.get_file_id_by_path(repo_id, path)
             commits = get_file_revisions_after_renamed(repo_id, path)
             c = commits[0]
-        except SearpcError, e:
+        except SearpcError as e:
             return api_error(HTTP_520_OPERATION_FAILED,
                              "Failed to get file id by path.")

@@ -4089,7 +4088,7 @@ class SharedDirView(APIView):
             dirs = seafserv_threaded_rpc.list_dir_with_perm(repo_id, real_path, dir_id,
                                                             username, -1, -1)
             dirs = dirs if dirs else []
-        except SearpcError, e:
+        except SearpcError as e:
             logger.error(e)
             return api_error(HTTP_520_OPERATION_FAILED, "Failed to list dir.")

@@ -4115,8 +4114,8 @@ class SharedDirView(APIView):
             else:
                 file_list.append(entry)

-        dir_list.sort(lambda x, y: cmp(x['name'].lower(), y['name'].lower()))
-        file_list.sort(lambda x, y: cmp(x['name'].lower(), y['name'].lower()))
+        dir_list.sort(key=lambda x: x['name'].lower())
+        file_list.sort(key=lambda x: x['name'].lower())
         dentrys = dir_list + file_list

         content_type = 'application/json; charset=utf-8'
@@ -4298,7 +4297,7 @@ class SharedRepo(APIView):
                 try:
                     seafile_api.share_repo(repo_id, username, u, permission)
                     shared_users.append(u)
-                except SearpcError, e:
+                except SearpcError as e:
                     logger.error(e)
                     notsharable_errors.append(e)

@@ -4313,7 +4312,7 @@ class SharedRepo(APIView):
                 for s_user in shared_users:
                     try:
                         remove_share(repo_id, username, s_user)
-                    except SearpcError, e:
+                    except SearpcError as e:
                         # ignoring this error, go to next unsharing
                         continue

@@ -4348,7 +4347,7 @@ class SharedRepo(APIView):
             try:
                 seafile_api.set_group_repo(repo_id,
                                            group_id, username, permission)
-            except SearpcError, e:
+            except SearpcError as e:
                 return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR,
                                  "Searpc Error: " + e.msg)
             try:
@@ -4555,7 +4554,7 @@ class Groups(APIView):
                 group_id = ccnet_api.create_group(group_name, username)
                 return HttpResponse(json.dumps({'success': True, 'group_id': group_id}),
                                     content_type=content_type)
-            except SearpcError, e:
+            except SearpcError as e:
                 result['error'] = e.msg
                 return HttpResponse(json.dumps(result), status=500,
                                     content_type=content_type)
@@ -4648,7 +4647,7 @@ class GroupMembers(APIView):

         try:
             ccnet_threaded_rpc.group_add_member(group.id, request.user.username, user_name)
-        except SearpcError, e:
+        except SearpcError as e:
             return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Unable to add user to group')

         return HttpResponse(json.dumps({'success': True}), status=200, content_type=json_content_type)
@@ -4673,7 +4672,7 @@ class GroupMembers(APIView):

         try:
             ccnet_threaded_rpc.group_remove_member(group.id, request.user.username, user_name)
-        except SearpcError, e:
+        except SearpcError as e:
             return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Unable to add user to group')

         return HttpResponse(json.dumps({'success': True}), status=200, content_type=json_content_type)
@@ -4783,14 +4782,14 @@ class GroupRepos(APIView):
         else:
             repos = seafile_api.get_repos_by_group(group.id)

-        repos.sort(lambda x, y: cmp(y.last_modified, x.last_modified))
+        repos.sort(key=lambda x: x.last_modified, reverse=True)
         group.is_staff = is_group_staff(group, request.user)

         # Use dict to reduce memcache fetch cost in large for-loop.
         contact_email_dict = {}
         nickname_dict = {}
-        owner_set = set([x.user for x in repos])
-        modifiers_set = set([x.modifier for x in repos])
+        owner_set = {x.user for x in repos}
+        modifiers_set = {x.modifier for x in repos}
         for e in owner_set | modifiers_set:
             if e not in contact_email_dict:
                 contact_email_dict[e] = email2contact_email(e)
@@ -4949,7 +4948,7 @@ class OfficeConvertQueryStatus(APIView):
         else:
             ret['success'] = True
             ret['status'] = d.status
-    except Exception, e:
+    except Exception as e:
         logging.exception('failed to call query_office_convert_status')
         ret['error'] = str(e)

@@ -19,9 +19,9 @@ def load_backend(path):
     module, attr = path[:i], path[i+1:]
     try:
         mod = import_module(module)
-    except ImportError, e:
+    except ImportError as e:
         raise ImproperlyConfigured('Error importing authentication backend %s: "%s"' % (module, e))
-    except ValueError, e:
+    except ValueError as e:
         raise ImproperlyConfigured('Error importing authentication backends. Is AUTHENTICATION_BACKENDS a correctly defined list or tuple?')
     try:
         cls = getattr(mod, attr)

@@ -194,7 +194,7 @@ class SeafileRemoteUserBackend(AuthBackend):
        """
        user_info = {}

-        for header, user_info_key in self.remote_user_attribute_map.items():
+        for header, user_info_key in list(self.remote_user_attribute_map.items()):
            value = request.META.get(header, None)
            if value:
                user_info[user_info_key] = value

@@ -112,7 +112,7 @@ class PasswordResetForm(forms.Form):
         Validates that a user exists with the given e-mail address.
         """
         if not IS_EMAIL_CONFIGURED:
-            raise forms.ValidationError(_(u'Failed to send email, email service is not properly configured, please contact administrator.'))
+            raise forms.ValidationError(_('Failed to send email, email service is not properly configured, please contact administrator.'))

         email = self.cleaned_data["email"].lower().strip()

@@ -122,7 +122,7 @@ class SeafileRemoteUserMiddleware(MiddlewareMixin):
         if not user:
             if not getattr(settings, 'REMOTE_USER_CREATE_UNKNOWN_USER', True):
-                return render(request, 'remote_user/create_unknown_user_false.html')
+                return render(request, 'remote_user/error.html')
             return render(request, 'remote_user/error.html')

         if user:
             if not user.is_active:

@@ -1,7 +1,7 @@
 # Copyright (c) 2012-2016 Seafile Ltd.
 import datetime
 import hashlib
-import urllib
+import urllib.request, urllib.parse, urllib.error
 import logging

 # import auth
@@ -21,7 +21,7 @@ def get_hexdigest(algorithm, salt, raw_password):
     Returns a string of the hexdigest of the given plaintext password and salt
     using the given algorithm ('md5', 'sha1' or 'crypt').
     """
-    raw_password, salt = smart_str(raw_password), smart_str(salt)
+    raw_password, salt = smart_str(raw_password).encode('utf-8'), smart_str(salt).encode('utf-8')
     if algorithm == 'crypt':
         try:
             import crypt
@@ -65,7 +65,7 @@ class AnonymousUser(object):
         return 'AnonymousUser'

     def __str__(self):
-        return unicode(self).encode('utf-8')
+        return str(self).encode('utf-8')

     def __eq__(self, other):
         return isinstance(other, self.__class__)
@@ -156,7 +156,7 @@ class SocialAuthUser(models.Model):

     class Meta:
         """Meta data"""
-        app_label = "seahub.work_weixin"
+        app_label = "base"
         unique_together = ('provider', 'uid')
         db_table = 'social_auth_usersocialauth'

@@ -72,7 +72,7 @@ class PasswordResetTokenGenerator(object):
         return "%s-%s" % (ts_b36, hash)

     def _num_days(self, dt):
-        return (dt - date(2001,1,1)).days
+        return (dt - date(2001, 1, 1)).days

     def _today(self):
         # Used for mocking in tests

@@ -217,7 +217,7 @@ def login_simple_check(request):
         raise Http404

     today = datetime.now().strftime('%Y-%m-%d')
-    expect = hashlib.md5(settings.SECRET_KEY+username+today).hexdigest()
+    expect = hashlib.md5((settings.SECRET_KEY+username+today).encode('utf-8')).hexdigest()
     if expect == random_key:
         try:
             user = User.objects.get(email=username)
@@ -313,9 +313,9 @@ def password_reset(request, is_admin_site=False, template_name='registration/pas
             opts['domain_override'] = get_current_site(request).domain
         try:
             form.save(**opts)
-        except Exception, e:
+        except Exception as e:
             logger.error(str(e))
-            messages.error(request, _(u'Failed to send email, please contact administrator.'))
+            messages.error(request, _('Failed to send email, please contact administrator.'))
             return render(request, template_name, {
                 'form': form,
             })

@@ -16,7 +16,7 @@ def avatar_img(avatar, size):
     if not avatar.thumbnail_exists(size):
         avatar.create_thumbnail(size)
     return mark_safe("""<img src="%s" alt="%s" width="%s" height="%s" />""" %
-        (avatar.avatar_url(size), unicode(avatar), size, size))
+        (avatar.avatar_url(size), str(avatar), size, size))

 class UploadAvatarForm(forms.Form):

@@ -40,7 +40,7 @@ class UploadAvatarForm(forms.Form):
         if AVATAR_MAX_AVATARS_PER_USER > 1 and \
            count >= AVATAR_MAX_AVATARS_PER_USER:
             raise forms.ValidationError(
-                _(u"You already have %(nb_avatars)d avatars, and the maximum allowed is %(nb_max_avatars)d.") %
+                _("You already have %(nb_avatars)d avatars, and the maximum allowed is %(nb_max_avatars)d.") %
                 { 'nb_avatars' : count, 'nb_max_avatars' : AVATAR_MAX_AVATARS_PER_USER})
         return

@@ -32,9 +32,9 @@ class Command(BaseCommand):
         for avatar in Avatar.objects.all():
             try:
                 self._save(avatar.avatar.name, avatar.avatar)
-                print "SUCCESS: migrated Avatar path=%s user=%s" % (avatar.avatar.name, avatar.emailuser)
+                print("SUCCESS: migrated Avatar path=%s user=%s" % (avatar.avatar.name, avatar.emailuser))
             except AvatarNotFoundError:
-                print "ERROR: Avatar file not found: path=%s user=%s. Skip." % (avatar.avatar.name, avatar.emailuser)
+                print("ERROR: Avatar file not found: path=%s user=%s. Skip." % (avatar.avatar.name, avatar.emailuser))
                 continue

             # try:
@@ -50,10 +50,10 @@ class Command(BaseCommand):
         in the name will be converted to forward '/'.
         """
         name = name.replace('\\', '/')
-        name_md5 = hashlib.md5(name).hexdigest()
+        name_md5 = hashlib.md5(name.encode('utf-8')).hexdigest()
         try:
             binary = content.read()
-        except AttributeError, IOError:
+        except AttributeError as IOError:
             raise AvatarNotFoundError

         size = len(binary)
@@ -78,7 +78,7 @@ class Command(BaseCommand):
         return name

     def exists(self, name):
-        name_md5 = hashlib.md5(name).hexdigest()
+        name_md5 = hashlib.md5(name.encode('utf-8')).hexdigest()
         query = 'SELECT COUNT(*) FROM %(table)s WHERE %(name_md5_column)s = %%s'
         query %= self.__dict__
         cursor = connection.cursor()

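Note: the hunk above preserves a quirk rather than fixing it: Python 2's `except AttributeError, IOError:` caught only AttributeError and bound it to the name `IOError`, and the mechanical `as IOError` translation does the same (still shadowing the builtin). If the intent was to catch both exception types, a tuple is the correct form; sketch:

    class AvatarNotFoundError(Exception):
        pass

    content = None   # simulate a missing file object

    try:
        binary = content.read()           # raises AttributeError here
    except (AttributeError, IOError):     # catches either type, shadows nothing
        print('avatar not found')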
@@ -11,5 +11,5 @@ class Command(BaseCommand):
     def handle(self, **options):
         for avatar in Avatar.objects.all():
             for size in AUTO_GENERATE_AVATAR_SIZES:
-                print "Rebuilding Avatar id=%s at size %s." % (avatar.id, size)
+                print("Rebuilding Avatar id=%s at size %s." % (avatar.id, size))
                 avatar.create_thumbnail(size)

@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.11 on 2018-03-21 08:42
-from __future__ import unicode_literals
+

 import datetime
 from django.db import migrations, models

@@ -14,10 +14,10 @@ from django.utils.encoding import smart_str
 from django.db.models import signals

 try:
-    from cStringIO import StringIO
-    dir(StringIO) # Placate PyFlakes
+    from io import BytesIO
+    dir(BytesIO) # Placate PyFlakes
 except ImportError:
-    from StringIO import StringIO
+    from io import BytesIO

 try:
     from PIL import Image
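Note: on Python 3, `io.BytesIO` is the in-memory buffer for binary data such as image bytes (`StringIO` now accepts text only), which is why both the read and write paths of the thumbnail code below switch to it. Sketch:

    from io import BytesIO

    buf = BytesIO(b'\x89PNG\r\n')   # wrap raw bytes, e.g. before PIL's Image.open
    print(type(buf.getvalue()))     # <class 'bytes'>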
@@ -39,7 +39,7 @@ def avatar_file_path(instance=None, filename=None, size=None, ext=None):
     if isinstance(instance, Avatar):
         tmppath = [AVATAR_STORAGE_DIR]
         if AVATAR_HASH_USERDIRNAMES:
-            tmp = hashlib.md5(instance.emailuser).hexdigest()
+            tmp = hashlib.md5(instance.emailuser.encode('utf-8')).hexdigest()
             tmppath.extend([tmp[0], tmp[1], tmp[2:]])
         else:
             tmppath.append(instance.emailuser)
@@ -63,7 +63,7 @@ def avatar_file_path(instance=None, filename=None, size=None, ext=None):
         # File doesn't exist yet
         if AVATAR_HASH_FILENAMES:
             (root, ext) = os.path.splitext(filename)
-            filename = hashlib.md5(smart_str(filename)).hexdigest()
+            filename = hashlib.md5(smart_str(filename).encode('utf-8')).hexdigest()
             filename = filename + ext
     if size:
         tmppath.extend(['resized', str(size)])
@@ -92,7 +92,7 @@ class AvatarBase(object):

         try:
             orig = self.avatar.storage.open(self.avatar.name, 'rb').read()
-            image = Image.open(StringIO(orig))
+            image = Image.open(BytesIO(orig))

             quality = quality or AVATAR_THUMB_QUALITY
             (w, h) = image.size
@@ -106,7 +106,7 @@ class AvatarBase(object):
                 if image.mode != "RGBA":
                     image = image.convert("RGBA")
                 image = image.resize((size, size), AVATAR_RESIZE_METHOD)
-                thumb = StringIO()
+                thumb = BytesIO()
                 image.save(thumb, AVATAR_THUMB_FORMAT, quality=quality)
                 thumb_file = ContentFile(thumb.getvalue())
             else:
@@ -141,7 +141,7 @@ class Avatar(models.Model, AvatarBase):
     date_uploaded = models.DateTimeField(default=datetime.datetime.now)

     def __unicode__(self):
-        return _(u'Avatar for %s') % self.emailuser
+        return _('Avatar for %s') % self.emailuser

     def save(self, *args, **kwargs):
         avatars = Avatar.objects.filter(emailuser=self.emailuser)
@@ -169,7 +169,7 @@ class GroupAvatar(models.Model, AvatarBase):
     date_uploaded = models.DateTimeField(default=datetime.datetime.now)

     def __unicode__(self):
-        return _(u'Avatar for %s') % self.group_id
+        return _('Avatar for %s') % self.group_id

     def save(self, *args, **kwargs):
         super(GroupAvatar, self).save(*args, **kwargs)

@ -1,8 +1,8 @@
|
||||
# Copyright (c) 2012-2016 Seafile Ltd.
|
||||
import logging
|
||||
import urllib
|
||||
import urllib.request, urllib.parse, urllib.error
|
||||
import hashlib
|
||||
from urlparse import urlparse
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from django import template
|
||||
from django.core.urlresolvers import reverse
|
||||
@ -34,8 +34,8 @@ def avatar_url(user, size=AVATAR_DEFAULT_SIZE):
|
||||
if AVATAR_GRAVATAR_DEFAULT:
|
||||
params['d'] = AVATAR_GRAVATAR_DEFAULT
|
||||
return "http://www.gravatar.com/avatar/%s/?%s" % (
|
||||
hashlib.md5(user.email).hexdigest(),
|
||||
urllib.urlencode(params))
|
||||
hashlib.md5(user.email.encode('utf-8')).hexdigest(),
|
||||
urllib.parse.urlencode(params))
|
||||
else:
|
||||
url = get_default_avatar_url()
|
||||
|
||||
|
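For reference, the converted Gravatar call in isolation; gravatar_url here is an illustrative helper, not a seahub function. hashlib.md5() only accepts bytes on Python 3, and urlencode moved into urllib.parse:

import hashlib
import urllib.parse

def gravatar_url(email, size=80):
    email_hash = hashlib.md5(email.encode('utf-8')).hexdigest()
    params = urllib.parse.urlencode({'s': str(size)})
    return "http://www.gravatar.com/avatar/%s/?%s" % (email_hash, params)

print(gravatar_url("user@example.com"))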
@@ -39,8 +39,8 @@ class AvatarTestCase(TestCase):
                 'password': 'testpassword',
             },
         )
-        self.assertEquals(response.status_code, 302)
-        self.assert_(response['Location'].endswith(settings.LOGIN_REDIRECT_URL))
+        self.assertEqual(response.status_code, 302)
+        self.assertTrue(response['Location'].endswith(settings.LOGIN_REDIRECT_URL))

         Image.init()

@@ -50,38 +50,38 @@ class AvatarTestCase(TestCase):
 class AvatarUploadTests(AvatarTestCase):
     def testNonImageUpload(self):
         response = upload_helper(self, "nonimagefile")
-        self.failUnlessEqual(response.status_code, 200)
-        self.failIfEqual(response.context['upload_avatar_form'].errors, {})
+        self.assertEqual(response.status_code, 200)
+        self.assertNotEqual(response.context['upload_avatar_form'].errors, {})

     def testNormalImageUpload(self):
         response = upload_helper(self, "test.png")
-        self.failUnlessEqual(response.status_code, 200)
-        self.failUnlessEqual(len(response.redirect_chain), 1)
-        self.failUnlessEqual(response.context['upload_avatar_form'].errors, {})
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(response.redirect_chain), 1)
+        self.assertEqual(response.context['upload_avatar_form'].errors, {})
         avatar = get_primary_avatar(self.user)

-        self.failIfEqual(avatar, None)
+        self.assertNotEqual(avatar, None)

     def testImageWithoutExtension(self):
         # use with AVATAR_ALLOWED_FILE_EXTS = ('.jpg', '.png')
         response = upload_helper(self, "imagefilewithoutext")
-        self.failUnlessEqual(response.status_code, 200)
-        self.failUnlessEqual(len(response.redirect_chain), 0) # Redirect only if it worked
-        self.failIfEqual(response.context['upload_avatar_form'].errors, {})
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(response.redirect_chain), 0) # Redirect only if it worked
+        self.assertNotEqual(response.context['upload_avatar_form'].errors, {})

     def testImageWithWrongExtension(self):
         # use with AVATAR_ALLOWED_FILE_EXTS = ('.jpg', '.png')
         response = upload_helper(self, "imagefilewithwrongext.ogg")
-        self.failUnlessEqual(response.status_code, 200)
-        self.failUnlessEqual(len(response.redirect_chain), 0) # Redirect only if it worked
-        self.failIfEqual(response.context['upload_avatar_form'].errors, {})
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(response.redirect_chain), 0) # Redirect only if it worked
+        self.assertNotEqual(response.context['upload_avatar_form'].errors, {})

     def testImageTooBig(self):
         # use with AVATAR_MAX_SIZE = 1024 * 1024
         response = upload_helper(self, "testbig.png")
-        self.failUnlessEqual(response.status_code, 200)
-        self.failUnlessEqual(len(response.redirect_chain), 0) # Redirect only if it worked
-        self.failIfEqual(response.context['upload_avatar_form'].errors, {})
+        self.assertEqual(response.status_code, 200)
+        self.assertEqual(len(response.redirect_chain), 0) # Redirect only if it worked
+        self.assertNotEqual(response.context['upload_avatar_form'].errors, {})

     def testDefaultUrl(self):
         response = self.client.get(reverse('avatar_render_primary', kwargs={

@@ -97,13 +97,13 @@ class AvatarUploadTests(AvatarTestCase):

     def testNonExistingUser(self):
         a = get_primary_avatar("nonexistinguser@mail.com")
-        self.failUnlessEqual(a, None)
+        self.assertEqual(a, None)

     def testThereCanBeOnlyOnePrimaryAvatar(self):
         for i in range(1, 10):
             self.testNormalImageUpload()
         count = Avatar.objects.filter(emailuser=self.user, primary=True).count()
-        self.failUnlessEqual(count, 1)
+        self.assertEqual(count, 1)

     # def testDeleteAvatar(self):
     #     self.testNormalImageUpload()
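The test changes are pure renames: unittest's deprecated camel-case aliases were removed in Python 3. A standalone sketch of the mapping (not seahub's tests):

import unittest

class AliasExample(unittest.TestCase):
    def test_renamed_asserts(self):
        # failUnlessEqual -> assertEqual, assertEquals -> assertEqual,
        # failIfEqual -> assertNotEqual, assert_ -> assertTrue
        self.assertEqual(1 + 1, 2)
        self.assertNotEqual({'errors': 1}, {})
        self.assertTrue('location'.endswith('ion'))

if __name__ == '__main__':
    unittest.main()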
@@ -34,11 +34,11 @@ def _get_next(request):
     3. If Django can determine the previous page from the HTTP headers, the view will
        redirect to that previous page.
     """
-    next = request.POST.get('next', request.GET.get('next',
+    next_page = request.POST.get('next', request.GET.get('next',
         request.META.get('HTTP_REFERER', None)))
-    if not next:
-        next = request.path
-    return next
+    if not next_page:
+        next_page = request.path
+    return next_page

 def _get_avatars(user):
     # Default set. Needs to be sliced, but that's it. Keep the natural order.

@@ -148,10 +148,10 @@ def delete(request, extra_context=None, next_override=None, *args, **kwargs):
     if request.method == 'POST':
         if delete_avatar_form.is_valid():
             ids = delete_avatar_form.cleaned_data['choices']
-            if unicode(avatar.id) in ids and avatars.count() > len(ids):
+            if str(avatar.id) in ids and avatars.count() > len(ids):
                 # Find the next best avatar, and set it as the new primary
                 for a in avatars:
-                    if unicode(a.id) not in ids:
+                    if str(a.id) not in ids:
                         a.primary = True
                         a.save()
                         avatar_updated.send(sender=Avatar, user=request.user, avatar=avatar)
@@ -11,7 +11,7 @@ from django.utils.translation import ugettext_lazy as _
 from django.conf import settings
 from django.contrib.sites.shortcuts import get_current_site
 import seaserv
-from seaserv import ccnet_threaded_rpc, unset_repo_passwd, is_passwd_set, \
+from seaserv import ccnet_threaded_rpc, unset_repo_passwd, \
     seafile_api, ccnet_api
 from constance import config
 from registration import signals

@@ -89,14 +89,14 @@ class UserManager(object):

     def get(self, email=None, id=None):
         if not email and not id:
-            raise User.DoesNotExist, 'User matching query does not exits.'
+            raise User.DoesNotExist('User matching query does not exits.')

         if email:
             emailuser = ccnet_threaded_rpc.get_emailuser(email)
         if id:
             emailuser = ccnet_threaded_rpc.get_emailuser_by_id(id)
         if not emailuser:
-            raise User.DoesNotExist, 'User matching query does not exits.'
+            raise User.DoesNotExist('User matching query does not exits.')

         user = User(emailuser.email)
         user.id = emailuser.id

@@ -510,7 +510,7 @@ class User(object):
         passwd_setted_repos = []
         for r in owned_repos + shared_repos + groups_repos + public_repos:
             if not has_repo(passwd_setted_repos, r) and r.encrypted and \
-                is_passwd_set(r.id, self.email):
+                seafile_api.is_password_set(r.id, self.email):
                 passwd_setted_repos.append(r)

         for r in passwd_setted_repos:

@@ -532,7 +532,7 @@ class User(object):
         passwd_setted_repos = []
         for r in owned_repos + shared_repos + groups_repos + public_repos:
             if not has_repo(passwd_setted_repos, r) and r.encrypted and \
-                is_passwd_set(r.id, self.email):
+                seafile_api.is_password_set(r.id, self.email):
                 passwd_setted_repos.append(r)

         for r in passwd_setted_repos:

@@ -543,7 +543,7 @@ class AuthBackend(object):
     def get_user_with_import(self, username):
         emailuser = seaserv.get_emailuser_with_import(username)
         if not emailuser:
-            raise User.DoesNotExist, 'User matching query does not exits.'
+            raise User.DoesNotExist('User matching query does not exits.')

         user = User(emailuser.email)
         user.id = emailuser.id
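The raise changes are mandatory: the Python 2 comma form raise Exc, 'msg' is a syntax error in Python 3. A minimal sketch with a stand-in exception class:

class DoesNotExist(Exception):
    pass

def get_user(email):
    # Python 2 only: raise DoesNotExist, 'no such user'
    if not email:
        raise DoesNotExist('no such user')   # valid on both 2 and 3
    return email

try:
    get_user(None)
except DoesNotExist as e:   # 'except DoesNotExist, e' is likewise gone
    print(e)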
@@ -14,6 +14,6 @@ class BaseConfig(AppConfig):
         try:
             _ = list(FileComment.objects.all()[:1].values('uuid_id'))
         except:
-            print '''
+            print('''
 Warning: File comment has changed since version 6.3, while table `base_filecomment` is not migrated yet, please consider migrate it according to v6.3.0 release note, otherwise the file comment feature will not work correctly.
-'''
+''')
@@ -2,4 +2,4 @@
 # Allow users to: from database_storage import DatabaseStorage
 # (reduce redundancy a little bit)

-from database_storage import *
+from .database_storage import *
@@ -10,8 +10,8 @@ from django.db import connection, transaction

 import base64
 import hashlib
-import StringIO
-import urlparse
+import io
+import urllib.parse
 from datetime import datetime

 from seahub.utils.timeutils import value_to_db_datetime

@@ -137,7 +137,7 @@ class DatabaseStorage(Storage):
         """
         assert mode == 'rb', "DatabaseStorage open mode must be 'rb'."

-        name_md5 = hashlib.md5(name).hexdigest()
+        name_md5 = hashlib.md5(name.encode('utf-8')).hexdigest()

         query = 'SELECT %(data_column)s FROM %(table)s ' + \
                 'WHERE %(name_md5_column)s = %%s'

@@ -148,7 +148,7 @@ class DatabaseStorage(Storage):
         if row is None:
             return None

-        inMemFile = StringIO.StringIO(base64.b64decode(row[0]))
+        inMemFile = io.BytesIO(base64.b64decode(row[0]))
         inMemFile.name = name
         inMemFile.mode = mode

@@ -160,7 +160,7 @@ class DatabaseStorage(Storage):
         in the name will be converted to forward '/'.
         """
         name = name.replace('\\', '/')
-        name_md5 = hashlib.md5(name).hexdigest()
+        name_md5 = hashlib.md5(name.encode('utf-8')).hexdigest()
         binary = content.read()

         size = len(binary)

@@ -185,7 +185,7 @@ class DatabaseStorage(Storage):
         return name

     def exists(self, name):
-        name_md5 = hashlib.md5(name).hexdigest()
+        name_md5 = hashlib.md5(name.encode('utf-8')).hexdigest()
         query = 'SELECT COUNT(*) FROM %(table)s WHERE %(name_md5_column)s = %%s'
         query %= self.__dict__
         cursor = connection.cursor()

@@ -196,7 +196,7 @@ class DatabaseStorage(Storage):
     def delete(self, name):
         if self.exists(name):
             with transaction.atomic(using='default'):
-                name_md5 = hashlib.md5(name).hexdigest()
+                name_md5 = hashlib.md5(name.encode('utf-8')).hexdigest()
                 query = 'DELETE FROM %(table)s WHERE %(name_md5_column)s = %%s'
                 query %= self.__dict__
                 connection.cursor().execute(query, [name_md5])

@@ -207,12 +207,12 @@ class DatabaseStorage(Storage):
     def url(self, name):
         if self.base_url is None:
             raise ValueError("This file is not accessible via a URL.")
-        result = urlparse.urljoin(self.base_url, name).replace('\\', '/')
+        result = urllib.parse.urljoin(self.base_url, name).replace('\\', '/')
         return result

     def size(self, name):
         "Get the size of the given filename or raise ObjectDoesNotExist."
-        name_md5 = hashlib.md5(name).hexdigest()
+        name_md5 = hashlib.md5(name.encode('utf-8')).hexdigest()
         query = 'SELECT %(size_column)s FROM %(table)s ' + \
                 'WHERE %(name_md5_column)s = %%s'
         query %= self.__dict__

@@ -226,7 +226,7 @@ class DatabaseStorage(Storage):

     def modified_time(self, name):
         "Get the modified time of the given filename or raise ObjectDoesNotExist."
-        name_md5 = hashlib.md5(name).hexdigest()
+        name_md5 = hashlib.md5(name.encode('utf-8')).hexdigest()
         query = 'SELECT %(mtime_column)s FROM %(table)s ' + \
                 'WHERE %(name_md5_column)s = %%s'
         query %= self.__dict__
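All of the name_md5 changes exist because hashlib.md5() raises TypeError when handed a str on Python 3; names must be encoded first. In isolation:

import hashlib

name = 'avatars/photo.png'
# hashlib.md5(name) -> TypeError on Python 3: Unicode objects
# must be encoded before hashing.
name_md5 = hashlib.md5(name.encode('utf-8')).hexdigest()
assert len(name_md5) == 32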
@@ -4,7 +4,7 @@ from django.http import Http404, HttpResponseRedirect, HttpResponseNotAllowed
 from django.shortcuts import render

 from django.utils.http import urlquote
-from seaserv import get_repo, is_passwd_set
+from seaserv import get_repo, seafile_api

 from seahub.options.models import UserOptions, CryptoOptionNotSetError

@@ -49,7 +49,7 @@ def repo_passwd_set_required(func):
     def _decorated(request, *args, **kwargs):
         repo_id = kwargs.get('repo_id', None)
         if not repo_id:
-            raise Exception, 'Repo id is not found in url.'
+            raise Exception('Repo id is not found in url.')
         repo = get_repo(repo_id)
         if not repo:
             raise Http404

@@ -62,14 +62,14 @@ def repo_passwd_set_required(func):
             })

         if (repo.enc_version == 1 or (repo.enc_version == 2 and server_crypto)) \
-                and not is_passwd_set(repo_id, username):
+                and not seafile_api.is_password_set(repo_id, username):
             return render(request, 'decrypt_repo_form.html', {
                 'repo': repo,
                 'next': request.get_full_path(),
             })

         if repo.enc_version == 2 and not server_crypto:
-            return render_error(request, _(u'Files in this library can not be viewed online.'))
+            return render_error(request, _('Files in this library can not be viewed online.'))

         return func(request, *args, **kwargs)
     return _decorated
@@ -15,7 +15,7 @@ class ModifyingFieldDescriptor(object):
 class LowerCaseCharField(CharField):
     def to_python(self, value):
         value = super(LowerCaseCharField, self).to_python(value)
-        if isinstance(value, basestring):
+        if isinstance(value, str):
            return value.lower()
         return value
     def contribute_to_class(self, cls, name):
@@ -12,7 +12,7 @@ class DirectTemplateView(TemplateView):
     def get_context_data(self, **kwargs):
         context = super(self.__class__, self).get_context_data(**kwargs)
         if self.extra_context is not None:
-            for key, value in self.extra_context.items():
+            for key, value in list(self.extra_context.items()):
                 if callable(value):
                     context[key] = value()
                 else:
@@ -28,7 +28,7 @@ class Command(BaseCommand):
         except User.DoesNotExist:
             raise CommandError("user '%s' does not exist" % username)

-        print "Changing password for user '%s'" % u.username
+        print("Changing password for user '%s'" % u.username)

         MAX_TRIES = 3
         count = 0

@@ -37,7 +37,7 @@ class Command(BaseCommand):
             p1 = self._get_pass()
             p2 = self._get_pass("Password (again): ")
             if p1 != p2:
-                print "Passwords do not match. Please try again."
+                print("Passwords do not match. Please try again.")
                 count = count + 1

             if count == MAX_TRIES:
@@ -27,7 +27,7 @@ class Command(BaseCommand):
                        'ExtraSharePermission': ExtraSharePermission,
                        'UploadLinkShare': UploadLinkShare}

-        for table in self.tables.items():
+        for table in list(self.tables.items()):
             self.clear_table(table[0], table[1])

         self.stdout.write('All invalid repo data are deleted')
@@ -118,9 +118,9 @@ class Command(BaseCommand):
         # username = None

         # Get an email
-        while 1:
+        while True:
             if not email:
-                email = raw_input('E-mail address: ')
+                email = input('E-mail address: ')
             try:
                 is_valid_email(email)
             except exceptions.ValidationError:

@@ -130,7 +130,7 @@ class Command(BaseCommand):
             break

         # Get a password
-        while 1:
+        while True:
             if not password:
                 password = getpass.getpass()
                 password2 = getpass.getpass('Password (again): ')

@@ -148,4 +148,4 @@ class Command(BaseCommand):
             sys.exit(1)

         User.objects.create_superuser(email, password)
-        print "Superuser created successfully."
+        print("Superuser created successfully.")
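raw_input() is gone in Python 3; input() now returns a plain string, and the old evaluating input() was dropped entirely. Sketch:

# Python 2: email = raw_input('E-mail address: ')
# Python 3:
email = input('E-mail address: ')   # returns str, no evaluation
print('Got: %s' % email)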
@@ -62,7 +62,7 @@ class Command(BaseCommand):
         repo_obj_dict = {}
         repo_owner_dict = {}

-        events.sort(lambda x, y: cmp(y.timestamp, x.timestamp))
+        events.sort(key=lambda x: x.timestamp, reverse=True)
         for ev in events:
             event_type, ev.show_device = generate_file_audit_event_type(ev)
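list.sort() lost its cmp argument in Python 3. The mechanical escape hatch is functools.cmp_to_key; this commit instead rewrites each comparator as a key function, which is shorter and faster. Illustrated on plain dicts rather than seahub's event objects:

from functools import cmp_to_key

events = [{'timestamp': 3}, {'timestamp': 1}, {'timestamp': 2}]

# Mechanical translation, keeping the old comparator style:
cmp = lambda a, b: (a > b) - (a < b)   # the cmp() builtin was removed too
events.sort(key=cmp_to_key(lambda x, y: cmp(y['timestamp'], x['timestamp'])))

# Idiomatic form used throughout this commit:
events.sort(key=lambda x: x['timestamp'], reverse=True)
assert [e['timestamp'] for e in events] == [3, 2, 1]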
@@ -24,19 +24,19 @@ class Command(BaseCommand):
         elif 'sqlite' in engine:
             sqlite = True
         else:
-            print 'Unsupported database. Exit.'
+            print('Unsupported database. Exit.')
             return

-        print 'Start to update schema...'
+        print('Start to update schema...')

         comments = list(FileComment.objects.raw('SELECT * from base_filecomment'))

         with connection.cursor() as cursor:
             sql = 'ALTER TABLE base_filecomment RENAME TO base_filecomment_backup_%s' % (random_key())
             cursor.execute(sql)
-            print sql
+            print(sql)

-            print ''
+            print('')

             if mysql:
                 sql = '''CREATE TABLE `base_filecomment` (

@@ -54,23 +54,23 @@ class Command(BaseCommand):
                 ''' % (random_key(), random_key(), random_key())

                 cursor.execute(sql)
-                print sql
+                print(sql)

             if sqlite:
                 sql = '''CREATE TABLE "base_filecomment" ("id" integer NOT NULL PRIMARY KEY AUTOINCREMENT, "author" varchar(255) NOT NULL, "comment" text NOT NULL, "created_at" datetime NOT NULL, "updated_at" datetime NOT NULL, "uuid_id" char(32) NOT NULL REFERENCES "tags_fileuuidmap" ("uuid"))
                 '''
                 cursor.execute(sql)
-                print sql
+                print(sql)

                 sql = '''CREATE INDEX "base_filecomment_%s" ON "base_filecomment" ("author")''' % random_key()
                 cursor.execute(sql)
-                print sql
+                print(sql)

                 sql = '''CREATE INDEX "base_filecomment_%s" ON "base_filecomment" ("uuid_id") ''' % random_key()
                 cursor.execute(sql)
-                print sql
+                print(sql)

-        print 'Start to migate comments data...'
+        print('Start to migate comments data...')
         for c in comments:
             repo_id = c.repo_id
             parent_path = c.parent_path

@@ -83,15 +83,15 @@ class Command(BaseCommand):
             uuid = FileUUIDMap.objects.get_or_create_fileuuidmap(repo_id, parent_path, filename, False)
             FileComment(uuid=uuid, author=author, comment=comment,
                         created_at=created_at, updated_at=updated_at).save()
-            print 'migrated comment ID: %d' % c.pk
+            print('migrated comment ID: %d' % c.pk)

-        print 'Done'
+        print('Done')

     def handle(self, *args, **options):
         # check table column `uuid`
         try:
             res = FileComment.objects.raw('SELECT uuid_id from base_filecomment limit 1')
             if 'uuid_id' in res.columns:
-                print 'base_filecomment is already migrated, exit.'
+                print('base_filecomment is already migrated, exit.')
         except OperationalError:
             self.migrate_schema()
@@ -185,7 +185,7 @@ class UserPermissionMiddleware(object):

         request_path = request.path
         def get_permission_by_request_path(request_path, permission_url):
-            for permission, url_list in permission_url.iteritems():
+            for permission, url_list in permission_url.items():
                 for url in url_list:
                     if url in request_path:
                         return permission
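dict.iteritems() and dict.has_key() were removed in Python 3; .items() and the in operator are the portable spellings (2to3 sometimes also wraps .items() in list(), as in the hunks above, where the old code needed a concrete list). Sketch:

permission_url = {'can_view': ['/view/'], 'can_edit': ['/edit/']}

for permission, url_list in permission_url.items():   # was .iteritems()
    assert isinstance(url_list, list)

assert 'can_view' in permission_url                   # was .has_key('can_view')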
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.11 on 2018-03-21 08:42
-from __future__ import unicode_literals
+

 import datetime
 from django.db import migrations, models

@@ -126,6 +126,6 @@ class Migration(migrations.Migration):
         ),
         migrations.AlterUniqueTogether(
             name='devicetoken',
-            unique_together=set([('token', 'user')]),
+            unique_together={('token', 'user')},
         ),
     ]

@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.11 on 2018-07-10 09:33
-from __future__ import unicode_literals
+

 from django.db import migrations, models

@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 # Generated by Django 1.11.15 on 2018-10-16 12:42
-from __future__ import unicode_literals
+

 from django.db import migrations, models
@@ -15,7 +15,7 @@ from seahub.utils import calc_file_path_hash, within_time_range, \
     normalize_file_path, normalize_dir_path
 from seahub.utils.timeutils import datetime_to_isoformat_timestr
 from seahub.tags.models import FileUUIDMap
-from fields import LowerCaseCharField
+from .fields import LowerCaseCharField


 # Get an instance of a logger

@@ -192,7 +192,7 @@ class UserStarredFilesManager(models.Manager):
         repo_cache = {}
         for sfile in starred_files:
             # repo still exists?
-            if repo_cache.has_key(sfile.repo_id):
+            if sfile.repo_id in repo_cache:
                 repo = repo_cache[sfile.repo_id]
             else:
                 try:

@@ -241,7 +241,7 @@ class UserStarredFilesManager(models.Manager):
                 logger.error(e)
                 sfile.last_modified = 0

-        ret.sort(lambda x, y: cmp(y.last_modified, x.last_modified))
+        ret.sort(key=lambda x: x.last_modified, reverse=True)

         return ret
@@ -26,10 +26,10 @@ THE SOFTWARE.
 try:
     import cProfile as profile
 except ImportError:
-    import profile
+    from . import profile

 import pstats
-from cStringIO import StringIO
+from io import StringIO
 from django.conf import settings

 class ProfilerMiddleware(object):
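Note the contrast with the avatar hunks: the profiler needs io.StringIO, not BytesIO, because pstats writes text. Python 3 splits the old StringIO into separate text and byte buffers:

import io

text_buf = io.StringIO()      # str in, str out -- what pstats needs
text_buf.write('profile output\n')

byte_buf = io.BytesIO()       # bytes in, bytes out -- what PIL needs
byte_buf.write(b'\x89PNG')

assert text_buf.getvalue() == 'profile output\n'
assert byte_buf.getvalue() == b'\x89PNG'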
@@ -2,10 +2,10 @@
 from django import template
 from django.core.urlresolvers import reverse
 from django.http import QueryDict
-from django.utils.encoding import force_unicode
+from django.utils.encoding import force_text
 from django.utils.html import escape
 from django.utils.safestring import SafeData, mark_safe
-from urlparse import urlsplit, urlunsplit
+from urllib.parse import urlsplit, urlunsplit
 import re
 import string

@@ -97,7 +97,7 @@ def add_class(value, css_class):
     In the case of REST Framework, the filter is used to add Bootstrap-specific
     classes to the forms.
     """
-    html = unicode(value)
+    html = str(value)
     match = class_re.search(html)
     if match:
         m = re.search(r'^%s$|^%s\s|\s%s\s|\s%s$' % (css_class, css_class,

@@ -131,7 +131,7 @@ def urlize_quoted_links(text, trim_url_limit=None, nofollow=True, autoescape=True):
     """
     trim_url = lambda x, limit=trim_url_limit: limit is not None and (len(x) > limit and ('%s...' % x[:max(0, limit - 3)])) or x
     safe_input = isinstance(text, SafeData)
-    words = word_split_re.split(force_unicode(text))
+    words = word_split_re.split(force_text(text))
     nofollow_attr = nofollow and ' rel="nofollow"' or ''
     for i, word in enumerate(words):
         match = None

@@ -167,4 +167,4 @@ def urlize_quoted_links(text, trim_url_limit=None, nofollow=True, autoescape=True):
             words[i] = mark_safe(word)
         elif autoescape:
             words[i] = escape(word)
-    return mark_safe(u''.join(words))
+    return mark_safe(''.join(words))
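force_unicode is simply the old name of force_text; Django dropped the former spelling on Python 3 (force_text itself was later renamed force_str in Django 3+, which this commit does not target). A sketch, assuming Django 1.11 is importable:

from django.utils.encoding import force_text

assert force_text(b'caf\xc3\xa9') == 'café'   # bytes are decoded as UTF-8
assert force_text(42) == '42'                 # non-strings are stringified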
@@ -53,23 +53,23 @@ FILEEXT_ICON_MAP = {
     'txt': 'txt.png',

     # pdf file
-    'pdf' : 'pdf.png',
+    'pdf': 'pdf.png',

     # document file
-    'doc' : 'word.png',
-    'docx' : 'word.png',
-    'odt' : 'word.png',
-    'fodt' : 'word.png',
+    'doc': 'word.png',
+    'docx': 'word.png',
+    'odt': 'word.png',
+    'fodt': 'word.png',

-    'ppt' : 'ppt.png',
-    'pptx' : 'ppt.png',
-    'odp' : 'ppt.png',
-    'fodp' : 'ppt.png',
+    'ppt': 'ppt.png',
+    'pptx': 'ppt.png',
+    'odp': 'ppt.png',
+    'fodp': 'ppt.png',

-    'xls' : 'excel.png',
-    'xlsx' : 'excel.png',
-    'ods' : 'excel.png',
-    'fods' : 'excel.png',
+    'xls': 'excel.png',
+    'xlsx': 'excel.png',
+    'ods': 'excel.png',
+    'fods': 'excel.png',

     # video
     'mp4': 'video.png',

@@ -81,25 +81,25 @@ FILEEXT_ICON_MAP = {
     'rmvb': 'video.png',

     # music file
-    'mp3' : 'music.png',
-    'oga' : 'music.png',
-    'ogg' : 'music.png',
-    'flac' : 'music.png',
-    'aac' : 'music.png',
-    'ac3' : 'music.png',
-    'wma' : 'music.png',
+    'mp3': 'music.png',
+    'oga': 'music.png',
+    'ogg': 'music.png',
+    'flac': 'music.png',
+    'aac': 'music.png',
+    'ac3': 'music.png',
+    'wma': 'music.png',

     # image file
-    'jpg' : 'pic.png',
-    'jpeg' : 'pic.png',
-    'png' : 'pic.png',
-    'svg' : 'pic.png',
-    'gif' : 'pic.png',
-    'bmp' : 'pic.png',
-    'ico' : 'pic.png',
+    'jpg': 'pic.png',
+    'jpeg': 'pic.png',
+    'png': 'pic.png',
+    'svg': 'pic.png',
+    'gif': 'pic.png',
+    'bmp': 'pic.png',
+    'ico': 'pic.png',

     # default
-    'default' : 'file.png',
+    'default': 'file.png',
 }
 @register.filter(name='file_icon_filter')
 def file_icon_filter(value, size=None):

@@ -109,7 +109,7 @@ def file_icon_filter(value, size=None):
     else:
         file_ext = None

-    if file_ext and FILEEXT_ICON_MAP.has_key(file_ext):
+    if file_ext and file_ext in FILEEXT_ICON_MAP:
         if size == 192:
             return '192/' + FILEEXT_ICON_MAP.get(file_ext)
     else:

@@ -122,17 +122,17 @@ def file_icon_filter(value, size=None):

 # This way of translation looks silly, but works well.
 COMMIT_MSG_TRANSLATION_MAP = {
-    'Added' : _('Added'),
-    'Deleted' : _('Deleted'),
-    'Removed' : _('Removed'),
-    'Modified' : _('Modified'),
-    'Renamed' : _('Renamed'),
-    'Moved' : _('Moved'),
-    'Added directory' : _('Added directory'),
-    'Removed directory' : _('Removed directory'),
-    'Renamed directory' : _('Renamed directory'),
-    'Moved directory' : _('Moved directory'),
-    'Added or modified' : _('Added or modified'),
+    'Added': _('Added'),
+    'Deleted': _('Deleted'),
+    'Removed': _('Removed'),
+    'Modified': _('Modified'),
+    'Renamed': _('Renamed'),
+    'Moved': _('Moved'),
+    'Added directory': _('Added directory'),
+    'Removed directory': _('Removed directory'),
+    'Renamed directory': _('Renamed directory'),
+    'Moved directory': _('Moved directory'),
+    'Added or modified': _('Added or modified'),
 }
 @register.filter(name='translate_commit_desc')
 def translate_commit_desc(value):

@@ -164,7 +164,7 @@ def translate_commit_desc(value):
     else:
         # Use regular expression to translate commit description.
         # Commit description has two forms, e.g., 'Added "foo.txt" and 3 more files.' or 'Added "foo.txt".'
-        operations = '|'.join(COMMIT_MSG_TRANSLATION_MAP.keys())
+        operations = '|'.join(list(COMMIT_MSG_TRANSLATION_MAP.keys()))
         patt = r'(%s) "(.*)"\s?(and ([0-9]+) more (files|directories))?' % operations

         ret_list = []

@@ -186,14 +186,14 @@ def translate_commit_desc(value):

             if has_more:
                 if translation.get_language() == 'zh-cn':
-                    typ = u'文件' if more_type == 'files' else u'目录'
-                    ret = op_trans + u' "' + file_name + u'"以及另外' + n_files + u'个' + typ + '.'
+                    typ = '文件' if more_type == 'files' else '目录'
+                    ret = op_trans + ' "' + file_name + '"以及另外' + n_files + '个' + typ + '.'
                 # elif translation.get_language() == 'ru':
                 #     ret = ...
                 else:
                     ret = e
             else:
-                ret = op_trans + u' "' + file_name + u'".'
+                ret = op_trans + ' "' + file_name + '".'
             ret_list.append(ret)

         return '\n'.join(ret_list)

@@ -235,7 +235,7 @@ def translate_commit_desc_escape(value):
     else:
         # Use regular expression to translate commit description.
         # Commit description has two forms, e.g., 'Added "foo.txt" and 3 more files.' or 'Added "foo.txt".'
-        operations = '|'.join(COMMIT_MSG_TRANSLATION_MAP.keys())
+        operations = '|'.join(list(COMMIT_MSG_TRANSLATION_MAP.keys()))
         patt = r'(%s) "(.*)"\s?(and ([0-9]+) more (files|directories))?' % operations

         for e in value.split('\n'):

@@ -258,14 +258,14 @@ def translate_commit_desc_escape(value):

             if has_more:
                 if translation.get_language() == 'zh-cn':
-                    typ = u'文件' if more_type == 'files' else u'目录'
-                    ret = op_trans + u' "' + file_name + u'"以及另外' + n_files + u'个' + typ + '.'
+                    typ = '文件' if more_type == 'files' else '目录'
+                    ret = op_trans + ' "' + file_name + '"以及另外' + n_files + '个' + typ + '.'
                 # elif translation.get_language() == 'ru':
                 #     ret = ...
                 else:
                     ret = e
             else:
-                ret = op_trans + u' "' + file_name + u'".'
+                ret = op_trans + ' "' + file_name + '".'

             # if not match, this commit desc will not convert link, so
             # escape it

@@ -278,7 +278,7 @@ def translate_commit_desc_escape(value):

 @register.filter(name='translate_seahub_time')
 def translate_seahub_time(value, autoescape=None):
-    if isinstance(value, int) or isinstance(value, long): # check whether value is int
+    if isinstance(value, int) or isinstance(value, int): # check whether value is int
         try:
             val = datetime.fromtimestamp(value) # convert timestamp to datetime
         except ValueError as e:

@@ -461,9 +461,9 @@ def char2pinyin(value):
 @register.filter(name='translate_permission')
 def translate_permission(value):
     if value == 'rw':
-        return _(u'Read-Write')
+        return _('Read-Write')
     elif value == 'r':
-        return _(u'Read-Only')
+        return _('Read-Only')
     else:
         return ''
@@ -3,7 +3,7 @@ import re
 import string

 from django.utils.safestring import SafeData, mark_safe
-from django.utils.encoding import force_unicode
+from django.utils.encoding import force_text
 from django.utils.functional import allow_lazy
 from django.utils.http import urlquote

@@ -21,8 +21,8 @@ def escape(html):
     """
     Returns the given HTML with ampersands, quotes and angle brackets encoded.
     """
-    return mark_safe(force_unicode(html).replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;').replace("'", '&#39;'))
-escape = allow_lazy(escape, unicode)
+    return mark_safe(force_text(html).replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;').replace("'", '&#39;'))
+escape = allow_lazy(escape, str)

 ## modification of django's urlize, add '%' to safe:
 ## urlquote('http://%s' % middle, safe='/&=:;#?+*%')

@@ -47,7 +47,7 @@ def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False):
     """
     trim_url = lambda x, limit=trim_url_limit: limit is not None and (len(x) > limit and ('%s...' % x[:max(0, limit - 3)])) or x
     safe_input = isinstance(text, SafeData)
-    words = word_split_re.split(force_unicode(text))
+    words = word_split_re.split(force_text(text))
     nofollow_attr = nofollow and ' rel="nofollow"' or ''
     for i, word in enumerate(words):
         match = None

@@ -83,5 +83,5 @@ def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False):
             words[i] = mark_safe(word)
         elif autoescape:
             words[i] = escape(word)
-    return u''.join(words)
-urlize = allow_lazy(urlize, unicode)
+    return ''.join(words)
+urlize = allow_lazy(urlize, str)
Some files were not shown because too many files have changed in this diff.