# seahub/seahub/utils/__init__.py
# Copyright (c) 2012-2016 Seafile Ltd.
# encoding: utf-8
import os
import re
import urllib
import urllib2
import uuid
import logging
import hashlib
import tempfile
import locale
import ConfigParser
import mimetypes
import contextlib
from datetime import datetime
from urlparse import urlparse, urljoin
import json
import ccnet
from constance import config
import seaserv
from seaserv import seafile_api
from django.core.urlresolvers import reverse
from django.core.mail import EmailMessage
from django.shortcuts import render_to_response
from django.template import RequestContext, Context, loader
from django.utils.translation import ugettext as _
from django.http import HttpResponseRedirect, HttpResponse, HttpResponseNotModified
from django.utils.http import urlquote
from django.utils.html import escape
from django.views.static import serve as django_static_serve
from seahub.api2.models import Token, TokenV2
import seahub.settings
from seahub.settings import SITE_NAME, MEDIA_URL, LOGO_PATH
try:
from seahub.settings import EVENTS_CONFIG_FILE
except ImportError:
EVENTS_CONFIG_FILE = None
try:
from seahub.settings import EMAIL_HOST
IS_EMAIL_CONFIGURED = True
except ImportError:
IS_EMAIL_CONFIGURED = False
try:
from seahub.settings import CLOUD_MODE
except ImportError:
CLOUD_MODE = False
try:
    from seahub.settings import ENABLE_INNER_FILESERVER
except ImportError:
    ENABLE_INNER_FILESERVER = True
try:
from seahub.settings import CHECK_SHARE_LINK_TRAFFIC
except ImportError:
CHECK_SHARE_LINK_TRAFFIC = False
def is_cluster_mode():
cfg = ConfigParser.ConfigParser()
if 'SEAFILE_CENTRAL_CONF_DIR' in os.environ:
confdir = os.environ['SEAFILE_CENTRAL_CONF_DIR']
else:
confdir = os.environ['SEAFILE_CONF_DIR']
conf = os.path.join(confdir, 'seafile.conf')
cfg.read(conf)
if cfg.has_option('cluster', 'enabled'):
enabled = cfg.getboolean('cluster', 'enabled')
else:
enabled = False
if enabled:
        logging.debug('cluster mode is enabled')
    else:
        logging.debug('cluster mode is disabled')
return enabled
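# For reference, cluster mode is enabled by a section like the following in
# seafile.conf (an illustrative sketch matching the has_option/getboolean
# calls above):
#
#   [cluster]
#   enabled = true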
CLUSTER_MODE = is_cluster_mode()
try:
from seahub.settings import OFFICE_CONVERTOR_ROOT
except ImportError:
OFFICE_CONVERTOR_ROOT = ''
try:
from seahub.settings import OFFICE_CONVERTOR_NODE
except ImportError:
OFFICE_CONVERTOR_NODE = False
from seahub.utils.file_types import *
from seahub.utils.htmldiff import HtmlDiff # used in views/files.py

EMPTY_SHA1 = '0000000000000000000000000000000000000000'
MAX_INT = 2147483647
PREVIEW_FILEEXT = {
IMAGE: ('gif', 'jpeg', 'jpg', 'png', 'ico', 'bmp'),
DOCUMENT: ('doc', 'docx', 'ppt', 'pptx', 'odt', 'fodt', 'odp', 'fodp'),
SPREADSHEET: ('xls', 'xlsx', 'ods', 'fods'),
# SVG: ('svg',),
PDF: ('pdf',),
MARKDOWN: ('markdown', 'md'),
VIDEO: ('mp4', 'ogv', 'webm', 'mov'),
AUDIO: ('mp3', 'oga', 'ogg'),
'3D': ('stl', 'obj'),
}
def gen_fileext_type_map():
"""
Generate previewed file extension and file type relation map.
2012-12-28 16:16:40 +08:00
"""
d = {}
for filetype in PREVIEW_FILEEXT.keys():
for fileext in PREVIEW_FILEEXT.get(filetype):
d[fileext] = filetype
return d
FILEEXT_TYPE_MAP = gen_fileext_type_map()
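# Illustrative lookups against the generated map (values follow the
# PREVIEW_FILEEXT table above):
#
#   FILEEXT_TYPE_MAP['md']  -> MARKDOWN
#   FILEEXT_TYPE_MAP['pdf'] -> PDF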
def render_permission_error(request, msg=None, extra_ctx=None):
"""
Return permisson error page.
"""
ctx = {}
2013-01-31 11:49:54 +08:00
ctx['error_msg'] = msg or _('permission error')
2012-12-28 16:16:40 +08:00
if extra_ctx:
for k in extra_ctx:
ctx[k] = extra_ctx[k]
return render_to_response('error.html', ctx,
context_instance=RequestContext(request))
def render_error(request, msg=None, extra_ctx=None):
"""
Return normal error page.
"""
ctx = {}
ctx['error_msg'] = msg or _('Internal error')
if extra_ctx:
for k in extra_ctx:
ctx[k] = extra_ctx[k]
return render_to_response('error.html', ctx,
context_instance=RequestContext(request))
def list_to_string(l):
"""
Return string of a list.
"""
return ','.join(l)
def get_fileserver_root():
""" Construct seafile fileserver address and port.
2013-07-25 13:30:19 +08:00
Returns:
2014-07-01 11:48:35 +08:00
Constructed fileserver root.
2012-12-28 16:16:40 +08:00
"""
return config.FILE_SERVER_ROOT
2013-07-25 13:30:19 +08:00
2014-07-01 11:48:35 +08:00
def get_inner_fileserver_root():
"""Construct inner seafile fileserver address and port.
2013-07-25 13:30:19 +08:00
2014-07-01 11:48:35 +08:00
Inner fileserver root allows Seahub access fileserver through local
2013-07-25 13:30:19 +08:00
address, thus avoiding the overhead of DNS queries, as well as other
related issues, for example, the server can not ping itself, etc.
Returns:
http://127.0.0.1:<port>
2012-12-28 16:16:40 +08:00
"""
2014-07-01 11:48:35 +08:00
return seahub.settings.INNER_FILE_SERVER_ROOT
2012-12-28 16:16:40 +08:00
def gen_token(max_length=5):
"""
Generate a random token.
"""
return uuid.uuid4().hex[:max_length]
def normalize_cache_key(value, prefix=None, token=None, max_length=200):
"""Returns a cache key consisten of ``value`` and ``prefix`` and ``token``. Cache key
must not include control characters or whitespace.
"""
key = value if prefix is None else prefix + value
key = key if token is None else key + '_' + token
return urlquote(key)[:max_length]
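# A sketch of how the parts combine ('quota_' is a hypothetical prefix;
# urlquote escapes characters such as '@'):
#
#   normalize_cache_key('foo@example.com', 'quota_')
#       -> 'quota_foo%40example.com'
#   normalize_cache_key('foo@example.com', 'quota_', token='v1')
#       -> 'quota_foo%40example.com_v1'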
def get_repo_last_modify(repo):
""" Get last modification time for a repo.
If head commit id of a repo is provided, we use that commit as last commit,
otherwise falls back to getting last commit of a repo which is time
consuming.
2012-12-28 16:16:40 +08:00
"""
2013-08-12 18:04:12 +08:00
if repo.head_cmmt_id is not None:
2015-10-27 12:12:00 +08:00
last_cmmt = seaserv.get_commit(repo.id, repo.version, repo.head_cmmt_id)
2013-08-12 18:04:12 +08:00
else:
2014-05-23 15:44:33 +08:00
logger = logging.getLogger(__name__)
2013-08-12 18:04:12 +08:00
logger.info('[repo %s] head_cmmt_id is missing.' % repo.id)
2015-10-27 12:12:00 +08:00
last_cmmt = seafile_api.get_commit_list(repo.id, 0, 1)[0]
2013-08-12 18:04:12 +08:00
return last_cmmt.ctime if last_cmmt else 0
def calculate_repos_last_modify(repo_list):
""" Get last modification time for repos.
2012-12-28 16:16:40 +08:00
"""
for repo in repo_list:
2013-08-12 18:04:12 +08:00
repo.latest_modify = get_repo_last_modify(repo)
2012-12-28 16:16:40 +08:00
2013-06-24 17:56:31 +08:00
def normalize_dir_path(path):
"""Add '/' at the end of directory path if necessary.
And make sure path starts with '/'
2013-06-24 17:56:31 +08:00
"""
path = path.strip('/')
if path == '':
return '/'
else:
return '/' + path + '/'
def normalize_file_path(path):
"""Remove '/' at the end of file path if necessary.
And make sure path starts with '/'
2013-06-24 17:56:31 +08:00
"""
path = path.strip('/')
if path == '':
return ''
else:
return '/' + path
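# Illustrative behavior of the two normalizers above:
#
#   normalize_dir_path('foo/bar')    -> '/foo/bar/'
#   normalize_dir_path('/')          -> '/'
#   normalize_file_path('foo/a.txt') -> '/foo/a.txt'
#   normalize_file_path('')          -> ''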
# modified from django1.5:/core/validators, and remove the support for single
# quote in email address
email_re = re.compile(
r"(^[-!#$%&*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&*+/=?^_`{}|~0-9A-Z]+)*" # dot-atom
# quoted-string, see also http://tools.ietf.org/html/rfc2822#section-3.2.5
r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"'
r')@((?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)$)' # domain
r'|\[(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}\]$', re.IGNORECASE) # literal form, ipv4 address (SMTP 4.1.3)
def is_valid_email(email):
    """A heavyweight email format validation.
    """
    return email_re.match(email) is not None
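# Quick sanity checks against the pattern above (hypothetical inputs):
#
#   is_valid_email('foo@example.com') -> True
#   is_valid_email('not-an-email')    -> False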
def is_valid_username(username):
"""Check whether username is valid, currently only email can be a username.
"""
return is_valid_email(username)
def is_valid_dirent_name(name):
"""Check whether repo/dir/file name is valid.
"""
# `repo_id` parameter is not used in seafile api
return seafile_api.is_valid_filename('fake_repo_id', name)
def is_ldap_user(user):
"""Check whether user is a LDAP user.
"""
return user.source == 'LDAP' or user.source == 'LDAPImport'
def get_no_duplicate_obj_name(obj_name, exist_obj_names):
    def no_duplicate(obj_name):
        for exist_obj_name in exist_obj_names:
            if exist_obj_name == obj_name:
                return False
        return True

    def make_new_name(obj_name, i):
        base, ext = os.path.splitext(obj_name)
        if ext:
            new_base = "%s (%d)" % (base, i)
            return new_base + ext
        else:
            return "%s (%d)" % (obj_name, i)

    if no_duplicate(obj_name):
        return obj_name
    else:
        i = 1
        while True:
            new_name = make_new_name(obj_name, i)
            if no_duplicate(new_name):
                return new_name
            else:
                i += 1
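# Illustrative behavior of the renaming loop above:
#
#   get_no_duplicate_obj_name('a.txt', ['b.txt'])              -> 'a.txt'
#   get_no_duplicate_obj_name('a.txt', ['a.txt', 'a (1).txt']) -> 'a (2).txt'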
def check_filename_with_rename(repo_id, parent_dir, obj_name):
cmmts = seafile_api.get_commit_list(repo_id, 0, 1)
latest_commit = cmmts[0] if cmmts else None
if not latest_commit:
return ''
    # TODO: what if parent_dir does not exist?
dirents = seafile_api.list_dir_by_commit_and_path(repo_id,
latest_commit.id, parent_dir.encode('utf-8'))
exist_obj_names = [dirent.obj_name for dirent in dirents]
return get_no_duplicate_obj_name(obj_name, exist_obj_names)
def get_user_repos(username, org_id=None):
"""
    Get all repos that a user can access, including owned, shared, public,
    and group repos.

    If ``org_id`` is not None, get org repos that the user can access.
"""
if org_id is None:
owned_repos = seafile_api.get_owned_repo_list(username)
shared_repos = seafile_api.get_share_in_repo_list(username, -1, -1)
groups_repos = []
for group in seaserv.get_personal_groups_by_user(username):
# TODO: use seafile_api.get_group_repos
groups_repos += seaserv.get_group_repos(group.id, username)
if CLOUD_MODE:
public_repos = []
else:
public_repos = seaserv.list_inner_pub_repos(username)
for r in shared_repos + public_repos:
            # column names in the shared_repo struct are not the same as
            # owned or group repos.
r.id = r.repo_id
r.name = r.repo_name
r.desc = r.repo_desc
r.last_modify = r.last_modified
else:
owned_repos = seafile_api.get_org_owned_repo_list(org_id, username)
shared_repos = seafile_api.get_org_share_in_repo_list(org_id, username,
-1, -1)
groups_repos = []
for group in seaserv.get_org_groups_by_user(org_id, username):
groups_repos += seafile_api.get_org_group_repos(org_id, group.id)
public_repos = seaserv.seafserv_threaded_rpc.list_org_inner_pub_repos(org_id)
for r in shared_repos + groups_repos + public_repos:
            # column names in the shared_repo struct are not the same as
            # owned repos.
r.id = r.repo_id
r.name = r.repo_name
r.desc = r.repo_desc
r.last_modify = r.last_modified
return (owned_repos, shared_repos, groups_repos, public_repos)
def get_conf_text_ext():
"""
Get the conf of text ext in constance settings, and remove space.
"""
if hasattr(config, 'TEXT_PREVIEW_EXT'):
text_ext = getattr(config, 'TEXT_PREVIEW_EXT').split(',')
return [x.strip() for x in text_ext]
return []
def get_file_type_and_ext(filename):
"""
    Return file type and extension if the file can be previewed online;
    otherwise, return unknown type.
"""
fileExt = os.path.splitext(filename)[1][1:].lower()
if fileExt in get_conf_text_ext():
return (TEXT, fileExt)
filetype = FILEEXT_TYPE_MAP.get(fileExt)
if filetype:
return (filetype, fileExt)
else:
return ('Unknown', fileExt)
def get_file_revision_id_size(repo_id, commit_id, path):
    """Given a commit and a file path in that commit, return the seafile id
    and size of the file blob.
    """
    repo = seafile_api.get_repo(repo_id)
    dirname = os.path.dirname(path)
    filename = os.path.basename(path)
    seafdir = seafile_api.list_dir_by_commit_and_path(repo_id, commit_id, dirname)
    for dirent in seafdir:
        if dirent.obj_name == filename:
            file_size = seafile_api.get_file_size(repo.store_id, repo.version,
                                                  dirent.obj_id)
            return dirent.obj_id, file_size
return None, None
def new_merge_with_no_conflict(commit):
"""Check whether a commit is a new merge, and no conflict.
Arguments:
- `commit`:
"""
if commit.second_parent_id is not None and commit.new_merge is True and \
commit.conflict is False:
return True
else:
return False
def get_commit_before_new_merge(commit):
"""Traverse parents of ``commit``, and get a commit which is not a new merge.
Pre-condition: ``commit`` must be a new merge and not conflict.
Arguments:
- `commit`:
"""
assert new_merge_with_no_conflict(commit) is True
    while new_merge_with_no_conflict(commit):
p1 = seaserv.get_commit(commit.repo_id, commit.version, commit.parent_id)
p2 = seaserv.get_commit(commit.repo_id, commit.version, commit.second_parent_id)
commit = p1 if p1.ctime > p2.ctime else p2
assert new_merge_with_no_conflict(commit) is False
return commit
def gen_inner_file_get_url(token, filename):
    """Generate inner fileserver file url.

    If ``ENABLE_INNER_FILESERVER`` is set to False (it defaults to True),
    return the outer fileserver file url instead.
Arguments:
- `token`:
- `filename`:
Returns:
e.g., http://127.0.0.1:<port>/files/<token>/<filename>
"""
if ENABLE_INNER_FILESERVER:
        return '%s/files/%s/%s' % (get_inner_fileserver_root(), token,
                                   urlquote(filename))
else:
return gen_file_get_url(token, filename)
def get_max_upload_file_size():
"""Get max upload file size from config file, defaults to no limit.
Returns ``None`` if this value is not set.
"""
return seaserv.MAX_UPLOAD_FILE_SIZE
def gen_block_get_url(token, blkid):
"""
2014-07-01 11:48:35 +08:00
Generate fileserver block url.
2013-08-04 19:31:36 +08:00
Format: http://<domain:port>/blks/<token>/<blkid>
"""
if blkid:
2014-07-01 11:48:35 +08:00
return '%s/blks/%s/%s' % (get_fileserver_root(), token, blkid)
2013-08-04 19:31:36 +08:00
else:
2014-07-01 11:48:35 +08:00
return '%s/blks/%s/' % (get_fileserver_root(), token)
2013-08-04 19:31:36 +08:00
2012-12-28 16:16:40 +08:00
def gen_file_get_url(token, filename):
"""
2014-07-01 11:48:35 +08:00
Generate fileserver file url.
2012-12-28 16:16:40 +08:00
Format: http://<domain:port>/files/<token>/<filename>
"""
2014-07-01 11:48:35 +08:00
return '%s/files/%s/%s' % (get_fileserver_root(), token, urlquote(filename))
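# Assuming FILE_SERVER_ROOT is 'https://example.com/seafhttp' (hypothetical),
# the generator above yields urls such as:
#
#   gen_file_get_url('tok123', 'a report.pdf')
#       -> 'https://example.com/seafhttp/files/tok123/a%20report.pdf'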
def gen_file_upload_url(token, op):
return '%s/%s/%s' % (get_fileserver_root(), op, token)
def gen_dir_zip_download_url(token):
"""
Generate fileserver file url.
Format: http://<domain:port>/files/<token>/<filename>
"""
return '%s/zip/%s' % (get_fileserver_root(), token)
def get_ccnet_server_addr_port():
"""get ccnet server host and port"""
return seaserv.CCNET_SERVER_ADDR, seaserv.CCNET_SERVER_PORT
def string2list(string):
"""
Split string contacted with different separators to a list, and remove
duplicated strings.
2012-12-28 16:16:40 +08:00
"""
tmp_str = string.replace(';', ',').replace('\n', ',').replace('\r', ',')
# Remove empty and duplicate strings
s = set()
for e in tmp_str.split(','):
e = e.strip(' ')
if not e:
continue
s.add(e)
    return list(s)
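# Illustrative call (result order is unspecified because values pass
# through a set):
#
#   string2list('a@x.com; b@x.com\na@x.com') -> ['a@x.com', 'b@x.com']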
def is_org_context(request):
"""An organization context is a virtual private Seafile instance on cloud
service.
Arguments:
- `request`:
"""
return request.cloud_mode and request.user.org is not None
# events related
if EVENTS_CONFIG_FILE:
parsed_events_conf = ConfigParser.ConfigParser()
parsed_events_conf.read(EVENTS_CONFIG_FILE)
import seafevents
EVENTS_ENABLED = True
SeafEventsSession = seafevents.init_db_session_class(EVENTS_CONFIG_FILE)
@contextlib.contextmanager
def _get_seafevents_session():
        session = SeafEventsSession()
        try:
            yield session
        finally:
            session.close()
def _same_events(e1, e2):
"""Two events are equal should follow two rules:
2015-03-24 14:33:07 +08:00
1. event1.repo_id = event2.repo_id
2. event1.commit.creator = event2.commit.creator
3. event1.commit.desc = event2.commit.desc
2013-05-21 11:57:48 +08:00
"""
2014-01-21 15:35:10 +08:00
if hasattr(e1, 'commit') and hasattr(e2, 'commit'):
2015-03-24 14:33:07 +08:00
if e1.repo_id == e2.repo_id and \
e1.commit.desc == e2.commit.desc and \
e1.commit.creator_name == e2.commit.creator_name:
2014-01-21 15:35:10 +08:00
return True
return False
def _get_events(username, start, count, org_id=None):
ev_session = SeafEventsSession()
valid_events = []
total_used = 0
try:
next_start = start
while True:
events = _get_events_inner(ev_session, username, next_start,
count, org_id)
if not events:
break
for e1 in events:
duplicate = False
                    for e2 in valid_events:
                        if _same_events(e1, e2):
                            duplicate = True
                            break
new_merge = False
if hasattr(e1, 'commit') and e1.commit and \
new_merge_with_no_conflict(e1.commit):
new_merge = True
if not duplicate and not new_merge:
valid_events.append(e1)
total_used = total_used + 1
if len(valid_events) == count:
break
if len(valid_events) == count:
break
next_start = next_start + len(events)
finally:
ev_session.close()
for e in valid_events: # parse commit description
if hasattr(e, 'commit'):
e.commit.converted_cmmt_desc = convert_cmmt_desc_link(e.commit)
e.commit.more_files = more_files_in_commit(e.commit)
return valid_events, start + total_used
def _get_events_inner(ev_session, username, start, limit, org_id=None):
        '''Read events from the seafevents database, removing events that
        are no longer valid.

        Return 'limit' events, or fewer if no more events remain.
        '''
valid_events = []
next_start = start
while True:
if org_id > 0:
events = seafevents.get_org_user_events(ev_session, org_id,
username, next_start,
limit)
else:
events = seafevents.get_user_events(ev_session, username,
next_start, limit)
if not events:
break
for ev in events:
if ev.etype == 'repo-update':
repo = seafile_api.get_repo(ev.repo_id)
if not repo:
# delete the update event for repo which has been deleted
seafevents.delete_event(ev_session, ev.uuid)
continue
if repo.encrypted:
                        repo.password_set = seafile_api.is_password_set(
                            repo.id, username)
ev.repo = repo
ev.commit = seaserv.get_commit(repo.id, repo.version, ev.commit_id)
valid_events.append(ev)
if len(valid_events) == limit:
break
if len(valid_events) == limit:
break
next_start = next_start + len(valid_events)
return valid_events
def get_user_events(username, start, count):
"""Return user events list and a new start.
For example:
``get_user_events('foo@example.com', 0, 10)`` returns the first 10
events.
``get_user_events('foo@example.com', 5, 10)`` returns the 6th through
15th events.
"""
return _get_events(username, start, count)
def get_org_user_events(org_id, username, start, count):
return _get_events(username, start, count, org_id=org_id)
def get_log_events_by_time(log_type, tstart, tend):
"""Return log events list by start/end timestamp. (If no logs, return 'None')
"""
with _get_seafevents_session() as session:
events = seafevents.get_event_log_by_time(session, log_type, tstart, tend)
return events if events else None
def generate_file_audit_event_type(e):
return {
'file-download-web': ('web', ''),
'file-download-share-link': ('share-link',''),
'file-download-api': ('API', e.device),
'repo-download-sync': ('download-sync', e.device),
'repo-upload-sync': ('upload-sync', e.device),
}[e.etype]
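    # Illustrative mapping, assuming an event whose etype is
    # 'repo-download-sync' and whose device attribute is 'PC':
    #
    #   generate_file_audit_event_type(e) -> ('download-sync', 'PC')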
def get_file_audit_events_by_path(email, org_id, repo_id, file_path, start, limit):
"""Return file audit events list by file path. (If no file audit, return 'None')
For example:
``get_file_audit_events_by_path(email, org_id, repo_id, file_path, 0, 10)`` returns the first 10
events.
``get_file_audit_events_by_path(email, org_id, repo_id, file_path, 5, 10)`` returns the 6th through
15th events.
"""
with _get_seafevents_session() as session:
events = seafevents.get_file_audit_events_by_path(session,
email, org_id, repo_id, file_path, start, limit)
return events if events else None
def get_file_audit_events(email, org_id, repo_id, start, limit):
"""Return file audit events list. (If no file audit, return 'None')
For example:
``get_file_audit_events(email, org_id, repo_id, 0, 10)`` returns the first 10
events.
``get_file_audit_events(email, org_id, repo_id, 5, 10)`` returns the 6th through
15th events.
"""
with _get_seafevents_session() as session:
events = seafevents.get_file_audit_events(session, email, org_id, repo_id, start, limit)
return events if events else None
def get_file_update_events(email, org_id, repo_id, start, limit):
"""Return file update events list. (If no file update, return 'None')
For example:
``get_file_update_events(email, org_id, repo_id, 0, 10)`` returns the first 10
events.
``get_file_update_events(email, org_id, repo_id, 5, 10)`` returns the 6th through
15th events.
"""
with _get_seafevents_session() as session:
events = seafevents.get_file_update_events(session, email, org_id, repo_id, start, limit)
return events if events else None
def get_perm_audit_events(email, org_id, repo_id, start, limit):
"""Return repo perm events list. (If no repo perm, return 'None')
For example:
``get_repo_perm_events(email, org_id, repo_id, 0, 10)`` returns the first 10
events.
``get_repo_perm_events(email, org_id, repo_id, 5, 10)`` returns the 6th through
15th events.
"""
with _get_seafevents_session() as session:
events = seafevents.get_perm_audit_events(session, email, org_id, repo_id, start, limit)
return events if events else None
def get_virus_record(repo_id=None, start=-1, limit=-1):
with _get_seafevents_session() as session:
r = seafevents.get_virus_record(session, repo_id, start, limit)
return r if r else []
def handle_virus_record(vid):
with _get_seafevents_session() as session:
return True if seafevents.handle_virus_record(session, vid) == 0 else False
def get_virus_record_by_id(vid):
with _get_seafevents_session() as session:
return seafevents.get_virus_record_by_id(session, vid)
else:
EVENTS_ENABLED = False
def get_user_events():
pass
def get_log_events_by_time():
pass
def get_org_user_events():
pass
def generate_file_audit_event_type():
pass
def get_file_audit_events_by_path():
pass
def get_file_audit_events():
pass
def get_file_update_events():
pass
def get_perm_audit_events():
pass
def get_virus_record():
pass
def handle_virus_record():
pass
def get_virus_record_by_id(vid):
pass
def calc_file_path_hash(path, bits=12):
if isinstance(path, unicode):
path = path.encode('UTF-8')
path_hash = hashlib.md5(urllib2.quote(path)).hexdigest()[:bits]
return path_hash
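# A sketch of the hashing above: the path is UTF-8 encoded, URL-quoted,
# md5-hashed, and the first ``bits`` hex digits are kept, e.g.
#
#   calc_file_path_hash('/docs/a.txt') -> a 12-character hex string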
def get_service_url():
"""Get service url from seaserv.
"""
return config.SERVICE_URL
def get_server_id():
"""Get server id from seaserv.
"""
return getattr(seaserv, 'SERVER_ID', '-')
def get_site_scheme_and_netloc():
"""Return a string contains site scheme and network location part from
service url.
For example:
>>> get_site_scheme_and_netloc("https://example.com:8000/seafile/")
https://example.com:8000
"""
parse_result = urlparse(get_service_url())
return "%s://%s" % (parse_result.scheme, parse_result.netloc)
def send_html_email(subject, con_template, con_context, from_email, to_email,
reply_to=None):
"""Send HTML email
"""
base_context = {
'url_base': get_site_scheme_and_netloc(),
'site_name': SITE_NAME,
'media_url': MEDIA_URL,
'logo_path': LOGO_PATH,
}
t = loader.get_template(con_template)
con_context.update(base_context)
headers = {}
if IS_EMAIL_CONFIGURED:
if reply_to is not None:
headers['Reply-to'] = reply_to
msg = EmailMessage(subject, t.render(Context(con_context)), from_email,
to_email, headers=headers)
msg.content_subtype = "html"
msg.send()
def gen_dir_share_link(token):
"""Generate directory share link.
"""
return gen_shared_link(token, 'd')
def gen_file_share_link(token):
"""Generate file share link.
"""
return gen_shared_link(token, 'f')
def gen_shared_link(token, s_type):
service_url = get_service_url()
assert service_url is not None
    service_url = service_url.rstrip('/')
    if s_type == 'f':
        return '%s/f/%s/' % (service_url, token)
    else:
        return '%s/d/%s/' % (service_url, token)
def gen_shared_upload_link(token):
service_url = get_service_url()
assert service_url is not None
service_url = service_url.rstrip('/')
return '%s/u/d/%s/' % (service_url, token)
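# With SERVICE_URL set to 'https://example.com' (hypothetical), the link
# generators above produce:
#
#   gen_file_share_link('tok123')    -> 'https://example.com/f/tok123/'
#   gen_dir_share_link('tok123')     -> 'https://example.com/d/tok123/'
#   gen_shared_upload_link('tok123') -> 'https://example.com/u/d/tok123/'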
def show_delete_days(request):
if request.method == 'GET':
days_str = request.GET.get('days', '')
elif request.method == 'POST':
days_str = request.POST.get('days', '')
else:
days_str = ''
try:
days = int(days_str)
except ValueError:
days = 7
return days
def is_textual_file(file_type):
"""
Check whether a file type is a textual file.
"""
    return file_type in (TEXT, MARKDOWN)
def redirect_to_login(request):
    from django.conf import settings
    login_url = settings.LOGIN_URL
    redirect_field_name = 'next'  # query parameter the login view reads
    path = urlquote(request.get_full_path())
    tup = login_url, redirect_field_name, path
    return HttpResponseRedirect('%s?%s=%s' % tup)
def mkstemp():
'''Returns (fd, filepath), the same as tempfile.mkstemp, except the
filepath is encoded in UTF-8
'''
fd, path = tempfile.mkstemp()
system_encoding = locale.getdefaultlocale()[1]
if system_encoding is not None:
path_utf8 = path.decode(system_encoding).encode('UTF-8')
return fd, path_utf8
else:
return fd, path
# File or directory operations
FILE_OP = ('Added or modified', 'Added', 'Modified', 'Renamed', 'Moved',
           'Added directory', 'Renamed directory', 'Moved directory')
OPS = '|'.join(FILE_OP)
CMMT_DESC_PATT = re.compile(r'(%s) "(.*)"\s?(and \d+ more (?:files|directories))?' % OPS)
API_OPS = '|'.join((OPS, 'Deleted', 'Removed'))
API_CMMT_DESC_PATT = r'(%s) "(.*)"\s?(and \d+ more (?:files|directories))?' % API_OPS
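# Example match against CMMT_DESC_PATT (an illustrative description):
#
#   'Added "foo.txt" and 2 more files'
#       group(1) -> 'Added'
#       group(2) -> 'foo.txt'
#       group(3) -> 'and 2 more files'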
def convert_cmmt_desc_link(commit):
"""Wrap file/folder with ``<a></a>`` in commit description.
"""
repo_id = commit.repo_id
cmmt_id = commit.id
conv_link_url = reverse('convert_cmmt_desc_link')
def link_repl(matchobj):
op = matchobj.group(1)
file_or_dir = matchobj.group(2)
remaining = matchobj.group(3)
tmp_str = '%s "<a href="%s?repo_id=%s&cmmt_id=%s&nm=%s" class="normal">%s</a>"'
if remaining:
return (tmp_str + ' %s') % (op, conv_link_url, repo_id, cmmt_id, urlquote(file_or_dir),
escape(file_or_dir), remaining)
else:
return tmp_str % (op, conv_link_url, repo_id, cmmt_id, urlquote(file_or_dir), escape(file_or_dir))
return re.sub(CMMT_DESC_PATT, link_repl, commit.desc)
def api_tsstr_sec(value):
"""Turn a timestamp to string"""
try:
return datetime.fromtimestamp(value).strftime("%Y-%m-%d %H:%M:%S")
except:
return datetime.fromtimestamp(value/1000000).strftime("%Y-%m-%d %H:%M:%S")
def api_convert_desc_link(e):
"""Wrap file/folder with ``<a></a>`` in commit description.
"""
commit = e.commit
repo_id = commit.repo_id
cmmt_id = commit.id
def link_repl(matchobj):
op = matchobj.group(1)
file_or_dir = matchobj.group(2)
remaining = matchobj.group(3)
tmp_str = '%s "<span class="file-name">%s</span>"'
if remaining:
url = reverse('api_repo_history_changes', args=[repo_id])
e.link = "%s?commit_id=%s" % (url, cmmt_id)
e.dtime = api_tsstr_sec(commit.props.ctime)
return (tmp_str + ' %s') % (op, file_or_dir, remaining)
else:
diff_result = seafile_api.diff_commits(repo_id, '', cmmt_id)
if diff_result:
for d in diff_result:
if file_or_dir not in d.name:
# skip to next diff_result if file/folder user clicked does not
# match the diff_result
continue
if d.status == 'add' or d.status == 'mod':
e.link = "api://repo/%s/files/?p=/%s" % (repo_id, d.name)
elif d.status == 'mov':
e.link = "api://repo/%s/files/?p=/%s" % (repo_id, d.new_name)
elif d.status == 'newdir':
e.link = "api://repo/%s/dir/?p=/%s" % (repo_id, d.name)
else:
continue
return tmp_str % (op, file_or_dir)
e.desc = re.sub(API_CMMT_DESC_PATT, link_repl, commit.desc)
MORE_PATT = r'and \d+ more (?:files|directories)'
def more_files_in_commit(commit):
"""Check whether added/deleted/modified more files in commit description.
"""
return True if re.search(MORE_PATT, commit.desc) else False
# file audit related
FILE_AUDIT_ENABLED = False
if EVENTS_CONFIG_FILE:
def check_file_audit_enabled():
enabled = seafevents.is_audit_enabled(parsed_events_conf)
if enabled:
logging.debug('file audit: enabled')
else:
logging.debug('file audit: not enabled')
return enabled
FILE_AUDIT_ENABLED = check_file_audit_enabled()
# office convert related
HAS_OFFICE_CONVERTER = False
if EVENTS_CONFIG_FILE:
def check_office_converter_enabled():
        enabled = seafevents.is_office_converter_enabled(parsed_events_conf)
if enabled:
logging.debug('office converter: enabled')
else:
logging.debug('office converter: not enabled')
return enabled
def get_office_converter_html_dir():
return seafevents.get_office_converter_html_dir(parsed_events_conf)
def get_office_converter_limit():
return seafevents.get_office_converter_limit(parsed_events_conf)
HAS_OFFICE_CONVERTER = check_office_converter_enabled()
if HAS_OFFICE_CONVERTER:
OFFICE_HTML_DIR = get_office_converter_html_dir()
OFFICE_PREVIEW_MAX_SIZE, OFFICE_PREVIEW_MAX_PAGES = get_office_converter_limit()
from seafevents.office_converter import OfficeConverterRpcClient
office_converter_rpc = None
def _get_office_converter_rpc():
global office_converter_rpc
if office_converter_rpc is None:
pool = ccnet.ClientPool(
seaserv.CCNET_CONF_PATH,
central_config_dir=seaserv.SEAFILE_CENTRAL_CONF_DIR
)
office_converter_rpc = OfficeConverterRpcClient(pool)
return office_converter_rpc
def office_convert_cluster_token(file_id):
from django.core import signing
s = '-'.join([file_id, datetime.now().strftime('%Y%m%d')])
return signing.Signer().sign(s)
def _office_convert_token_header(file_id):
return {
'X-Seafile-Office-Preview-Token': office_convert_cluster_token(file_id),
}
def cluster_delegate(delegate_func):
        '''usage:

        @cluster_delegate(funcA)
        def func(*args):
            ...non-cluster logic goes here...

        - In non-cluster mode, this decorator effectively does nothing.
        - In cluster mode, if this node is not the office convert node,
          funcA is called instead of the decorated function itself.
        '''
def decorated(func):
def real_func(*args, **kwargs):
cluster_internal = kwargs.pop('cluster_internal', False)
if CLUSTER_MODE and not OFFICE_CONVERTOR_NODE and not cluster_internal:
return delegate_func(*args)
else:
return func(*args)
return real_func
return decorated
def delegate_add_office_convert_task(file_id, doctype, raw_path):
url = urljoin(OFFICE_CONVERTOR_ROOT, '/office-convert/internal/add-task/')
data = urllib.urlencode({
'file_id': file_id,
'doctype': doctype,
'raw_path': raw_path,
})
headers = _office_convert_token_header(file_id)
ret = do_urlopen(url, data=data, headers=headers).read()
return json.loads(ret)
def delegate_query_office_convert_status(file_id, page):
        url = urljoin(OFFICE_CONVERTOR_ROOT, '/office-convert/internal/status/')
        url += '?file_id=%s&page=%s' % (file_id, page)
        headers = _office_convert_token_header(file_id)
        ret = do_urlopen(url, headers=headers).read()
return json.loads(ret)
def delegate_get_office_converted_page(request, repo_id, commit_id, path, static_filename, file_id):
url = urljoin(OFFICE_CONVERTOR_ROOT,
'/office-convert/internal/static/%s/%s%s/%s' % (
repo_id, commit_id, urlquote(path), urlquote(static_filename)))
url += '?file_id=' + file_id
headers = _office_convert_token_header(file_id)
timestamp = request.META.get('HTTP_IF_MODIFIED_SINCE')
if timestamp:
headers['If-Modified-Since'] = timestamp
try:
ret = do_urlopen(url, headers=headers)
data = ret.read()
except urllib2.HTTPError, e:
if timestamp and e.code == 304:
return HttpResponseNotModified()
else:
raise
content_type = ret.headers.get('content-type', None)
if content_type is None:
dummy, ext = os.path.splitext(os.path.basename(path))
content_type = mimetypes.types_map.get(ext, 'application/octet-stream')
resp = HttpResponse(data, content_type=content_type)
if 'last-modified' in ret.headers:
resp['Last-Modified'] = ret.headers.get('last-modified')
return resp
@cluster_delegate(delegate_add_office_convert_task)
def add_office_convert_task(file_id, doctype, raw_path):
rpc = _get_office_converter_rpc()
d = rpc.add_task(file_id, doctype, raw_path)
return {
'exists': False,
}
@cluster_delegate(delegate_query_office_convert_status)
def query_office_convert_status(file_id, page):
rpc = _get_office_converter_rpc()
d = rpc.query_convert_status(file_id, page)
ret = {}
if d.error:
ret['error'] = d.error
ret['status'] = 'ERROR'
else:
ret['success'] = True
ret['status'] = d.status
ret['info'] = d.info
return ret
@cluster_delegate(delegate_get_office_converted_page)
def get_office_converted_page(request, repo_id, commit_id, path, static_filename, file_id):
return django_static_serve(request,
os.path.join(file_id, static_filename),
document_root=OFFICE_HTML_DIR)
def prepare_converted_html(raw_path, obj_id, doctype, ret_dict):
try:
add_office_convert_task(obj_id, doctype, raw_path)
except:
logging.exception('failed to add_office_convert_task:')
return _(u'Internal error')
return None
# search related
HAS_FILE_SEARCH = False
if EVENTS_CONFIG_FILE:
def check_search_enabled():
enabled = False
if hasattr(seafevents, 'is_search_enabled'):
enabled = seafevents.is_search_enabled(parsed_events_conf)
if enabled:
logging.debug('search: enabled')
else:
logging.debug('search: not enabled')
return enabled
HAS_FILE_SEARCH = check_search_enabled()
TRAFFIC_STATS_ENABLED = False
if EVENTS_CONFIG_FILE and hasattr(seafevents, 'get_user_traffic_stat'):
TRAFFIC_STATS_ENABLED = True
def get_user_traffic_stat(username):
session = SeafEventsSession()
try:
stat = seafevents.get_user_traffic_stat(session, username)
finally:
session.close()
return stat
def get_user_traffic_list(month, start=0, limit=25):
session = SeafEventsSession()
try:
stat = seafevents.get_user_traffic_list(session, month, start, limit)
finally:
session.close()
return stat
else:
def get_user_traffic_stat(username):
pass
def get_user_traffic_list():
pass
def user_traffic_over_limit(username):
"""Return ``True`` if user traffic over the limit, otherwise ``False``.
"""
if not CHECK_SHARE_LINK_TRAFFIC:
return False
from seahub_extra.plan.models import UserPlan
from seahub_extra.plan.settings import PLAN
up = UserPlan.objects.get_valid_plan_by_user(username)
plan = 'Free' if up is None else up.plan_type
traffic_limit = int(PLAN[plan]['share_link_traffic']) * 1024 * 1024 * 1024
try:
stat = get_user_traffic_stat(username)
except Exception as e:
logger = logging.getLogger(__name__)
logger.error('Failed to get user traffic stat: %s' % username,
exc_info=True)
return True
if stat is None: # No traffic record yet
return False
month_traffic = stat['file_view'] + stat['file_download'] + stat['dir_download']
return True if month_traffic >= traffic_limit else False
def is_user_password_strong(password):
"""Return ``True`` if user's password is STRONG, otherwise ``False``.
    STRONG means the password contains at least USER_PASSWORD_STRENGTH_LEVEL (3)
    of the character types below:
    num, upper letter, lower letter, other symbols
"""
if len(password) < config.USER_PASSWORD_MIN_LENGTH:
return False
else:
num = 0
for letter in password:
# get ascii dec
# bitwise OR
num |= get_char_mode(ord(letter))
if calculate_bitwise(num) < config.USER_PASSWORD_STRENGTH_LEVEL:
return False
else:
return True
def get_char_mode(n):
"""Return different num according to the type of given letter:
'1': num,
'2': upper_letter,
'4': lower_letter,
'8': other symbols
"""
    if (n >= 48 and n <= 57):    # digits
        return 1
    if (n >= 65 and n <= 90):    # upper-case letters
        return 2
    if (n >= 97 and n <= 122):   # lower-case letters
        return 4
    else:
        return 8
def calculate_bitwise(num):
"""Return different level according to the given num:
"""
level = 0
for i in range(4):
# bitwise AND
if (num&1):
level += 1
# Right logical shift
num = num >> 1
return level
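# Worked example for the three password helpers above, assuming
# USER_PASSWORD_MIN_LENGTH = 6 and USER_PASSWORD_STRENGTH_LEVEL = 3:
#
#   'Abc123' -> char modes 2 (upper) | 4 (lower) | 1 (digit) = 7
#   calculate_bitwise(7) -> 3, so is_user_password_strong('Abc123') -> True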
def do_md5(s):
if isinstance(s, unicode):
s = s.encode('UTF-8')
return hashlib.md5(s).hexdigest()
def do_urlopen(url, data=None, headers=None):
headers = headers or {}
req = urllib2.Request(url, data=data, headers=headers)
ret = urllib2.urlopen(req)
return ret
def is_pro_version():
if seahub.settings.DEBUG:
if hasattr(seahub.settings, 'IS_PRO_VERSION') \
and seahub.settings.IS_PRO_VERSION:
return True
if EVENTS_CONFIG_FILE:
return True
else:
return False
def clear_token(username):
'''
    clear web api and repo sync tokens
    when deleting/deactivating a user
'''
Token.objects.filter(user = username).delete()
TokenV2.objects.filter(user = username).delete()
seafile_api.delete_repo_tokens_by_email(username)
def send_perm_audit_msg(etype, from_user, to, repo_id, path, perm):
"""Send repo permission audit msg.
Arguments:
- `etype`: add/modify/delete-repo-perm
- `from_user`: email
- `to`: email or group_id or all(public)
- `repo_id`: origin repo id
- `path`: dir path
- `perm`: r or rw
"""
msg = 'perm-update\t%s\t%s\t%s\t%s\t%s\t%s' % \
(etype, from_user, to, repo_id, path, perm)
msg_utf8 = msg.encode('utf-8')
    try:
        seaserv.send_message('seahub.stats', msg_utf8)
    except Exception as e:
        logger = logging.getLogger(__name__)
        logger.error("Error when sending perm-audit-%s message: %s" %
                     (etype, str(e)))
def get_origin_repo_info(repo_id):
repo = seafile_api.get_repo(repo_id)
if repo.origin_repo_id is not None:
origin_repo_id = repo.origin_repo_id
origin_path = repo.origin_path
return (origin_repo_id, origin_path)
return (None, None)
def within_time_range(d1, d2, maxdiff_seconds):
    '''Return True if two datetime.datetime objects differ by less than the given seconds.'''
delta = d2 - d1 if d2 > d1 else d1 - d2
# delta.total_seconds() is only available in python 2.7+
diff = (delta.microseconds + (delta.seconds + delta.days*24*3600) * 1e6) / 1e6
return diff < maxdiff_seconds
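# Illustrative call:
#
#   within_time_range(datetime(2016, 1, 1, 12, 0, 0),
#                     datetime(2016, 1, 1, 12, 0, 30), 60) -> True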
def is_org_repo_creation_allowed(request):
"""Whether or not allow a user create organization library.
"""
if request.user.is_staff:
return True
else:
return config.ENABLE_USER_CREATE_ORG_REPO
def get_system_admins():
db_users = seaserv.get_emailusers('DB', -1, -1)
    ldap_imported_users = seaserv.get_emailusers('LDAPImport', -1, -1)
    admins = []
    for user in db_users + ldap_imported_users:
if user.is_staff:
admins.append(user)
return admins
def is_windows_operating_system(request):
    if 'HTTP_USER_AGENT' not in request.META:
        return False
    return 'windows' in request.META['HTTP_USER_AGENT'].lower()