# encoding: utf-8
import os
import re
import urllib2
import uuid
import logging
import hashlib
import tempfile
import locale
import ConfigParser
from datetime import datetime
from urlparse import urlparse

import ccnet

from django.core.urlresolvers import reverse
from django.core.mail import EmailMessage
from django.shortcuts import render_to_response
from django.template import RequestContext, Context, loader
from django.utils.translation import ugettext as _
from django.http import HttpResponseRedirect
from django.utils.http import urlquote

import seaserv
from seaserv import seafile_api
from seaserv import seafserv_rpc, seafserv_threaded_rpc, get_repo, get_commits, \
    CCNET_SERVER_ADDR, CCNET_SERVER_PORT, get_org_by_id, is_org_staff, \
    get_org_id_by_group, get_personal_groups_by_user, \
    list_personal_repos_by_owner, get_group_repos, \
    list_inner_pub_repos, CCNET_CONF_PATH, SERVICE_URL
import seahub.settings
from seahub.settings import SITE_NAME, MEDIA_URL, LOGO_PATH

try:
    from seahub.settings import EVENTS_CONFIG_FILE
except ImportError:
    EVENTS_CONFIG_FILE = None

try:
    from seahub.settings import EMAIL_HOST
    IS_EMAIL_CONFIGURED = True
except ImportError:
    IS_EMAIL_CONFIGURED = False

try:
    from seahub.settings import CLOUD_MODE
except ImportError:
    CLOUD_MODE = False

try:
    from seahub.settings import ENABLE_INNER_HTTPSERVER
except ImportError:
    ENABLE_INNER_HTTPSERVER = True

try:
    from seahub.settings import CHECK_SHARE_LINK_TRAFFIC
except ImportError:
    CHECK_SHARE_LINK_TRAFFIC = False

from seahub.utils.file_types import *
from seahub.utils.htmldiff import HtmlDiff  # used in views/files.py

EMPTY_SHA1 = '0000000000000000000000000000000000000000'
MAX_INT = 2147483647

PREVIEW_FILEEXT = {
    TEXT: ('ac', 'am', 'bat', 'c', 'cc', 'cmake', 'cpp', 'cs', 'css', 'diff',
           'el', 'h', 'html', 'htm', 'java', 'js', 'json', 'less', 'make',
           'org', 'php', 'pl', 'properties', 'py', 'rb', 'scala', 'script',
           'sh', 'sql', 'txt', 'text', 'tex', 'vi', 'vim', 'xhtml', 'xml',
           'log', 'csv', 'groovy', 'rst', 'patch', 'go'),
    IMAGE: ('gif', 'jpeg', 'jpg', 'png', 'ico'),
    DOCUMENT: ('doc', 'docx', 'ppt', 'pptx'),
    SPREADSHEET: ('xls', 'xlsx'),
    # SVG: ('svg',),
    PDF: ('pdf',),
    OPENDOCUMENT: ('odt', 'fodt', 'odp', 'fodp', 'ods', 'fods'),
    MARKDOWN: ('markdown', 'md'),
    SF: ('seaf',),
    VIDEO: ('mp4', 'ogv', 'webm', 'flv', 'wmv'),
    AUDIO: ('mp3', 'oga', 'ogg'),
    '3D': ('stl', 'obj'),
}

def gen_fileext_type_map():
    """
    Generate a map from previewable file extension to file type.
    """
    d = {}
    for filetype in PREVIEW_FILEEXT.keys():
        for fileext in PREVIEW_FILEEXT.get(filetype):
            d[fileext] = filetype

    return d

FILEEXT_TYPE_MAP = gen_fileext_type_map()
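
# Illustrative lookups (a sketch; the type constants come from the
# seahub.utils.file_types wildcard import above). Extensions map back to
# their preview type, and extensions not listed above simply return None:
#
#     FILEEXT_TYPE_MAP.get('md')     # -> MARKDOWN
#     FILEEXT_TYPE_MAP.get('jpg')    # -> IMAGE
#     FILEEXT_TYPE_MAP.get('exe')    # -> None (not previewable)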

def render_permission_error(request, msg=None, extra_ctx=None):
    """
    Return permission error page.
    """
    ctx = {}
    ctx['error_msg'] = msg or _('permission error')

    if extra_ctx:
        for k in extra_ctx:
            ctx[k] = extra_ctx[k]

    return render_to_response('permission_error.html', ctx,
                              context_instance=RequestContext(request))

def render_error(request, msg=None, extra_ctx=None):
    """
    Return normal error page.
    """
    ctx = {}
    ctx['error_msg'] = msg or _('Internal error')

    if extra_ctx:
        for k in extra_ctx:
            ctx[k] = extra_ctx[k]

    return render_to_response('error.html', ctx,
                              context_instance=RequestContext(request))

def list_to_string(l):
    """
    Return a comma-joined string of a list.
    """
    return ','.join(l)

def get_httpserver_root():
    """Construct seafile httpserver address and port.

    Returns:
        Constructed httpserver root.
    """
    from seahub.settings import HTTP_SERVER_ROOT

    assert HTTP_SERVER_ROOT is not None, "SERVICE_URL is not set in ccnet.conf."

    return HTTP_SERVER_ROOT

def get_inner_httpserver_root():
    """Construct inner seafile httpserver address and port.

    The inner httpserver root lets Seahub reach the httpserver through a
    local address, avoiding the overhead of DNS queries as well as related
    issues, for example when the server can not ping itself.

    Returns:
        http://127.0.0.1:<port>
    """
    return seahub.settings.INNER_HTTP_SERVER_ROOT

def gen_token(max_length=5):
    """
    Generate a random token.
    """
    return uuid.uuid4().hex[:max_length]
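
# Example (illustrative only; the value is random): gen_token() returns the
# first five characters of a UUID4 hex string, e.g. something like '9f3a1',
# while gen_token(max_length=20) returns a 20-character hex token.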

def normalize_cache_key(value, prefix=None):
    """Return a cache key consisting of ``value`` and ``prefix``. A cache key
    must not include control characters or whitespace.
    """
    key = value if prefix is None else prefix + value
    return urlquote(key)
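
# Illustrative usage (a sketch; the 'quota_' prefix is hypothetical):
#
#     normalize_cache_key('foo bar@example.com', 'quota_')
#     # -> 'quota_foo%20bar%40example.com'
#
# urlquote() percent-encodes whitespace and characters such as '@', so the
# result is safe to use as a memcached key.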

def get_repo_last_modify(repo):
    """Get the last modification time of a repo.

    If the head commit id of the repo is available, use that commit as the
    last commit; otherwise fall back to fetching the last commit of the repo,
    which is time consuming.
    """
    if repo.head_cmmt_id is not None:
        last_cmmt = seafserv_threaded_rpc.get_commit(repo.id, repo.version, repo.head_cmmt_id)
    else:
        logger = logging.getLogger(__name__)
        logger.info('[repo %s] head_cmmt_id is missing.' % repo.id)
        last_cmmt = get_commits(repo.id, 0, 1)[0]
    return last_cmmt.ctime if last_cmmt else 0

def calculate_repos_last_modify(repo_list):
    """Get the last modification time for each repo in ``repo_list``.
    """
    for repo in repo_list:
        repo.latest_modify = get_repo_last_modify(repo)

def normalize_dir_path(path):
    """Add '/' at the end of directory path if necessary.
    """
    if path[-1] != '/':
        path = path + '/'
    return path

def normalize_file_path(path):
    """Remove '/' at the end of file path if necessary.
    """
    return path.rstrip('/')
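
# Examples (illustrative):
#
#     normalize_dir_path('/foo/bar')      # -> '/foo/bar/'
#     normalize_dir_path('/foo/bar/')     # -> '/foo/bar/'
#     normalize_file_path('/foo/a.txt/')  # -> '/foo/a.txt'
#     normalize_file_path('/foo/a.txt')   # -> '/foo/a.txt'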

# Modified from django 1.5 django/core/validators.py, with support for a
# single quote in the email address removed.
email_re = re.compile(
    r"(^[-!#$%&*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&*+/=?^_`{}|~0-9A-Z]+)*"  # dot-atom
    # quoted-string, see also http://tools.ietf.org/html/rfc2822#section-3.2.5
    r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"'
    r')@((?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)$)'  # domain
    r'|\[(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}\]$', re.IGNORECASE)  # literal form, ipv4 address (SMTP 4.1.3)

def is_valid_email(email):
    """A heavyweight email format validation.
    """
    return True if email_re.match(email) is not None else False
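
# Illustrative checks (a sketch):
#
#     is_valid_email('foo@example.com')       # -> True
#     is_valid_email("o'brien@example.com")   # -> False (single quote not allowed)
#     is_valid_email('not-an-email')          # -> False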

def is_valid_username(username):
    """Check whether the username is valid; currently only an email address
    can be a username.
    """
    return is_valid_email(username)

def is_ldap_user(user):
    """Check whether the user is an LDAP user.
    """
    return user.source == 'LDAP'

def check_filename_with_rename(repo_id, parent_dir, filename):
    cmmts = get_commits(repo_id, 0, 1)
    latest_commit = cmmts[0] if cmmts else None
    if not latest_commit:
        return ''
    # TODO: what if parent_dir does not exist?
    dirents = seafile_api.list_dir_by_commit_and_path(repo_id, latest_commit.id,
                                                      parent_dir.encode('utf-8'))

    def no_duplicate(name):
        for dirent in dirents:
            if dirent.obj_name == name:
                return False
        return True

    def make_new_name(filename, i):
        base, ext = os.path.splitext(filename)
        if ext:
            new_base = "%s (%d)" % (base, i)
            return new_base + ext
        else:
            return "%s (%d)" % (filename, i)

    if no_duplicate(filename):
        return filename
    else:
        i = 1
        while True:
            new_name = make_new_name(filename, i)
            if no_duplicate(new_name):
                return new_name
            else:
                i += 1
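
# Illustrative behaviour (a sketch, assuming '/docs' in the repo already
# contains 'report.txt' and 'report (1).txt'):
#
#     check_filename_with_rename(repo_id, '/docs', 'report.txt')
#     # -> 'report (2).txt'
#
# i.e. a ' (N)' suffix is inserted before the extension until the name no
# longer collides with an existing entry in the parent directory.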

def get_user_repos(username, org_id=None):
    """
    Get all repos that the user can access, including owned, shared, public,
    and group repos.
    If ``org_id`` is not None, get the org repos that the user can access.
    """
    if org_id is None:
        owned_repos = list_personal_repos_by_owner(username)
        shared_repos = seafile_api.get_share_in_repo_list(username, -1, -1)
        groups_repos = []
        for group in get_personal_groups_by_user(username):
            # TODO: use seafile_api.get_group_repos
            groups_repos += get_group_repos(group.id, username)
        if CLOUD_MODE:
            public_repos = []
        else:
            public_repos = list_inner_pub_repos(username)

        for r in shared_repos + public_repos:
            # column names in the shared_repo struct are not the same as in
            # owned or group repos.
            r.id = r.repo_id
            r.name = r.repo_name
            r.desc = r.repo_desc
            r.last_modify = r.last_modified
    else:
        owned_repos = seafile_api.get_org_owned_repo_list(org_id, username)
        shared_repos = seafile_api.get_org_share_in_repo_list(org_id, username,
                                                              -1, -1)
        groups_repos = []
        for group in seaserv.get_org_groups_by_user(org_id, username):
            groups_repos += seafile_api.get_org_group_repos(org_id, group.id)
        public_repos = seaserv.seafserv_threaded_rpc.list_org_inner_pub_repos(org_id)

        for r in shared_repos + groups_repos + public_repos:
            # column names in the shared_repo struct are not the same as in
            # owned repos.
            r.id = r.repo_id
            r.name = r.repo_name
            r.desc = r.repo_desc
            r.last_modify = r.last_modified

    return (owned_repos, shared_repos, groups_repos, public_repos)

def get_file_type_and_ext(filename):
    """
    Return the file type and extension if the file can be previewed online,
    otherwise return an unknown type.
    """
    fileExt = os.path.splitext(filename)[1][1:].lower()
    filetype = FILEEXT_TYPE_MAP.get(fileExt)
    if filetype:
        return (filetype, fileExt)
    else:
        return ('Unknown', fileExt)

def get_file_revision_id_size(repo_id, commit_id, path):
    """Given a commit and a file path in that commit, return the seafile id
    and size of the file blob.
    """
    repo = get_repo(repo_id)
    dirname = os.path.dirname(path)
    filename = os.path.basename(path)
    seafdir = seafile_api.list_dir_by_commit_and_path(repo_id, commit_id, dirname)
    for dirent in seafdir:
        if dirent.obj_name == filename:
            file_size = seafserv_threaded_rpc.get_file_size(repo.store_id, repo.version,
                                                            dirent.obj_id)
            return dirent.obj_id, file_size

    return None, None

def new_merge_with_no_conflict(commit):
    """Check whether a commit is a new merge with no conflict.

    Arguments:
    - `commit`:
    """
    if commit.second_parent_id is not None and commit.new_merge is True and \
            commit.conflict is False:
        return True
    else:
        return False

def get_commit_before_new_merge(commit):
    """Traverse the parents of ``commit`` and return a commit which is not a
    new merge.

    Pre-condition: ``commit`` must be a new merge with no conflict.

    Arguments:
    - `commit`:
    """
    assert new_merge_with_no_conflict(commit) is True

    while new_merge_with_no_conflict(commit):
        p1 = seafserv_threaded_rpc.get_commit(commit.repo_id, commit.version, commit.parent_id)
        p2 = seafserv_threaded_rpc.get_commit(commit.repo_id, commit.version, commit.second_parent_id)
        commit = p1 if p1.ctime > p2.ctime else p2

    assert new_merge_with_no_conflict(commit) is False

    return commit

def gen_inner_file_get_url(token, filename):
    """Generate an inner httpserver file url.

    If ``ENABLE_INNER_HTTPSERVER`` is set to False (it defaults to True),
    return the outer httpserver file url instead.

    Arguments:
    - `token`:
    - `filename`:

    Returns:
        e.g., http://127.0.0.1:<port>/files/<token>/<filename>
    """
    if ENABLE_INNER_HTTPSERVER:
        return '%s/files/%s/%s' % (get_inner_httpserver_root(), token,
                                   urlquote(filename))
    else:
        return gen_file_get_url(token, filename)

def get_max_upload_file_size():
    """Get max upload file size from config file, defaults to no limit.

    Returns ``None`` if this value is not set.
    """
    return seaserv.MAX_UPLOAD_FILE_SIZE

def gen_block_get_url(token, blkid):
    """
    Generate httpserver block url.
    Format: http://<domain:port>/blks/<token>/<blkid>
    """
    if blkid:
        return '%s/blks/%s/%s' % (get_httpserver_root(), token, blkid)
    else:
        return '%s/blks/%s/' % (get_httpserver_root(), token)

def gen_file_get_url(token, filename):
    """
    Generate httpserver file url.
    Format: http://<domain:port>/files/<token>/<filename>
    """
    return '%s/files/%s/%s' % (get_httpserver_root(), token, urlquote(filename))
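
# Illustrative output (a sketch; the httpserver root and token are made up):
#
#     gen_file_get_url('token123', 'a report.pdf')
#     # -> 'https://seafile.example.com:8082/files/token123/a%20report.pdf'
#
# The filename is URL-quoted so that spaces and non-ASCII characters survive
# in the download link.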

def gen_file_upload_url(token, op):
    return '%s/%s/%s' % (get_httpserver_root(), op, token)

def get_ccnet_server_addr_port():
    """Get the ccnet server host and port."""
    return CCNET_SERVER_ADDR, CCNET_SERVER_PORT

def string2list(string):
    """
    Split a string joined by different separators into a list, removing
    duplicated entries.
    """
    tmp_str = string.replace(';', ',').replace('\n', ',').replace('\r', ',')
    # Remove empty and duplicate strings
    s = set()
    for e in tmp_str.split(','):
        e = e.strip(' ')
        if not e:
            continue
        s.add(e)
    return [x for x in s]
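
# Example (illustrative): ';', '\n' and '\r' are all treated as commas,
# surrounding spaces are stripped, and duplicates collapse. Order is not
# preserved, because a set is used internally:
#
#     string2list('a@x.com; b@x.com\nb@x.com , ,c@x.com')
#     # -> ['a@x.com', 'b@x.com', 'c@x.com'] (in arbitrary order)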

# def get_cur_ctx(request):
#     ctx_dict = request.session.get('current_context', {
#             'base_template': 'myhome_base.html',
#             'org_dict': None})
#     return ctx_dict

# def set_cur_ctx(request, ctx_dict):
#     request.session['current_context'] = ctx_dict
#     request.user.org = ctx_dict.get('org_dict', None)

def is_org_context(request):
    """An organization context is a virtual private Seafile instance on the
    cloud service.

    Arguments:
    - `request`:
    """
    return request.cloud_mode and request.user.org is not None

# def check_and_get_org_by_repo(repo_id, user):
#     """
#     Check whether repo is an org repo, get org info if it is, and set
#     base template.
#     """
#     org_id = get_org_id_by_repo_id(repo_id)
#     if org_id > 0:
#         # this repo is an org repo, get org info
#         org = get_org_by_id(org_id)
#         org._dict['is_staff'] = is_org_staff(org_id, user)
#         org._dict['email'] = user
#         base_template = 'org_base.html'
#     else:
#         org = None
#         base_template = 'myhome_base.html'
#
#     return org, base_template

def check_and_get_org_by_group(group_id, user):
    """
    Check whether the group is an org group, get org info if it is, and set
    the base template.
    """
    org_id = get_org_id_by_group(group_id)
    if org_id > 0:
        # this is an org group, get org info
        org = get_org_by_id(org_id)
        org._dict['is_staff'] = is_org_staff(org_id, user)
        org._dict['email'] = user
        base_template = 'org_base.html'
    else:
        org = None
        base_template = 'myhome_base.html'

    return org, base_template

# events related
if EVENTS_CONFIG_FILE:
    import seafevents

    EVENTS_ENABLED = True
    SeafEventsSession = seafevents.init_db_session_class(EVENTS_CONFIG_FILE)

    def _same_events(e1, e2):
        """Two events are considered the same when both rules hold:

        1. event1.commit.creator_name == event2.commit.creator_name
        2. event1.commit.desc == event2.commit.desc
        """
        if hasattr(e1, 'commit') and hasattr(e2, 'commit'):
            if e1.commit.desc == e2.commit.desc and \
               e1.commit.creator_name == e2.commit.creator_name:
                return True
        return False

    def _get_events(username, start, count, org_id=None):
        ev_session = SeafEventsSession()

        valid_events = []
        total_used = 0
        try:
            next_start = start
            while True:
                events = _get_events_inner(ev_session, username, next_start, count)
                if not events:
                    break

                for e1 in events:
                    duplicate = False
                    for e2 in valid_events:
                        if _same_events(e1, e2):
                            duplicate = True
                            break

                    new_merge = False
                    if hasattr(e1, 'commit') and new_merge_with_no_conflict(e1.commit):
                        new_merge = True

                    if not duplicate and not new_merge:
                        valid_events.append(e1)
                    total_used = total_used + 1
                    if len(valid_events) == count:
                        break

                if len(valid_events) == count:
                    break
                next_start = next_start + len(events)
        finally:
            ev_session.close()

        for e in valid_events:  # parse commit description
            if hasattr(e, 'commit'):
                e.commit.converted_cmmt_desc = convert_cmmt_desc_link(e.commit)
                e.commit.more_files = more_files_in_commit(e.commit)
        return valid_events, start + total_used

    def _get_events_inner(ev_session, username, start, limit):
        '''Read events from the seafevents database and drop events that are
        no longer valid.

        Return 'limit' events, or fewer if no more events remain.
        '''
        valid_events = []
        next_start = start
        while True:
            events = seafevents.get_user_events(ev_session, username,
                                                next_start, limit)
            if not events:
                break

            for ev in events:
                if ev.etype == 'repo-update':
                    repo = get_repo(ev.repo_id)
                    if not repo:
                        # delete the update event for a repo which has been deleted
                        seafevents.delete_event(ev_session, ev.uuid)
                        continue
                    if repo.encrypted:
                        repo.password_set = seafserv_rpc.is_passwd_set(repo.id, username)
                    ev.repo = repo
                    ev.commit = seafserv_threaded_rpc.get_commit(repo.id, repo.version, ev.commit_id)

                valid_events.append(ev)
                if len(valid_events) == limit:
                    break

            if len(valid_events) == limit:
                break
            next_start = next_start + len(valid_events)

        return valid_events

    def get_user_events(username, start, count):
        """Return a list of user events and a new start.

        For example:
        ``get_user_events('foo@example.com', 0, 10)`` returns the first 10
        events.
        ``get_user_events('foo@example.com', 5, 10)`` returns the 6th through
        15th events.
        """
        return _get_events(username, start, count)

    def get_org_user_events(org_id, username, start, count):
        return _get_events(username, start, count, org_id=org_id)

else:
    EVENTS_ENABLED = False
    def get_user_events():
        pass
    def get_org_user_events():
        pass

def calc_file_path_hash(path, bits=12):
    if isinstance(path, unicode):
        path = path.encode('UTF-8')

    path_hash = hashlib.md5(urllib2.quote(path)).hexdigest()[:bits]

    return path_hash
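
# Illustrative usage (a sketch): the path is URL-quoted and MD5-hashed, and
# only the first ``bits`` hex characters (12 by default) are kept:
#
#     calc_file_path_hash(u'/docs/a.txt')   # -> a 12-character hex digest
#
# The value is stable for a given path, so it can serve as a short,
# fixed-length key.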

def get_service_url():
    """Get the service url from seaserv.
    """
    return SERVICE_URL

def get_server_id():
    """Get the server id from seaserv.
    """
    return getattr(seaserv, 'SERVER_ID', '-')

def get_site_scheme_and_netloc():
    """Return a string containing the site scheme and network location part
    of the service url.

    For example, if the service url is "https://example.com:8000/seafile/",
    this returns "https://example.com:8000".
    """
    parse_result = urlparse(get_service_url())
    return "%s://%s" % (parse_result.scheme, parse_result.netloc)

def send_html_email(subject, con_template, con_context, from_email, to_email):
    """Send HTML email
    """
    base_context = {
        'url_base': get_site_scheme_and_netloc(),
        'site_name': SITE_NAME,
        'media_url': MEDIA_URL,
        'logo_path': LOGO_PATH,
    }
    t = loader.get_template(con_template)
    con_context.update(base_context)
    msg = EmailMessage(subject, t.render(Context(con_context)), from_email, to_email)
    msg.content_subtype = "html"
    msg.send()
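
# Minimal usage sketch (the template name, link and addresses are
# hypothetical):
#
#     send_html_email(_('New share link'),
#                     'share_link_email.html',
#                     {'shared_link': 'https://demo.example.com/f/abc123/'},
#                     None,                    # fall back to DEFAULT_FROM_EMAIL
#                     ['user@example.com'])
#
# ``to_email`` must be a list or tuple, since it is passed straight to
# django.core.mail.EmailMessage.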

def gen_dir_share_link(token):
    """Generate directory share link.
    """
    return gen_shared_link(token, 'd')

def gen_file_share_link(token):
    """Generate file share link.
    """
    return gen_shared_link(token, 'f')

def gen_shared_link(token, s_type):
    service_url = get_service_url()
    assert service_url is not None

    service_url = service_url.rstrip('/')
    if s_type == 'f':
        return '%s/f/%s/' % (service_url, token)
    else:
        return '%s/d/%s/' % (service_url, token)

def gen_shared_upload_link(token):
    service_url = get_service_url()
    assert service_url is not None

    service_url = service_url.rstrip('/')
    return '%s/u/d/%s/' % (service_url, token)
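
# Illustrative results (a sketch; SERVICE_URL and the token are made up):
#
#     gen_file_share_link('abc123')     # -> 'https://demo.example.com/f/abc123/'
#     gen_dir_share_link('abc123')      # -> 'https://demo.example.com/d/abc123/'
#     gen_shared_upload_link('abc123')  # -> 'https://demo.example.com/u/d/abc123/'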

def show_delete_days(request):
    if request.method == 'GET':
        days_str = request.GET.get('days', '')
    elif request.method == 'POST':
        days_str = request.POST.get('days', '')
    else:
        days_str = ''

    try:
        days = int(days_str)
    except ValueError:
        days = 7

    return days

def is_textual_file(file_type):
    """
    Check whether a file type is a textual file.
    """
    if file_type == TEXT or file_type == MARKDOWN or file_type == SF:
        return True
    else:
        return False

def redirect_to_login(request):
    from django.conf import settings
    # REDIRECT_FIELD_NAME ('next') is needed for the redirect query string;
    # importing it from django.contrib.auth here is an assumption, since the
    # name is not defined elsewhere in this module.
    from django.contrib.auth import REDIRECT_FIELD_NAME as redirect_field_name

    login_url = settings.LOGIN_URL
    path = urlquote(request.get_full_path())
    tup = login_url, redirect_field_name, path
    return HttpResponseRedirect('%s?%s=%s' % tup)

def mkstemp():
    '''Returns (fd, filepath), the same as tempfile.mkstemp, except the
    filepath is encoded in UTF-8
    '''
    fd, path = tempfile.mkstemp()
    system_encoding = locale.getdefaultlocale()[1]
    if system_encoding is not None:
        path_utf8 = path.decode(system_encoding).encode('UTF-8')
        return fd, path_utf8
    else:
        return fd, path

# File or directory operations
FILE_OP = ('Added', 'Modified', 'Renamed', 'Moved',
           'Added directory', 'Renamed directory', 'Moved directory')

OPS = '|'.join(FILE_OP)
CMMT_DESC_PATT = re.compile(r'(%s) "(.*)"\s?(and \d+ more (?:files|directories))?' % OPS)

API_OPS = '|'.join((OPS, 'Deleted', 'Removed'))
API_CMMT_DESC_PATT = r'(%s) "(.*)"\s?(and \d+ more (?:files|directories))?' % API_OPS

def convert_cmmt_desc_link(commit):
    """Wrap file/folder with ``<a></a>`` in commit description.
    """
    repo_id = commit.repo_id
    cmmt_id = commit.id
    conv_link_url = reverse('convert_cmmt_desc_link')

    def link_repl(matchobj):
        op = matchobj.group(1)
        file_or_dir = matchobj.group(2)
        remaining = matchobj.group(3)

        tmp_str = '%s "<a href="%s?repo_id=%s&cmmt_id=%s&nm=%s" class="normal">%s</a>"'
        if remaining:
            return (tmp_str + ' %s') % (op, conv_link_url, repo_id, cmmt_id, urlquote(file_or_dir),
                                        file_or_dir, remaining)
        else:
            return tmp_str % (op, conv_link_url, repo_id, cmmt_id, urlquote(file_or_dir), file_or_dir)

    return re.sub(CMMT_DESC_PATT, link_repl, commit.desc)

def api_tsstr_sec(value):
    """Convert a timestamp to a string."""
    try:
        return datetime.fromtimestamp(value).strftime("%Y-%m-%d %H:%M:%S")
    except:
        # fall back for timestamps given in microseconds
        return datetime.fromtimestamp(value / 1000000).strftime("%Y-%m-%d %H:%M:%S")

def api_convert_desc_link(e):
    """Wrap file/folder names with ``<span></span>`` in the commit
    description and attach an API link to the event.
    """
    commit = e.commit
    repo_id = commit.repo_id
    cmmt_id = commit.id

    def link_repl(matchobj):
        op = matchobj.group(1)
        file_or_dir = matchobj.group(2)
        remaining = matchobj.group(3)

        tmp_str = '%s "<span class="file-name">%s</span>"'
        if remaining:
            url = reverse('api_repo_history_changes', args=[repo_id])
            e.link = "%s?commit_id=%s" % (url, cmmt_id)
            e.dtime = api_tsstr_sec(commit.props.ctime)
            return (tmp_str + ' %s') % (op, file_or_dir, remaining)
        else:
            diff_result = seafserv_threaded_rpc.get_diff(repo_id, '', cmmt_id)
            if diff_result:
                for d in diff_result:
                    if file_or_dir not in d.name:
                        # skip this diff entry if it does not match the
                        # file/folder the user clicked
                        continue

                    if d.status == 'add' or d.status == 'mod':
                        e.link = "api://repo/%s/files/?p=/%s" % (repo_id, d.name)
                    elif d.status == 'mov':
                        e.link = "api://repo/%s/files/?p=/%s" % (repo_id, d.new_name)
                    elif d.status == 'newdir':
                        e.link = "api://repo/%s/dir/?p=/%s" % (repo_id, d.name)
                    else:
                        continue
            return tmp_str % (op, file_or_dir)

    e.desc = re.sub(API_CMMT_DESC_PATT, link_repl, commit.desc)

MORE_PATT = r'and \d+ more (?:files|directories)'
def more_files_in_commit(commit):
    """Check whether the commit description mentions more added/deleted/modified
    files than those listed.
    """
    return True if re.search(MORE_PATT, commit.desc) else False
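
# Example of what CMMT_DESC_PATT and MORE_PATT capture (illustrative). For
# the description 'Added "photo.jpg" and 2 more files':
#
#     group(1) -> 'Added'
#     group(2) -> 'photo.jpg'
#     group(3) -> 'and 2 more files'
#
# more_files_in_commit() keys off the same trailing 'and N more ...' phrase.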

# office convert related
HAS_OFFICE_CONVERTER = False
if EVENTS_CONFIG_FILE:
    def check_office_converter_enabled():
        config = ConfigParser.ConfigParser()
        config.read(EVENTS_CONFIG_FILE)
        enabled = seafevents.is_office_converter_enabled(config)

        if enabled:
            logging.debug('office converter: enabled')
        else:
            logging.debug('office converter: not enabled')
        return enabled

    def get_office_converter_html_dir():
        config = ConfigParser.ConfigParser()
        config.read(EVENTS_CONFIG_FILE)
        return seafevents.get_office_converter_html_dir(config)

    def get_office_converter_limit():
        config = ConfigParser.ConfigParser()
        config.read(EVENTS_CONFIG_FILE)
        return seafevents.get_office_converter_limit(config)

    HAS_OFFICE_CONVERTER = check_office_converter_enabled()

if HAS_OFFICE_CONVERTER:

    OFFICE_HTML_DIR = get_office_converter_html_dir()
    OFFICE_PREVIEW_MAX_SIZE, OFFICE_PREVIEW_MAX_PAGES = get_office_converter_limit()

    from seafevents.office_converter import OfficeConverterRpcClient

    office_converter_rpc = None
    def _get_office_converter_rpc():
        global office_converter_rpc
        if office_converter_rpc is None:
            pool = ccnet.ClientPool(CCNET_CONF_PATH)
            office_converter_rpc = OfficeConverterRpcClient(pool)

        return office_converter_rpc

    def add_office_convert_task(file_id, doctype, url):
        rpc = _get_office_converter_rpc()
        return rpc.add_task(file_id, doctype, url)

    def query_office_convert_status(file_id):
        rpc = _get_office_converter_rpc()
        return rpc.query_convert_status(file_id)

    def query_office_file_pages(file_id):
        rpc = _get_office_converter_rpc()
        return rpc.query_file_pages(file_id)

    def get_converted_html_detail(file_id):
        d = {}
        outline_file = os.path.join(OFFICE_HTML_DIR, file_id, 'file.outline')

        with open(outline_file, 'r') as fp:
            outline = fp.read()

        page_num = query_office_file_pages(file_id).count

        d['outline'] = outline
        d['page_num'] = page_num

        return d

    def prepare_converted_html(raw_path, obj_id, doctype, ret_dict):
        try:
            ret = add_office_convert_task(obj_id, doctype, raw_path)
        except:
            logging.exception('failed to add_office_convert_task:')
            return _(u'Internal error'), False
        else:
            if ret.exists and (doctype not in ('xls', 'xlsx')):
                try:
                    ret_dict['html_detail'] = get_converted_html_detail(obj_id)
                except:
                    pass
            return None, ret.exists

# search related
HAS_FILE_SEARCH = False
if EVENTS_CONFIG_FILE:
    def check_search_enabled():
        enabled = False
        if hasattr(seafevents, 'is_search_enabled'):
            config = ConfigParser.ConfigParser()
            config.read(EVENTS_CONFIG_FILE)
            enabled = seafevents.is_search_enabled(config)

            if enabled:
                logging.debug('search: enabled')
            else:
                logging.debug('search: not enabled')
        return enabled

    HAS_FILE_SEARCH = check_search_enabled()

TRAFFIC_STATS_ENABLED = False
if EVENTS_CONFIG_FILE and hasattr(seafevents, 'get_user_traffic_stat'):
    TRAFFIC_STATS_ENABLED = True
    def get_user_traffic_stat(username):
        session = SeafEventsSession()
        try:
            stat = seafevents.get_user_traffic_stat(session, username)
        finally:
            session.close()
        return stat

    def get_user_traffic_list(month, start=0, limit=25):
        session = SeafEventsSession()
        try:
            stat = seafevents.get_user_traffic_list(session, month, start, limit)
        finally:
            session.close()
        return stat

else:
    def get_user_traffic_stat(username):
        pass
    def get_user_traffic_list():
        pass

def user_traffic_over_limit(username):
    """Return ``True`` if the user's traffic is over the limit, otherwise
    ``False``.
    """
    if not CHECK_SHARE_LINK_TRAFFIC:
        return False

    from seahub_extra.plan.models import UserPlan
    from seahub_extra.plan.settings import PLAN
    up = UserPlan.objects.get_valid_plan_by_user(username)
    plan = 'Free' if up is None else up.plan_type
    traffic_limit = int(PLAN[plan]['share_link_traffic']) * 1024 * 1024 * 1024

    try:
        stat = get_user_traffic_stat(username)
    except Exception:
        logger = logging.getLogger(__name__)
        logger.error('Failed to get user traffic stat: %s' % username,
                     exc_info=True)
        return True

    if stat is None:            # No traffic record yet
        return False

    month_traffic = stat['file_view'] + stat['file_download'] + stat['dir_download']
    return True if month_traffic >= traffic_limit else False
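
# Worked example (illustrative; the 10 GB figure is hypothetical): if the
# user's plan allows 10 GB of share-link traffic, traffic_limit is
# 10 * 1024 ** 3 = 10737418240 bytes. A month_traffic of 9 GB
# (9663676416 bytes) stays under the limit, so the function returns False;
# anything at or above 10 GB returns True.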