mirror of https://github.com/haiwen/seahub.git
show last modified in file list
* Also handles the situation when listing contributors fails
@@ -161,3 +161,24 @@ class UserStarredFiles(models.Model):
     path = models.TextField()
     is_dir = models.BooleanField()
 
+
+class DirFilesLastModifiedInfo(models.Model):
+    '''Cache the results of the calculation of last modified time of all the
+    files under a directory <parent_dir> in repo <repo_id>.
+
+    The field "last_modified_info" is the json format of a dict whose keys are
+    the file names and values are their corresponding last modified
+    timestamps.
+
+    The field "dir_id" is used to check whether the cache should be
+    re-computed
+
+    '''
+    repo_id = models.CharField(max_length=36)
+    parent_dir = models.TextField()
+    parent_dir_hash = models.CharField(max_length=12)
+    dir_id = models.CharField(max_length=40)
+    last_modified_info = models.TextField()
+
+    class Meta:
+        unique_together = ('repo_id', 'parent_dir_hash')
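As a quick illustration of the cache format described in the model's docstring: last_modified_info stores a JSON-encoded dict mapping file names to last-modified timestamps, written by the new calc_dir_files_last_modified() and read back by get_dir_files_last_modified() further down in this commit. A minimal runnable sketch (file names and timestamps are made up):

import json

# hypothetical cache payload: file name -> last-modified timestamp (seconds)
last_modified_info = {'readme.md': 1349142790, 'notes.txt': 1349140211}

cached = json.dumps(last_modified_info)           # what gets stored in the TextField
assert json.loads(cached) == last_modified_info   # what the view code gets back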
@@ -39,15 +39,22 @@
 <div id="file">
     <div id="file-op">
         <div class="commit fleft">
-            {% avatar latest_contributor 20 %} <a href="{% url 'user_profile' latest_contributor %}" class="name">{{ latest_contributor|email2nickname }}</a><span class="time">{{ last_modified|translate_seahub_time}}</span>
+            {% if latest_contributor %}
+            {% avatar latest_contributor 20 %} <a href="{% url 'user_profile' latest_contributor %}" class="name">{{ latest_contributor|email2nickname }}</a>
+            {% endif %}
+            {% if last_modified %}
+            <span class="time">{{ last_modified|translate_seahub_time}}</span>
+            {% endif %}
 
             {% if filetype == 'Markdown' or filetype == 'Text' %}
             {% if last_commit_id %}
             <span>{% trans "updated this file"%}, <a class="file-diff" href="{% url 'text_diff' repo.id %}?p={{path|urlencode}}&commit={{last_commit_id}}">{% trans "Detail"%}</a>.</span>
             {% endif %}
             {% else %}
+            {% if last_commit_id %}
             <span>{% trans "updated this file"%}.</span>
             {% endif %}
+            {% endif %}
 
             <span class="contributors">{% blocktrans count counter=contributors|length %}one contributor{% plural %} {{ counter }} contributors {% endblocktrans %}</span>
             {% for user in contributors %}
@@ -93,9 +93,10 @@
 <tr>
     <th width="3%"></th>
     <th width="5%"></th>
-    <th width="53%">{% trans "Name"%}</th>
+    <th width="40%">{% trans "Name"%}</th>
     <th width="10%">{% trans "Size"%}</th>
-    <th width="29%">{% trans "Operations"%}</th>
+    <th width="15%">{% trans "Modified" %}</th>
+    <th width="27%">{% trans "Operations"%}</th>
 </tr>
 
 {% for dirent in dir_list %}
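For reference, the column widths still add up to 100% after this change: the old header used 3 + 5 + 53 + 10 + 29 = 100, the new one uses 3 + 5 + 40 + 10 + 15 + 27 = 100, so the new "Modified" column takes the 13% given up by "Name" and the 2% given up by "Operations".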
@@ -107,6 +108,13 @@
     </td>
 
     <td></td>
+    <td>
+        {% if dirent.last_modified %}
+        {{ dirent.last_modified|translate_seahub_time }}
+        {% else %}
+        {% trans "Internal Server Error" %}
+        {% endif %}
+    </td>
     <td>
         {% if user_perm %}
         <div class="repo-file-op vh">
@@ -143,6 +151,13 @@
     </td>
 
     <td>{{ dirent.file_size|filesizeformat }}</td>
+    <td>
+        {% if dirent.last_modified %}
+        {{ dirent.last_modified|translate_seahub_time }}
+        {% else %}
+        {% trans "Internal Server Error" %}
+        {% endif %}
+    </td>
     <td>
         <div class="repo-file-op vh">
             <div class="displayed-op">
@@ -5,12 +5,13 @@ import re
 import random
 import stat
 import urllib2
+import json
 
 from django.shortcuts import render_to_response
 from django.template import RequestContext
 from django.utils.hashcompat import sha_constructor
 
-from base.models import FileContributors, UserStarredFiles
+from base.models import FileContributors, UserStarredFiles, DirFilesLastModifiedInfo
 from django.utils.hashcompat import md5_constructor
 
 from pysearpc import SearpcError
@@ -376,22 +377,25 @@ def get_file_contributors_from_revisions(repo_id, file_path):
     commits = []
     try:
         commits = seafserv_threaded_rpc.list_file_revisions(repo_id, file_path, -1)
-    except SearpcError:
-        return []
+    except SearpcError, e:
+        return [], 0, ''
 
+    if not commits:
+        return [], 0, ''
+
     # Commits are already sorted by date, so the user list is also sorted.
     users = [ commit.creator_name for commit in commits if commit.creator_name ]
 
     # Remove duplicate elements in a list
-    ret = []
+    email_list = []
     for user in users:
-        if user not in ret:
-            ret.append(user)
+        if user not in email_list:
+            email_list.append(user)
 
-    return ret, commits[0].ctime, commits[0].id
+    return email_list, commits[0].ctime, commits[0].id
 
 def get_file_contributors(repo_id, file_path, file_path_hash, file_id):
-    """Get file contributors list and last modfied time from database cache.
+    """Get file contributors list and last modified time from database cache.
     If not found in cache, try to get it from seaf-server.
 
     """
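The de-duplication loop above keeps only the first occurrence of each contributor, so the list stays sorted by commit date and email_list[0] remains the most recent contributor (which repo_view_file later uses as latest_contributor). A runnable sketch of the same idiom with made-up addresses:

users = ['bob@example.com', 'alice@example.com', 'bob@example.com']  # newest commit first

email_list = []
for user in users:
    if user not in email_list:
        email_list.append(user)

assert email_list == ['bob@example.com', 'alice@example.com']
assert email_list[0] == 'bob@example.com'  # the latest contributor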
@@ -414,7 +418,7 @@ def get_file_contributors(repo_id, file_path, file_path_hash, file_id):
         # has no cache yet
         contributors, last_modified, last_commit_id = get_file_contributors_from_revisions (repo_id, file_path)
         if not contributors:
-            return [], 0
+            return [], 0, ''
         emails = ','.join(contributors)
         file_contributors = FileContributors(repo_id=repo_id,
                                              file_id=file_id,
@@ -634,3 +638,66 @@ def get_dir_starred_files(email, repo_id, parent_dir, org_id=-1):
                                                     path__startswith=parent_dir,
                                                     org_id=org_id)
     return [ f.path for f in starred_files ]
+
+def calc_dir_files_last_modified(repo_id, parent_dir, parent_dir_hash, dir_id):
+    try:
+        ret_list = seafserv_threaded_rpc.calc_files_last_modified(repo_id, parent_dir.encode('utf-8'))
+    except:
+        return {}
+
+    # ret_list is like:
+    # [
+    #    {'file_name': 'xxx', 'last_modified': t1}
+    #    {'file_name': 'yyy', 'last_modified': t2}
+    # ]
+    # and we transform it to:
+    # {'xxx': t1, 'yyy': t2}
+    last_modified_info = {}
+    for entry in ret_list:
+        key = entry.file_name
+        value = entry.last_modified
+        last_modified_info[key] = value
+
+    info = DirFilesLastModifiedInfo(repo_id=repo_id,
+                                    parent_dir=parent_dir,
+                                    parent_dir_hash=parent_dir_hash,
+                                    dir_id=dir_id,
+                                    last_modified_info=json.dumps(last_modified_info))
+    info.save()
+
+    return last_modified_info
+
+def get_dir_files_last_modified(repo_id, parent_dir):
+    '''Calc the last modified time of all the files under the directory
+    <parent_dir> of the repo <repo_id>. Return a dict whose keys are the file
+    names and values are their corresponding last modified timestamps.
+
+    '''
+    dir_id = seafserv_threaded_rpc.get_dir_id_by_path(repo_id, parent_dir)
+    parent_dir_hash = calc_file_path_hash(parent_dir)
+    if not dir_id:
+        return {}
+
+    try:
+        info = DirFilesLastModifiedInfo.objects.get(repo_id=repo_id,
+                                                    parent_dir_hash=parent_dir_hash)
+    except DirFilesLastModifiedInfo.DoesNotExist:
+        # no cache yet
+        return calc_dir_files_last_modified(repo_id, parent_dir, parent_dir_hash, dir_id)
+    else:
+        # cache exist
+        if info.dir_id != dir_id:
+            # cache is outdated
+            info.delete()
+            return calc_dir_files_last_modified(repo_id, parent_dir, parent_dir_hash, dir_id)
+        else:
+            # cache is valid
+            return json.loads(info.last_modified_info)
+
+def calc_file_path_hash(path, bits=12):
+    if isinstance(path, unicode):
+        path = path.encode('UTF-8')
+
+    path_hash = md5_constructor(urllib2.quote(path)).hexdigest()[:bits]
+
+    return path_hash
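A side note on the 12-character parent_dir_hash column in the new model: calc_file_path_hash above truncates an MD5 hex digest of the URL-quoted path to bits=12 characters. A standalone sketch of the same computation, using hashlib.md5 as a stand-in for Django's md5_constructor (the path is hypothetical):

import hashlib
import urllib2

def path_hash(path, bits=12):
    # same steps as calc_file_path_hash above
    if isinstance(path, unicode):
        path = path.encode('UTF-8')
    return hashlib.md5(urllib2.quote(path)).hexdigest()[:bits]

print path_hash(u'/My Library/docs')  # 12 hex chars, fits CharField(max_length=12)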
views.py
@@ -71,7 +71,8 @@ from utils import render_permission_error, render_error, list_to_string, \
     gen_file_get_url, string2list, MAX_INT, \
     gen_file_upload_url, check_and_get_org_by_repo, \
     get_file_contributors, EVENTS_ENABLED, get_user_events, get_org_user_events, \
-    get_starred_files, star_file, unstar_file, is_file_starred, get_dir_starred_files
+    get_starred_files, star_file, unstar_file, is_file_starred, get_dir_starred_files, \
+    get_dir_files_last_modified
 try:
     from settings import DOCUMENT_CONVERTOR_ROOT
     if DOCUMENT_CONVERTOR_ROOT[-1:] != '/':
@@ -168,11 +169,15 @@ def get_repo_dirents(request, repo_id, commit, path):
         org_id = request.user.org['org_id']
     starred_files = get_dir_starred_files(request.user.username, repo_id, path, org_id)
 
+    last_modified_info = get_dir_files_last_modified(repo_id, path)
+
     fileshares = FileShare.objects.filter(repo_id=repo_id).filter(username=request.user.username)
     http_or_https = request.is_secure() and 'https' or 'http'
     domain = RequestSite(request).domain
 
     for dirent in dirs:
+        dirent.last_modified = last_modified_info.get(dirent.obj_name, 0)
+
         if stat.S_ISDIR(dirent.props.mode):
             dir_list.append(dirent)
         else:
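Note the default of 0 in last_modified_info.get(): when get_dir_files_last_modified() falls back to an empty dict (for example, the calc_files_last_modified RPC failed), every dirent ends up with last_modified = 0, which is falsy, so the template hunks above show the {% trans "Internal Server Error" %} branch instead of a bogus timestamp. In short (file name made up):

dirent_last_modified = {}.get('readme.md', 0)  # missing entry -> 0 -> falsy in the template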
@@ -188,7 +193,6 @@
                 dirent.sharelink = '%s://%s%sf/%s/' % (http_or_https, domain, settings.SITE_ROOT, share.token)
                 dirent.sharetoken = share.token
                 break
-
     dir_list.sort(lambda x, y : cmp(x.obj_name.lower(),
                                     y.obj_name.lower()))
     file_list.sort(lambda x, y : cmp(x.obj_name.lower(),
@@ -1319,7 +1323,7 @@ def repo_view_file(request, repo_id):
     comments = FileComment.objects.filter(file_path_hash=file_path_hash, repo_id=repo_id)
 
     contributors, last_modified, last_commit_id = get_file_contributors(repo_id, path.encode('utf-8'), file_path_hash, obj_id)
-    latest_contributor = contributors[0]
+    latest_contributor = contributors[0] if contributors else None
 
     if len(groups) > 1:
         ctx = {}
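This guard is the "handles the situation when listing contributors fails" part of the commit message: when get_file_contributors() comes back empty, latest_contributor becomes None and last_modified stays 0, and the template hunk at the top of this commit only renders the avatar/profile link and the timestamp when those values are truthy. A minimal sketch (values are hypothetical):

contributors, last_modified, last_commit_id = [], 0, ''  # e.g. list_file_revisions raised SearpcError
latest_contributor = contributors[0] if contributors else None

assert latest_contributor is None  # {% if latest_contributor %} skips the avatar block
assert not last_modified           # {% if last_modified %} skips the timestamp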