mirror of
https://github.com/haiwen/seahub.git
synced 2025-09-22 03:47:09 +00:00
Fixed useless auto merge commit message
This commit is contained in:
@@ -24,6 +24,7 @@
|
|||||||
</tr>
|
</tr>
|
||||||
|
|
||||||
{% for commit in commits %}
|
{% for commit in commits %}
|
||||||
|
{% if commit.show %}
|
||||||
<tr>
|
<tr>
|
||||||
<td class="time">{{ commit.ctime|translate_seahub_time }}</td>
|
<td class="time">{{ commit.ctime|translate_seahub_time }}</td>
|
||||||
<td>
|
<td>
|
||||||
@@ -45,6 +46,7 @@
|
|||||||
{% endif %}
|
{% endif %}
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
|
{% endif %}
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
</table>
|
</table>
|
||||||
<div id="paginator">
|
<div id="paginator">
|
||||||
|
@@ -301,6 +301,37 @@ def get_file_revision_id_size (commit_id, path):
|
|||||||
|
|
||||||
return None, None
|
return None, None
|
||||||
|
|
||||||
|
def new_merge_with_no_conflict(commit):
    """Return True if ``commit`` is a new merge commit with no conflict.

    A commit qualifies when it has a second parent (i.e. it is a merge
    commit), its ``new_merge`` flag is exactly ``True`` and its
    ``conflict`` flag is exactly ``False``.

    Arguments:
    - `commit`: commit object exposing ``second_parent_id``,
      ``new_merge`` and ``conflict`` attributes.
    """
    # NOTE: identity checks (`is True` / `is False`) are kept on purpose —
    # only genuine booleans qualify, matching the original behavior for
    # truthy-but-not-True attribute values.
    return (commit.second_parent_id is not None and
            commit.new_merge is True and
            commit.conflict is False)
|
||||||
|
|
||||||
|
def get_commit_before_new_merge(commit):
    """Walk up the ancestry of ``commit`` and return the first commit
    that is not a conflict-free new merge.

    Pre-condition: ``commit`` itself must be a new merge without
    conflict (enforced by the leading assert).

    Arguments:
    - `commit`: the starting commit object.
    """
    assert new_merge_with_no_conflict(commit) is True

    while new_merge_with_no_conflict(commit):
        first_parent = seafserv_threaded_rpc.get_commit(commit.parent_id)
        second_parent = seafserv_threaded_rpc.get_commit(commit.second_parent_id)
        # Continue along whichever parent is more recent; ties go to the
        # second parent, exactly as in the original implementation.
        commit = second_parent if second_parent.ctime >= first_parent.ctime else first_parent

    assert new_merge_with_no_conflict(commit) is False

    return commit
|
||||||
|
|
||||||
def gen_inner_file_get_url(token, filename):
|
def gen_inner_file_get_url(token, filename):
|
||||||
"""Generate inner httpserver file url.
|
"""Generate inner httpserver file url.
|
||||||
|
|
||||||
@@ -446,7 +477,12 @@ if EVENTS_CONFIG_FILE:
|
|||||||
duplicate = False
|
duplicate = False
|
||||||
for e2 in valid_events:
|
for e2 in valid_events:
|
||||||
if _same_events(e1, e2): duplicate = True; break
|
if _same_events(e1, e2): duplicate = True; break
|
||||||
if not duplicate:
|
|
||||||
|
new_merge = False
|
||||||
|
if hasattr(e1, 'commit') and new_merge_with_no_conflict(e1.commit):
|
||||||
|
new_merge = True
|
||||||
|
|
||||||
|
if not duplicate and not new_merge:
|
||||||
valid_events.append(e1)
|
valid_events.append(e1)
|
||||||
total_used = total_used + 1
|
total_used = total_used + 1
|
||||||
if len(valid_events) == count:
|
if len(valid_events) == count:
|
||||||
|
@@ -78,7 +78,7 @@ from seahub.utils import render_permission_error, render_error, list_to_string,
|
|||||||
gen_file_get_url, string2list, MAX_INT, IS_EMAIL_CONFIGURED, \
|
gen_file_get_url, string2list, MAX_INT, IS_EMAIL_CONFIGURED, \
|
||||||
gen_file_upload_url, check_and_get_org_by_repo, \
|
gen_file_upload_url, check_and_get_org_by_repo, \
|
||||||
EVENTS_ENABLED, get_user_events, get_org_user_events, show_delete_days, \
|
EVENTS_ENABLED, get_user_events, get_org_user_events, show_delete_days, \
|
||||||
TRAFFIC_STATS_ENABLED, get_user_traffic_stat
|
TRAFFIC_STATS_ENABLED, get_user_traffic_stat, new_merge_with_no_conflict
|
||||||
from seahub.utils.paginator import get_page_range
|
from seahub.utils.paginator import get_page_range
|
||||||
from seahub.utils.star import get_dir_starred_files
|
from seahub.utils.star import get_dir_starred_files
|
||||||
from seahub.views.modules import MOD_PERSONAL_WIKI, \
|
from seahub.views.modules import MOD_PERSONAL_WIKI, \
|
||||||
@@ -693,6 +693,8 @@ def repo_history(request, repo_id):
|
|||||||
commits_all = get_commits(repo_id, per_page * (current_page -1),
|
commits_all = get_commits(repo_id, per_page * (current_page -1),
|
||||||
per_page + 1)
|
per_page + 1)
|
||||||
commits = commits_all[:per_page]
|
commits = commits_all[:per_page]
|
||||||
|
for c in commits:
|
||||||
|
c.show = False if new_merge_with_no_conflict(c) else True
|
||||||
|
|
||||||
if len(commits_all) == per_page + 1:
|
if len(commits_all) == per_page + 1:
|
||||||
page_next = True
|
page_next = True
|
||||||
|
@@ -32,7 +32,8 @@ from seahub.views.repo import get_nav_path, get_fileshare, get_dir_share_link, \
|
|||||||
import seahub.settings as settings
|
import seahub.settings as settings
|
||||||
from seahub.signals import repo_deleted
|
from seahub.signals import repo_deleted
|
||||||
from seahub.utils import check_filename_with_rename, EMPTY_SHA1, gen_block_get_url, \
|
from seahub.utils import check_filename_with_rename, EMPTY_SHA1, gen_block_get_url, \
|
||||||
check_and_get_org_by_repo, TRAFFIC_STATS_ENABLED, get_user_traffic_stat
|
check_and_get_org_by_repo, TRAFFIC_STATS_ENABLED, get_user_traffic_stat,\
|
||||||
|
new_merge_with_no_conflict, get_commit_before_new_merge
|
||||||
from seahub.utils.star import star_file, unstar_file
|
from seahub.utils.star import star_file, unstar_file
|
||||||
|
|
||||||
# Get an instance of a logger
|
# Get an instance of a logger
|
||||||
@@ -950,6 +951,8 @@ def get_current_commit(request, repo_id):
|
|||||||
err_msg = _(u'Error: no head commit id')
|
err_msg = _(u'Error: no head commit id')
|
||||||
return HttpResponse(json.dumps({'error': err_msg}),
|
return HttpResponse(json.dumps({'error': err_msg}),
|
||||||
status=500, content_type=content_type)
|
status=500, content_type=content_type)
|
||||||
|
if new_merge_with_no_conflict(head_commit):
|
||||||
|
head_commit = get_commit_before_new_merge(head_commit)
|
||||||
|
|
||||||
ctx = {
|
ctx = {
|
||||||
'repo': repo,
|
'repo': repo,
|
||||||
|
@@ -21,7 +21,8 @@ from seahub.views import gen_path_link, get_repo_dirents, \
|
|||||||
|
|
||||||
from seahub.utils import get_ccnetapplet_root, gen_file_upload_url, \
|
from seahub.utils import get_ccnetapplet_root, gen_file_upload_url, \
|
||||||
get_httpserver_root, gen_dir_share_link, gen_shared_upload_link, \
|
get_httpserver_root, gen_dir_share_link, gen_shared_upload_link, \
|
||||||
get_max_upload_file_size
|
get_max_upload_file_size, new_merge_with_no_conflict, \
|
||||||
|
get_commit_before_new_merge
|
||||||
from seahub.settings import ENABLE_SUB_LIBRARY, FORCE_SERVER_CRYPTO
|
from seahub.settings import ENABLE_SUB_LIBRARY, FORCE_SERVER_CRYPTO
|
||||||
|
|
||||||
# Get an instance of a logger
|
# Get an instance of a logger
|
||||||
@@ -242,6 +243,9 @@ def render_repo(request, repo):
|
|||||||
head_commit = get_commit(repo.head_cmmt_id)
|
head_commit = get_commit(repo.head_cmmt_id)
|
||||||
if not head_commit:
|
if not head_commit:
|
||||||
raise Http404
|
raise Http404
|
||||||
|
if new_merge_with_no_conflict(head_commit):
|
||||||
|
head_commit = get_commit_before_new_merge(head_commit)
|
||||||
|
|
||||||
repo_size = get_repo_size(repo.id)
|
repo_size = get_repo_size(repo.id)
|
||||||
no_quota = is_no_quota(repo.id)
|
no_quota = is_no_quota(repo.id)
|
||||||
search_repo_id = None if repo.encrypted else repo.id
|
search_repo_id = None if repo.encrypted else repo.id
|
||||||
|
Reference in New Issue
Block a user