 |
- {this.renderRepoName()} |
+ {this.props.isWiki ?
+ {wikiName} | :
+ {this.renderRepoName()} |
+ }
{`${repo.file_count} / ${Utils.bytesToSize(repo.size)}`} |
{repo.id} |
diff --git a/frontend/src/utils/system-admin-api.js b/frontend/src/utils/system-admin-api.js
index f5150904e3..7403955d7d 100644
--- a/frontend/src/utils/system-admin-api.js
+++ b/frontend/src/utils/system-admin-api.js
@@ -201,6 +201,18 @@ class SystemAdminAPI {
return this.req.get(url, { params: params });
}
+ sysAdminListAllWikis(page, perPage, orderBy) {
+ const url = this.server + '/api/v2.1/admin/wikis/';
+ let params = {
+ page: page,
+ per_page: perPage
+ };
+ if (orderBy) {
+ params.order_by = orderBy;
+ }
+ return this.req.get(url, { params: params });
+ }
+
sysAdminSearchRepos(name, page, perPage) {
const url = this.server + '/api/v2.1/admin/search-library/';
let params = {
diff --git a/seahub/api2/endpoints/admin/wikis.py b/seahub/api2/endpoints/admin/wikis.py
new file mode 100644
index 0000000000..4cdeb702c7
--- /dev/null
+++ b/seahub/api2/endpoints/admin/wikis.py
@@ -0,0 +1,123 @@
+import logging
+
+from rest_framework.authentication import SessionAuthentication
+from rest_framework.permissions import IsAdminUser
+from rest_framework.response import Response
+from rest_framework.views import APIView
+from rest_framework import status
+from django.template.defaultfilters import filesizeformat
+from seaserv import seafile_api
+
+from seahub.api2.authentication import TokenAuthentication
+from seahub.api2.throttling import UserRateThrottle
+from seahub.api2.utils import api_error
+from seahub.utils import get_service_url
+from seahub.base.templatetags.seahub_tags import email2nickname, email2contact_email
+from seahub.group.utils import group_id_to_name
+from seahub.utils.repo import normalize_repo_status_code
+from seahub.utils.timeutils import timestamp_to_isoformat_timestr
+from seahub.wiki2.models import Wiki2Publish
+from seahub.api2.endpoints.group_owned_libraries import get_group_id_by_repo_owner
+from seahub.utils.db_api import SeafileDB
+
+
+
+logger = logging.getLogger(__name__)
+
+
+def get_wiki_info(wiki, publish_wikis_dict):
+
+ wiki_owner = seafile_api.get_repo_owner(wiki.repo_id)
+ if not wiki_owner:
+ try:
+ org_wiki_owner = seafile_api.get_org_repo_owner(wiki.repo_id)
+ except Exception:
+ org_wiki_owner = None
+ owner = wiki_owner or org_wiki_owner or ''
+ link_prefix = get_service_url().rstrip('/') + '/wiki/publish/'
+ is_published = True if wiki.repo_id in publish_wikis_dict else False
+ public_url_suffix = publish_wikis_dict.get(wiki.repo_id) if is_published else ""
+ link = link_prefix + public_url_suffix if public_url_suffix else ""
+
+ result = {}
+ result['id'] = wiki.repo_id
+ result['name'] = wiki.wiki_name
+ result['owner'] = owner
+ result['owner_email'] = owner
+ result['owner_contact_email'] = email2contact_email(owner)
+ result['size'] = wiki.size if wiki.size else 0
+ result['size_formatted'] = filesizeformat(wiki.size)
+ result['encrypted'] = wiki.encrypted
+ result['file_count'] = wiki.file_count if wiki.file_count else 0
+ result['status'] = normalize_repo_status_code(wiki.status)
+ result['last_modified'] = timestamp_to_isoformat_timestr(wiki.last_modified)
+ result['public_url_suffix'] = public_url_suffix
+ result['public_url'] = link
+ result['is_published'] = is_published
+
+ if '@seafile_group' in owner:
+ group_id = get_group_id_by_repo_owner(owner)
+ result['group_name'] = group_id_to_name(group_id)
+ result['owner_name'] = group_id_to_name(group_id)
+ else:
+ result['owner_name'] = email2nickname(owner)
+
+ return result
+
+
+class AdminWikis(APIView):
+ authentication_classes = (TokenAuthentication, SessionAuthentication)
+ throttle_classes = (UserRateThrottle,)
+ permission_classes = (IsAdminUser,)
+
+ def get(self, request):
+ """ List 'all' wiki
+
+ Permission checking:
+ 1. only admin can perform this action.
+ """
+
+ if not request.user.admin_permissions.can_manage_library():
+ return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')
+
+ order_by = request.GET.get('order_by', '').lower().strip()
+ if order_by and order_by not in ('size', 'file_count'):
+ error_msg = 'order_by invalid.'
+ return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
+
+ # get wikis by page
+ try:
+ current_page = int(request.GET.get('page', '1'))
+ per_page = int(request.GET.get('per_page', '100'))
+ except ValueError:
+ current_page = 1
+ per_page = 100
+
+ start = (current_page - 1) * per_page
+ limit = per_page + 1
+ seafile_db = SeafileDB()
+ all_wikis = seafile_db.get_all_wikis(start, limit, order_by)
+ if len(all_wikis) > per_page:
+ all_wikis = all_wikis[:per_page]
+ has_next_page = True
+ else:
+ has_next_page = False
+
+ # get publish wiki
+ wiki_ids = [w.repo_id for w in all_wikis]
+ publish_wikis_dict = {}
+ published_wikis = Wiki2Publish.objects.filter(repo_id__in=wiki_ids)
+ for w in published_wikis:
+ publish_wikis_dict[w.repo_id] = w.publish_url
+
+ all_fmt_wikis = []
+ for wiki in all_wikis:
+ repo_info = get_wiki_info(wiki, publish_wikis_dict)
+ all_fmt_wikis.append(repo_info)
+
+ page_info = {
+ 'has_next_page': has_next_page,
+ 'current_page': current_page
+ }
+
+ return Response({"page_info": page_info, "wikis": all_fmt_wikis})
diff --git a/seahub/api2/endpoints/wiki2.py b/seahub/api2/endpoints/wiki2.py
index a7235662fe..83d1aadb47 100644
--- a/seahub/api2/endpoints/wiki2.py
+++ b/seahub/api2/endpoints/wiki2.py
@@ -22,7 +22,7 @@ from django.utils.translation import gettext as _
from seahub.api2.authentication import TokenAuthentication
from seahub.api2.throttling import UserRateThrottle
from seahub.api2.utils import api_error, is_wiki_repo
-from seahub.utils import HAS_FILE_SEARCH, HAS_FILE_SEASEARCH
+from seahub.utils import HAS_FILE_SEARCH, HAS_FILE_SEASEARCH, get_service_url
if HAS_FILE_SEARCH or HAS_FILE_SEASEARCH:
from seahub.search.utils import search_wikis, ai_search_wikis
from seahub.utils.db_api import SeafileDB
@@ -62,7 +62,7 @@ HTTP_520_OPERATION_FAILED = 520
logger = logging.getLogger(__name__)
-def _merge_wiki_in_groups(group_wikis, publish_wiki_ids):
+def _merge_wiki_in_groups(group_wikis, publish_wikis_dict, link_prefix):
group_ids = [gw.group_id for gw in group_wikis]
group_id_wikis_map = {key: [] for key in group_ids}
@@ -75,11 +75,16 @@ def _merge_wiki_in_groups(group_wikis, publish_wiki_ids):
owner_nickname = group_id_to_name(group_id)
else:
owner_nickname = email2nickname(owner)
+ is_published = True if publish_wikis_dict.get(gw.id) else False
+ public_url_suffix = publish_wikis_dict.get(gw.id) if is_published else ""
+ link = link_prefix + public_url_suffix if public_url_suffix else ""
repo_info = {
"type": "group",
"permission": gw.permission,
"owner_nickname": owner_nickname,
- "is_published": True if wiki.repo_id in publish_wiki_ids else False
+ "public_url_suffix": public_url_suffix,
+ "public_url": link,
+ "is_published": is_published
}
wiki_info.update(repo_info)
group_id = gw.group_id
@@ -111,21 +116,27 @@ class Wikis2View(APIView):
shared_wikis = [r for r in shared if is_wiki_repo(r)]
group_wikis = [r for r in groups if is_wiki_repo(r)]
wiki_ids = [w.repo_id for w in owned_wikis + shared_wikis + group_wikis]
- publish_wiki_ids = []
+ link_prefix = get_service_url().rstrip('/') + '/wiki/publish/'
+ publish_wikis_dict = {}
published_wikis = Wiki2Publish.objects.filter(repo_id__in=wiki_ids)
for w in published_wikis:
- publish_wiki_ids.append(w.repo_id)
+ publish_wikis_dict[w.repo_id] = w.publish_url
wiki_list = []
for r in owned_wikis:
r.owner = username
r.permission = 'rw'
wiki = Wiki(r)
wiki_info = wiki.to_dict()
+ is_published = True if publish_wikis_dict.get(r.id) else False
+ public_url_suffix = publish_wikis_dict.get(r.id) if is_published else ""
+ link = link_prefix + public_url_suffix if public_url_suffix else ""
repo_info = {
"type": "mine",
"permission": 'rw',
"owner_nickname": email2nickname(username),
- "is_published": True if wiki_info['repo_id'] in publish_wiki_ids else False
+ "public_url_suffix": public_url_suffix,
+ "public_url": link,
+ "is_published": is_published
}
wiki_info.update(repo_info)
wiki_list.append(wiki_info)
@@ -140,11 +151,16 @@ class Wikis2View(APIView):
else:
owner_nickname = email2nickname(owner)
wiki_info = wiki.to_dict()
+ is_published = True if publish_wikis_dict.get(r.id) else False
+ public_url_suffix = publish_wikis_dict.get(r.id) if is_published else ""
+ link = link_prefix + public_url_suffix if public_url_suffix else ""
repo_info = {
"type": "shared",
"permission": r.permission,
"owner_nickname": owner_nickname,
- "is_published": True if wiki_info['repo_id'] in publish_wiki_ids else False
+ "public_url_suffix": public_url_suffix,
+ "public_url": link,
+ "is_published": is_published
}
wiki_info.update(repo_info)
wiki_list.append(wiki_info)
@@ -165,7 +181,7 @@ class Wikis2View(APIView):
r.owner = r.user
group_wiki_list = []
- group_id_wikis_map = _merge_wiki_in_groups(group_wikis, publish_wiki_ids)
+ group_id_wikis_map = _merge_wiki_in_groups(group_wikis, publish_wikis_dict, link_prefix)
for group_obj in user_wiki_groups:
group_wiki = {
'group_name': group_obj.group_name,
diff --git a/seahub/urls.py b/seahub/urls.py
index 3b8e86711b..4220eaba9b 100644
--- a/seahub/urls.py
+++ b/seahub/urls.py
@@ -148,6 +148,7 @@ from seahub.api2.endpoints.admin.users import AdminUsers, AdminUser, AdminUserRe
from seahub.api2.endpoints.admin.device_trusted_ip import AdminDeviceTrustedIP
from seahub.api2.endpoints.admin.libraries import AdminLibraries, AdminLibrary, \
AdminSearchLibrary
+from seahub.api2.endpoints.admin.wikis import AdminWikis
from seahub.api2.endpoints.admin.library_dirents import AdminLibraryDirents, AdminLibraryDirent
from seahub.api2.endpoints.admin.system_library import AdminSystemLibrary, \
AdminSystemLibraryUploadLink
@@ -733,6 +734,9 @@ urlpatterns = [
re_path(r'^api/v2.1/admin/invitations/$', AdminInvitations.as_view(), name='api-v2.1-admin-invitations'),
re_path(r'^api/v2.1/admin/invitations/(?P[a-f0-9]{32})/$', AdminInvitation.as_view(), name='api-v2.1-admin-invitation'),
+ ## admin:: wiki
+ re_path(r'^api/v2.1/admin/wikis/$', AdminWikis.as_view(), name='api-v2.1-admin-wikis'),
+
re_path(r'^wikis/(?P[^/]+)/$', wiki_view, name='wiki'),
re_path(r'^wiki/publish/(?P[-0-9a-zA-Z]+)/$', wiki_publish_view, name='wiki-publish'),
re_path(r'^wiki/file_revisions/(?P[^/]+)/$', wiki_history_view, name='wiki-history'),
@@ -821,6 +825,7 @@ urlpatterns = [
path('sys/notifications/', sysadmin_react_fake_view, name="sys_notifications"),
path('sys/web-settings/', sysadmin_react_fake_view, name="sys_web_settings"),
path('sys/all-libraries/', sysadmin_react_fake_view, name="sys_all_libraries"),
+ path('sys/all-wikis/', sysadmin_react_fake_view, name="sys_all_wikis"),
path('sys/search-libraries/', sysadmin_react_fake_view, name="sys_search_libraries"),
path('sys/system-library/', sysadmin_react_fake_view, name="sys_system_library"),
path('sys/trash-libraries/', sysadmin_react_fake_view, name="sys_trash_libraries"),
diff --git a/seahub/utils/db_api.py b/seahub/utils/db_api.py
index ba6d72f1a0..a53ab9aaa6 100644
--- a/seahub/utils/db_api.py
+++ b/seahub/utils/db_api.py
@@ -13,6 +13,18 @@ class RepoTrash(object):
self.size = kwargs.get('size')
self.del_time = kwargs.get('del_time')
+# Lightweight value object holding one row of wiki metadata fetched by
+# SeafileDB.get_all_wikis(); follows the same pattern as RepoTrash above.
+class WikiInfo(object):
+    def __init__(self, **kwargs):
+        # id of the library backing the wiki
+        self.repo_id = kwargs.get('repo_id')
+        # library name (RepoInfo.name)
+        self.wiki_name = kwargs.get('wiki_name')
+        # owner as stored in RepoOwner (user email or group identifier)
+        self.owner_id = kwargs.get('owner_id')
+        # note: exposed as `encrypted`, populated from the 'is_encrypted' key
+        self.encrypted = kwargs.get('is_encrypted')
+        # size in bytes (may be None when no RepoSize row exists)
+        self.size = kwargs.get('size')
+        self.status = kwargs.get('status')
+        # may be None when no RepoFileCount row exists
+        self.file_count = kwargs.get('file_count')
+        # unix timestamp (RepoInfo.update_time)
+        self.last_modified = kwargs.get('last_modified')
+
+
class SeafileDB:
@@ -461,4 +473,78 @@ class SeafileDB:
DELETE FROM `{self.db_name}`.`RepoUserToken` where repo_id="{repo_id}" AND email="{owner}"
"""
with connection.cursor() as cursor:
- cursor.execute(sql)
\ No newline at end of file
+ cursor.execute(sql)
+
+ def get_all_wikis(self, start, limit, order_by):
+ order_by_size_sql = f"""
+ SELECT r.repo_id, i.name, o.owner_id, i.is_encrypted, s.size, i.status, c.file_count, i.update_time
+ FROM
+ `{self.db_name}`.`Repo` r
+ LEFT JOIN `{self.db_name}`.`RepoInfo` i ON r.repo_id = i.repo_id
+ LEFT JOIN `{self.db_name}`.`RepoOwner` o ON i.repo_id = o.repo_id
+ LEFT JOIN `{self.db_name}`.`RepoSize` s ON s.repo_id = r.repo_id
+ LEFT JOIN `{self.db_name}`.`RepoFileCount` c ON r.repo_id = c.repo_id
+ WHERE
+ i.type = 'wiki'
+ ORDER BY
+ s.size DESC
+ LIMIT {limit} OFFSET {start}
+ """
+ order_by_filecount_sql = f"""
+ SELECT r.repo_id, i.name, o.owner_id, i.is_encrypted, s.size, i.status, c.file_count, i.update_time
+ FROM
+ `{self.db_name}`.`Repo` r
+ LEFT JOIN `{self.db_name}`.`RepoInfo` i ON r.repo_id = i.repo_id
+ LEFT JOIN `{self.db_name}`.`RepoOwner` o ON i.repo_id = o.repo_id
+ LEFT JOIN `{self.db_name}`.`RepoSize` s ON s.repo_id = r.repo_id
+ LEFT JOIN `{self.db_name}`.`RepoFileCount` c ON r.repo_id = c.repo_id
+ WHERE
+ i.type = 'wiki'
+ ORDER BY
+ c.file_count DESC
+ LIMIT {limit} OFFSET {start}
+ """
+ sql = f"""
+ SELECT r.repo_id, i.name, o.owner_id, i.is_encrypted, s.size, i.status, c.file_count, i.update_time
+ FROM
+ `{self.db_name}`.`Repo` r
+ LEFT JOIN `{self.db_name}`.`RepoInfo` i ON r.repo_id = i.repo_id
+ LEFT JOIN `{self.db_name}`.`RepoOwner` o ON r.repo_id = o.repo_id
+ LEFT JOIN `{self.db_name}`.`RepoSize` s ON r.repo_id = s.repo_id
+ LEFT JOIN `{self.db_name}`.`RepoFileCount` c ON r.repo_id = c.repo_id
+ WHERE
+ i.type = 'wiki'
+ LIMIT {limit} OFFSET {start}
+ """
+
+ with connection.cursor() as cursor:
+ wikis = []
+ if order_by == 'size':
+ cursor.execute(order_by_size_sql)
+
+ elif order_by == 'file_count':
+ cursor.execute(order_by_filecount_sql)
+ else:
+ cursor.execute(sql)
+ for item in cursor.fetchall():
+ repo_id = item[0]
+ wiki_name = item[1]
+ owner_id = item[2]
+ is_encrypted = item[3]
+ size = item[4]
+ status = item[5]
+ file_count = item[6]
+ last_modified = item[7]
+ params = {
+ 'repo_id': repo_id,
+ 'wiki_name': wiki_name,
+ 'owner_id': owner_id,
+ 'is_encrypted': is_encrypted,
+ 'size': size,
+ 'status': status,
+ 'file_count': file_count,
+ 'last_modified': last_modified
+ }
+ wiki_info = WikiInfo(**params)
+ wikis.append(wiki_info)
+ return wikis
|