1
0
mirror of https://github.com/haiwen/seahub.git synced 2025-08-31 14:42:10 +00:00

Wiki page trash (#6423)

* add wiki page trash

* update css

* update ui

* redesign

* update ui

* optimize code

* Update views.py

* Update models.py

* update notes

* Update mysql.sql

* change wiki trash UI

* redesign2

* update

* update

---------

Co-authored-by: 孙永强 <11704063+s-yongqiang@user.noreply.gitee.com>
Co-authored-by: r350178982 <32759763+r350178982@users.noreply.github.com>
Co-authored-by: Michael An <2331806369@qq.com>
This commit is contained in:
awu0403
2024-08-16 10:17:54 +08:00
committed by GitHub
parent 2b1c9cc8df
commit 6d0680f4b4
16 changed files with 800 additions and 73 deletions

View File

@@ -0,0 +1,86 @@
import React from 'react';
import PropTypes from 'prop-types';
import { Modal, ModalHeader, ModalBody, ModalFooter } from 'reactstrap';
import CreatableSelect from 'react-select/creatable';
import { gettext } from '../../utils/constants';
import { Utils } from '../../utils/utils';
import toaster from '../toast';
import wikiAPI from '../../utils/wiki-api';
const propTypes = {
wikiId: PropTypes.string.isRequired,
refreshTrash: PropTypes.func.isRequired,
toggleDialog: PropTypes.func.isRequired
};
class WikiCleanTrash extends React.Component {
constructor(props) {
super(props);
this.options = [
{ label: gettext('3 days ago'), value: 3 },
{ label: gettext('1 week ago'), value: 7 },
{ label: gettext('1 month ago'), value: 30 },
{ label: gettext('all'), value: 0 }
];
this.state = {
inputValue: this.options[0],
submitBtnDisabled: false
};
}
handleInputChange = (value) => {
this.setState({
inputValue: value
});
};
formSubmit = () => {
const inputValue = this.state.inputValue;
const { wikiId } = this.props;
this.setState({
submitBtnDisabled: true
});
wikiAPI.cleanWikiTrash(wikiId, inputValue.value).then((res) => {
toaster.success(gettext('Clean succeeded.'));
this.props.refreshTrash();
this.props.toggleDialog();
}).catch((error) => {
let errorMsg = Utils.getErrorMsg(error);
this.setState({
formErrorMsg: errorMsg,
submitBtnDisabled: false
});
});
};
render() {
const { formErrorMsg } = this.state;
return (
<Modal isOpen={true} centered={true} toggle={this.props.toggleDialog}>
<ModalHeader toggle={this.props.toggleDialog}>{gettext('Clean')}</ModalHeader>
<ModalBody>
<React.Fragment>
<p>{gettext('Clear files in trash and history')}</p>
<CreatableSelect
defaultValue={this.options[0]}
options={this.options}
autoFocus={false}
onChange={this.handleInputChange}
placeholder=''
/>
{formErrorMsg && <p className="error m-0 mt-2">{formErrorMsg}</p>}
</React.Fragment>
</ModalBody>
<ModalFooter>
<button className="btn btn-primary" disabled={this.state.submitBtnDisabled} onClick={this.formSubmit}>{gettext('Submit')}</button>
</ModalFooter>
</Modal>
);
}
}
WikiCleanTrash.propTypes = propTypes;
export default WikiCleanTrash;

View File

@@ -0,0 +1,45 @@
/* Wiki trash dialog: wide modal listing the wiki's deleted pages. */
.trash-dialog {
max-width: 1100px;
}
.trash-dialog .modal-header {
align-items: center;
display: flex;
}
/* Right-aligned control group in the header (Clean button + close icon).
   NOTE(review): "but-contral" looks like a typo for "btn-control", but the
   class name is referenced from the dialog JSX, so renaming requires a
   coordinated change in both files. */
.trash-dialog .modal-header .but-contral {
margin-left: auto;
}
/* Compact "Clean" button sized to fit inside the modal header. */
.trash-dialog .modal-header .clean {
height: 30px;
line-height: 28px;
padding: 0 0.5rem;
}
/* Custom close icon (the default reactstrap close button is replaced). */
.trash-dialog .modal-header .trash-dialog-close-icon {
color: #000;
opacity: 0.5;
font-weight: 700;
cursor: pointer;
}
.trash-dialog .modal-header .trash-dialog-close-icon:hover {
opacity: 0.75;
}
/* Fixed-height scrollable listing area. */
.trash-dialog .modal-body {
height: 500px;
overflow-y: auto;
}
/* "More" (load more) button styling inside the listing. */
.trash-dialog .modal-body .more {
background: #efefef;
border: 0;
color: #777;
}
.trash-dialog .modal-body .more:hover {
color: #000;
background: #dfdfdf;
}

View File

@@ -279,6 +279,7 @@
color: #212529;
}
.wiki-nav .wiki2-trash .sf3-font,
.wiki-nav .wiki-page-item .sf3-font.sf3-font-enlarge,
.wiki-nav .wiki-page-item .seafile-multicolor-icon-more-level {
color: #666;
@@ -289,3 +290,16 @@
height: 1em;
font-size: 16px;
}
/* Trash entry shown at the bottom of the wiki page navigation. */
.wiki-nav .wiki2-trash {
height: 32px;
display: flex;
align-items: center;
padding-left: 10px;
margin-top: 16px;
cursor: pointer;
}
/* Hover highlight matching the nav item style. */
.wiki-nav .wiki2-trash:hover {
background-color: #EFEFED;
}

View File

@@ -239,6 +239,7 @@ class Wiki extends Component {
onCloseSide={this.onCloseSide}
config={this.state.config}
updateWikiConfig={this.updateWikiConfig}
getWikiConfig={this.getWikiConfig}
setCurrentPage={this.setCurrentPage}
currentPageId={this.state.currentPageId}
onUpdatePage={this.onUpdatePage}

View File

@@ -23,15 +23,5 @@ export default class WikiConfig {
}
}
traversePage({ children: this.navigation });
for (let key in page_id_map) {
if (page_id_map[key] === false) {
const page = this.pages.find(item => item.id === key);
this.navigation.push({
id: page.id,
type: 'page',
children: page.children || [],
});
}
}
}
}

View File

@@ -12,6 +12,7 @@ import { isObjectNotEmpty } from './utils';
import wikiAPI from '../../utils/wiki-api';
import { Utils } from '../../utils/utils';
import WikiExternalOperations from './wiki-external-operations';
import WikiTrashDialog from './wiki-trash-dialog';
import './side-panel.css';
@@ -22,13 +23,19 @@ const propTypes = {
isLoading: PropTypes.bool.isRequired,
config: PropTypes.object.isRequired,
updateWikiConfig: PropTypes.func.isRequired,
getWikiConfig: PropTypes.func.isRequired,
setCurrentPage: PropTypes.func.isRequired,
currentPageId: PropTypes.string,
onUpdatePage: PropTypes.func.isRequired,
};
class SidePanel extends Component {
constructor(props) {
super(props);
this.state = {
isShowTrashDialog: false,
};
}
confirmDeletePage = (pageId) => {
const config = deepCopy(this.props.config);
const { pages } = config;
@@ -93,6 +100,10 @@ class SidePanel extends Component {
this.props.updateWikiConfig(config);
};
toggelTrashDialog = () => {
this.setState({ 'isShowTrashDialog': !this.state.isShowTrashDialog });
};
renderWikiNav = () => {
const { config, onUpdatePage } = this.props;
const { pages, navigation } = config;
@@ -112,6 +123,7 @@ class SidePanel extends Component {
duplicatePage={this.duplicatePage}
currentPageId={this.props.currentPageId}
addPageInside={this.addPageInside}
toggelTrashDialog={this.toggelTrashDialog}
/>
}
</div>
@@ -156,9 +168,16 @@ class SidePanel extends Component {
</UncontrolledTooltip>
</div>
<div className="wiki2-side-nav">
{isLoading ? <Loading /> : this.renderWikiNav()}
{isLoading ? <Loading/> : this.renderWikiNav()}
</div>
<WikiExternalOperations onAddWikiPage={this.handleAddNewPage.bind(false)} />
<WikiExternalOperations onAddWikiPage={this.handleAddNewPage.bind(false)}/>
{this.state.isShowTrashDialog && (
<WikiTrashDialog
showTrashDialog={this.state.isShowTrashDialog}
toggleTrashDialog={this.toggelTrashDialog}
getWikiConfig={this.props.getWikiConfig}
/>
)}
</div>
);
}

View File

@@ -3,7 +3,7 @@ import PropTypes from 'prop-types';
import { DropTarget, DragLayer } from 'react-dnd';
import html5DragDropContext from './html5DragDropContext';
import DraggedPageItem from './pages/dragged-page-item';
import { repoID } from '../../../utils/constants';
import { repoID, gettext } from '../../../utils/constants';
import '../css/wiki-nav.css';
@@ -21,6 +21,7 @@ class WikiNav extends Component {
currentPageId: PropTypes.string,
addPageInside: PropTypes.func,
updateWikiConfig: PropTypes.func.isRequired,
toggelTrashDialog: PropTypes.func.isRequired,
};
constructor(props) {
@@ -107,6 +108,10 @@ class WikiNav extends Component {
{navigation.map((item, index) => {
return this.renderPage(item, index, pages.length, isOnlyOnePage, id_page_map, layerDragProps);
})}
<div className='wiki2-trash' onClick={this.props.toggelTrashDialog}>
<span className="sf3-font-recycle1 sf3-font mr-2"></span>
<span>{gettext('Trash')}</span>
</div>
</div>
);
});

View File

@@ -0,0 +1,271 @@
import React from 'react';
import PropTypes from 'prop-types';
import { Modal, ModalHeader, ModalBody } from 'reactstrap';
import moment from 'moment';
import { Utils } from '../../utils/utils';
import { gettext, wikiId } from '../../utils/constants';
import wikiAPI from '../../utils/wiki-api';
import ModalPortal from '../../components/modal-portal';
import toaster from '../../components/toast';
import Paginator from '../../components/paginator';
import WikiCleanTrash from '../../components/dialog/wiki-clean-trash';
import NavItemIcon from './common/nav-item-icon';
import '../../css/toolbar.css';
import '../../css/search.css';
import '../../css/wiki-trash-dialog.css';
const propTypes = {
showTrashDialog: PropTypes.bool.isRequired,
toggleTrashDialog: PropTypes.func.isRequired,
getWikiConfig: PropTypes.func.isRequired
};
class WikiTrashDialog extends React.Component {
constructor(props) {
super(props);
this.state = {
isLoading: true,
errorMsg: '',
items: [],
isCleanTrashDialogOpen: false,
currentPage: 1,
perPage: 100,
hasNextPage: false
};
}
componentDidMount() {
this.getItems();
}
getItems = (page) => {
wikiAPI.getWikiTrash(wikiId, page, this.state.perPage).then((res) => {
const { items, total_count } = res.data;
if (!page) {
page = 1;
}
this.setState({
currentPage: page,
hasNextPage: total_count - page * this.state.perPage > 0,
isLoading: false,
items: items,
});
});
};
resetPerPage = (perPage) => {
this.setState({
perPage: perPage
}, () => {
this.getItems(1);
});
};
cleanTrash = () => {
this.toggleCleanTrashDialog();
};
toggleCleanTrashDialog = () => {
this.setState({
isCleanTrashDialogOpen: !this.state.isCleanTrashDialogOpen
});
};
refreshTrash = () => {
this.setState({
isLoading: true,
errorMsg: '',
items: []
});
this.getItems();
};
render() {
const { showTrashDialog, toggleTrashDialog } = this.props;
const { isCleanTrashDialogOpen } = this.state;
const { isAdmin, enableUserCleanTrash, repoName } = window.wiki.config;
let title = gettext('{placeholder} Wiki Trash');
title = title.replace('{placeholder}', '<span class="op-target text-truncate mx-1">' + Utils.HTMLescape(repoName) + '</span>');
return (
<Modal className="trash-dialog" isOpen={showTrashDialog} toggle={toggleTrashDialog}>
<ModalHeader
close={
<>
<div className="but-contral">
{(isAdmin && enableUserCleanTrash) &&
<button className="btn btn-secondary clean flex-shrink-0 ml-4" onClick={this.cleanTrash}>{gettext('Clean')}</button>
}
<span aria-hidden="true" className="trash-dialog-close-icon sf3-font sf3-font-x-01 ml-4" onClick={toggleTrashDialog}></span>
</div>
</>
}
>
<div dangerouslySetInnerHTML={{ __html: title }}></div>
</ModalHeader>
<ModalBody>
<Content
data={this.state}
currentPage={this.state.currentPage}
curPerPage={this.state.perPage}
hasNextPage={this.state.hasNextPage}
getListByPage={this.getItems}
resetPerPage={this.resetPerPage}
getWikiConfig={this.props.getWikiConfig}
/>
{isCleanTrashDialogOpen &&
<ModalPortal>
<WikiCleanTrash
wikiId={wikiId}
refreshTrash={this.refreshTrash}
toggleDialog={this.toggleCleanTrashDialog}
/>
</ModalPortal>
}
</ModalBody>
</Modal>
);
}
}
// Presentational table + paginator for the trash listing.
class Content extends React.Component {

  constructor(props) {
    super(props);
    // Column headers; `width` is a percentage of the table width.
    // NOTE(review): the 'Name' label sits over the 3% icon column while the
    // actual name column (20%) has an empty header — looks transposed;
    // confirm against the rendered UI.
    this.theadData = [
      { width: '3%', text: gettext('Name') },
      { width: '20%', text: '' },
      { width: '30%', text: gettext('Size') },
      { width: '37%', text: gettext('Delete Time') },
      { width: '10%', text: '' }
    ];
  }

  // Pagination callbacks delegate to the parent's getItems via getListByPage.
  getPreviousPage = () => {
    this.props.getListByPage(this.props.currentPage - 1);
  };

  getNextPage = () => {
    this.props.getListByPage(this.props.currentPage + 1);
  };

  render() {
    const { items } = this.props.data;
    const { curPerPage, currentPage, hasNextPage } = this.props;
    return (
      <React.Fragment>
        <table className="table-hover">
          <thead>
            <tr>
              {this.theadData.map((item, index) => {
                return <th key={index} className={index === 0 ? 'pl-3' : ''} width={item.width}>{item.text}</th>;
              })}
            </tr>
          </thead>
          <tbody>
            {items.map((item, index) => {
              return (
                <Item
                  key={index}
                  item={item}
                  getWikiConfig={this.props.getWikiConfig}
                />
              );
            })}
          </tbody>
        </table>
        <Paginator
          gotoPreviousPage={this.getPreviousPage}
          gotoNextPage={this.getNextPage}
          currentPage={currentPage}
          hasNextPage={hasNextPage}
          curPerPage={curPerPage}
          resetPerPage={this.props.resetPerPage}
        />
      </React.Fragment>
    );
  }
}

Content.propTypes = {
  data: PropTypes.object.isRequired,
  getListByPage: PropTypes.func.isRequired,
  resetPerPage: PropTypes.func.isRequired,
  currentPage: PropTypes.number.isRequired,
  curPerPage: PropTypes.number.isRequired,
  hasNextPage: PropTypes.bool.isRequired,
  getWikiConfig: PropTypes.func.isRequired
};
class Item extends React.Component {
constructor(props) {
super(props);
this.state = {
restored: false,
isIconShown: false,
getWikiConfig: PropTypes.func.isRequired
};
}
handleMouseOver = () => {
this.setState({ isIconShown: true });
};
handleMouseOut = () => {
this.setState({ isIconShown: false });
};
restoreItem = (e) => {
e.preventDefault();
const item = this.props.item;
wikiAPI.revertTrashPage(wikiId, item.page_id).then(res => {
this.setState({
restored: true
});
this.props.getWikiConfig();
toaster.success(gettext('Successfully restored 1 item.'));
}).catch((error) => {
let errorMsg = '';
if (error.response) {
errorMsg = error.response.data.error_msg || gettext('Error');
} else {
errorMsg = gettext('Please check the network.');
}
toaster.danger(errorMsg);
});
};
render() {
const item = this.props.item;
const { restored, isIconShown } = this.state;
if (restored) {
return null;
}
const { isAdmin } = window.wiki.config;
return (
<tr onMouseOver={this.handleMouseOver} onMouseOut={this.handleMouseOut} onFocus={this.handleMouseOver}>
<td><NavItemIcon symbol={'file'} disable={true} /></td>
<td>{item.name}</td>
<td>{Utils.bytesToSize(item.size)}</td>
<td title={moment(item.deleted_time).format('LLLL')}>{moment(item.deleted_time).format('YYYY-MM-DD')}</td>
<td>
{isAdmin &&
<a href="#" className={isIconShown ? '' : 'invisible'} onClick={this.restoreItem} role="button">{gettext('Restore')}</a>
}
</td>
</tr>
);
}
}
Item.propTypes = {
item: PropTypes.object.isRequired
};
WikiTrashDialog.propTypes = propTypes;
export default WikiTrashDialog;

View File

@@ -226,6 +226,33 @@ class WikiAPI {
return this._sendPostRequest(url, form);
}
getWikiTrash(wikiId, page, per_page) {
const url = this.server + '/api/v2.1/wiki2/' + wikiId + '/trash/';
let params = {
page: page || 1,
per_page: per_page
};
return this.req.get(url, { params: params });
}
revertTrashPage(wikiId, page_id) {
const url = this.server + '/api/v2.1/wiki2/' + wikiId + '/trash/';
let params = {
page_id: page_id
};
return this.req.put(url, params);
}
cleanWikiTrash(wikiId, days) {
const url = this.server + '/api/v2.1/wiki2/' + wikiId + '/trash/';
let params = {
keep_days: days
};
return this.req.delete(url, {
data: params
});
}
}
let wikiAPI = new WikiAPI();

View File

@@ -6,9 +6,11 @@ import logging
import requests
import posixpath
import time
import datetime
import uuid
import urllib.request, urllib.error, urllib.parse
from copy import deepcopy
from constance import config
from rest_framework import status
from rest_framework.authentication import SessionAuthentication
@@ -24,11 +26,12 @@ from seahub.api2.throttling import UserRateThrottle
from seahub.api2.utils import api_error, to_python_boolean, is_wiki_repo
from seahub.utils.db_api import SeafileDB
from seahub.wiki2.models import Wiki2 as Wiki
from seahub.wiki2.models import WikiPageTrash
from seahub.wiki2.utils import is_valid_wiki_name, can_edit_wiki, get_wiki_dirs_by_path, \
get_wiki_config, WIKI_PAGES_DIR, WIKI_CONFIG_PATH, WIKI_CONFIG_FILE_NAME, is_group_wiki, \
check_wiki_admin_permission, check_wiki_permission, get_all_wiki_ids, get_and_gen_page_nav_by_id, \
get_current_level_page_ids, save_wiki_config, gen_unique_id, gen_new_page_nav_by_id, pop_nav, \
delete_page, move_nav
delete_page, move_nav, revert_nav, get_sub_ids_by_page_id, get_parent_id_stack
from seahub.utils import is_org_context, get_user_repos, gen_inner_file_get_url, gen_file_upload_url, \
normalize_dir_path, is_pro_version, check_filename_with_rename, is_valid_dirent_name, get_no_duplicate_obj_name
@@ -41,7 +44,7 @@ from seahub.seadoc.utils import get_seadoc_file_uuid, gen_seadoc_access_token, c
from seahub.settings import SEADOC_SERVER_URL, ENABLE_STORAGE_CLASSES, STORAGE_CLASS_MAPPING_POLICY, \
ENCRYPTED_LIBRARY_VERSION
from seahub.seadoc.sdoc_server_api import SdocServerAPI
from seahub.utils.timeutils import timestamp_to_isoformat_timestr, datetime_to_isoformat_timestr
from seahub.utils.timeutils import timestamp_to_isoformat_timestr
from seahub.utils.ccnet_db import CcnetDB
from seahub.tags.models import FileUUIDMap
from seahub.seadoc.models import SeadocHistoryName, SeadocDraft, SeadocCommentReply
@@ -51,6 +54,7 @@ from seahub.group.utils import group_id_to_name, is_group_admin
from seahub.utils.rpc import SeafileAPI
from seahub.constants import PERMISSION_READ_WRITE
from seaserv import ccnet_api
from seahub.signals import clean_up_repo_trash
HTTP_520_OPERATION_FAILED = 520
@@ -163,7 +167,6 @@ class Wikis2View(APIView):
'wiki_info': group_id_wikis_map[group_obj.id]
}
group_wiki_list.append(group_wiki)
wiki_list = sorted(wiki_list, key=lambda x: x.get('updated_at'), reverse=True)
return Response({'wikis': wiki_list, 'group_wikis': group_wiki_list})
@@ -249,7 +252,7 @@ class Wikis2View(APIView):
logger.error(e)
msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, msg)
repo = seafile_api.get_repo(repo_id)
wiki = Wiki(repo, wiki_owner)
wiki_info = wiki.to_dict()
@@ -258,7 +261,7 @@ class Wikis2View(APIView):
else:
group_id = int(wiki.owner.split('@')[0])
wiki_info['owner_nickname'] = group_id_to_name(group_id)
return Response(wiki_info)
@@ -287,7 +290,7 @@ class Wiki2View(APIView):
repo_id = wiki.repo_id
repo = seafile_api.get_repo(repo_id)
if not repo:
error_msg = "Wiki library not found."
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
@@ -323,7 +326,7 @@ class Wiki2View(APIView):
"""Delete a wiki.
"""
username = request.user.username
wiki = Wiki.objects.get(wiki_id=wiki_id)
if not wiki:
error_msg = 'Wiki not found.'
@@ -335,7 +338,7 @@ class Wiki2View(APIView):
if not check_wiki_admin_permission(wiki, username):
error_msg = 'Permission denied.'
return api_error(status.HTTP_403_FORBIDDEN, error_msg)
org_id = -1
if is_org_context(request):
org_id = request.user.org.org_id
@@ -394,7 +397,7 @@ class Wiki2ConfigView(APIView):
def get(self, request, wiki_id):
wiki = Wiki.objects.get(wiki_id=wiki_id)
if not wiki:
error_msg = "Wiki not found."
@@ -452,7 +455,7 @@ class Wiki2PagesView(APIView):
error_msg = 'page_name invalid.'
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
wiki = Wiki.objects.get(wiki_id=wiki_id)
if not wiki:
error_msg = "Wiki not found."
@@ -545,7 +548,7 @@ class Wiki2PagesView(APIView):
return Response({'file_info': file_info})
def put(self, request, wiki_id):
wiki = Wiki.objects.get(wiki_id=wiki_id)
if not wiki:
error_msg = "Wiki not found."
@@ -611,7 +614,7 @@ class Wiki2PageView(APIView):
def get(self, request, wiki_id, page_id):
wiki = Wiki.objects.get(wiki_id=wiki_id)
if not wiki:
error_msg = "Wiki not found."
@@ -681,7 +684,7 @@ class Wiki2PageView(APIView):
})
def delete(self, request, wiki_id, page_id):
wiki = Wiki.objects.get(wiki_id=wiki_id)
if not wiki:
error_msg = "Wiki not found."
@@ -704,14 +707,13 @@ class Wiki2PageView(APIView):
wiki_config = get_wiki_config(repo_id, username)
pages = wiki_config.get('pages', [])
page_info = next(filter(lambda t: t['id'] == page_id, pages), {})
path = page_info.get('path')
if not page_info:
error_msg = 'page %s not found.' % page_id
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
# check file lock
try:
path = page_info.get('path')
is_locked, locked_by_me = check_file_lock(repo_id, path, username)
except Exception as e:
logger.error(e)
@@ -731,47 +733,30 @@ class Wiki2PageView(APIView):
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
# update navigation and page
pop_nav(navigation, page_id)
id_set = get_all_wiki_ids(navigation)
new_pages, old_pages = delete_page(pages, id_set)
for old_page in old_pages:
sdoc_dir_path = os.path.dirname(old_page['path'])
parent_dir = os.path.dirname(sdoc_dir_path)
dir_name = os.path.basename(sdoc_dir_path)
old_page['sdoc_dir_path'] = sdoc_dir_path
old_page['parent_dir'] = parent_dir
old_page['dir_name'] = dir_name
stack_ids = get_parent_id_stack(navigation, page_id)
parent_page_id = stack_ids.pop() if stack_ids else None
subpages = pop_nav(navigation, page_id)
# delete the folder where the sdoc is located
try:
for old_page in old_pages:
seafile_api.del_file(repo_id, old_page['parent_dir'], json.dumps([old_page['dir_name']]), username)
except SearpcError as e:
file_id = seafile_api.get_file_id_by_path(repo_id, page_info['path'])
page_size = seafile_api.get_file_size(repo.store_id, repo.version, file_id)
doc_uuid = os.path.basename(os.path.dirname(page_info['path']))
WikiPageTrash.objects.create(repo_id=repo_id,
doc_uuid=doc_uuid,
page_id=page_info['id'],
parent_page_id=parent_page_id,
subpages=json.dumps(subpages),
name=page_info['name'],
delete_time=datetime.datetime.utcnow(),
size=page_size)
except Exception as e:
logger.error(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
try: # rm sdoc fileuuid
for old_page in old_pages:
file_name = os.path.basename(old_page['path'])
file_uuid = get_seadoc_file_uuid(repo, old_page['path'])
FileComment.objects.filter(uuid=file_uuid).delete()
FileUUIDMap.objects.delete_fileuuidmap_by_path(repo_id, old_page['sdoc_dir_path'], file_name, is_dir=False)
SeadocHistoryName.objects.filter(doc_uuid=file_uuid).delete()
SeadocDraft.objects.filter(doc_uuid=file_uuid).delete()
SeadocCommentReply.objects.filter(doc_uuid=file_uuid).delete()
except Exception as e:
logger.error(e)
# update wiki_config
try:
wiki_config['navigation'] = navigation
wiki_config['pages'] = new_pages
# TODO: add trash.
if 'trash_pages' in wiki_config:
wiki_config['trash_pages'].extend(old_pages)
else:
wiki_config['trash_pages'] = old_pages
wiki_config = json.dumps(wiki_config)
save_wiki_config(wiki, request.user.username, wiki_config)
except Exception as e:
@@ -794,7 +779,7 @@ class Wiki2DuplicatePageView(APIView):
error_msg = 'page_id invalid.'
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
wiki = Wiki.objects.get(wiki_id=wiki_id)
if not wiki:
error_msg = "Wiki not found."
@@ -908,3 +893,184 @@ class Wiki2DuplicatePageView(APIView):
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
return Response({'wiki_config': wiki_config})
class WikiPageTrashView(APIView):
    """List, restore (PUT) and purge (DELETE) pages in a wiki's trash."""

    authentication_classes = (TokenAuthentication, SessionAuthentication)
    permission_classes = (IsAuthenticated,)
    throttle_classes = (UserRateThrottle,)

    def get(self, request, wiki_id):
        """Return a paginated listing of the wiki's trashed pages.

        Query params: page (1-based, default 1), per_page (default 100).
        """
        # NOTE(review): Django's Manager.get raises DoesNotExist instead of
        # returning None, so this `if not wiki` guard likely never fires and
        # a missing wiki would surface as an unhandled exception — confirm.
        wiki = Wiki.objects.get(wiki_id=wiki_id)
        if not wiki:
            error_msg = "Wiki not found."
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        # check argument
        try:
            current_page = int(request.GET.get('page', '1'))
            per_page = int(request.GET.get('per_page', '100'))
        except ValueError:
            current_page = 1
            per_page = 100
        start = (current_page - 1) * per_page
        end = per_page + start

        # check permission (read permission is enough for listing)
        repo_owner = get_repo_owner(request, wiki_id)
        wiki.owner = repo_owner
        username = request.user.username
        if not check_wiki_permission(wiki, username):
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)

        # check resource
        repo_id = wiki.repo_id
        repo = seafile_api.get_repo(repo_id)
        if not repo:
            error_msg = 'Library %s not found.' % repo_id
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        # Newest deletions first; count before slicing for the total.
        trash_pages = WikiPageTrash.objects.filter(repo_id=repo_id).order_by('-delete_time')
        total_count = trash_pages.count()
        trash_pages = trash_pages[start: end]
        items = []
        for item in trash_pages:
            items.append(item.to_dict())
        return Response({'items': items, 'total_count': total_count})

    def put(self, request, wiki_id):
        """revert page

        Moves a trashed page's navigation subtree back under its former
        parent (or to the root when the parent is gone) and deletes the
        trash row. Requires wiki admin permission.
        """
        page_id = request.data.get('page_id', None)
        if not page_id:
            error_msg = "Page not found."
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)
        wiki = Wiki.objects.get(wiki_id=wiki_id)
        if not wiki:
            error_msg = "Wiki not found."
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)
        repo_owner = get_repo_owner(request, wiki_id)
        wiki.owner = repo_owner
        username = request.user.username
        if not check_wiki_admin_permission(wiki, username):
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)
        repo_id = wiki.repo_id
        repo = seafile_api.get_repo(repo_id)
        if not repo:
            error_msg = 'Library %s not found.' % repo_id
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        # update wiki config
        wiki_config = get_wiki_config(repo_id, username)
        navigation = wiki_config.get('navigation', [])
        try:
            # NOTE(review): looked up by page_id only (a 4-char id that is
            # not declared unique), without filtering on repo_id — confirm
            # collisions across wikis are impossible.
            page = WikiPageTrash.objects.get(page_id=page_id)
            subpages = json.loads(page.subpages)
            parent_page_id = page.parent_page_id
            revert_nav(navigation, parent_page_id, subpages)
            page.delete()
            wiki_config = json.dumps(wiki_config)
            save_wiki_config(wiki, username, wiki_config)
        except Exception as e:
            logger.exception(e)
            error_msg = 'Internal Server Error'
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
        return Response({'success': True})

    def delete(self, request, wiki_id):
        """Clean Wiki Trash

        Permanently removes trash rows deleted before the keep_days cutoff,
        deletes their sdoc directories and related records, and rewrites the
        wiki config.

        Permission checking:
        1. wiki owner can perform this action.
        2. is group admin."""
        # argument check
        try:
            keep_days = int(request.data.get('keep_days', 0))
        except ValueError:
            error_msg = 'keep_days invalid.'
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
        wiki = Wiki.objects.get(wiki_id=wiki_id)
        if not wiki:
            error_msg = "Wiki not found."
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        # resource check
        repo_id = wiki.repo_id
        repo = seafile_api.get_repo(repo_id)
        if not repo:
            error_msg = 'Library %s not found.' % repo_id
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        # permission check
        username = request.user.username
        repo_owner = get_repo_owner(request, repo_id)
        wiki.owner = repo_owner
        if not config.ENABLE_USER_CLEAN_TRASH:
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)
        if not check_wiki_admin_permission(wiki, username):
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)

        wiki_config = get_wiki_config(repo_id, username)
        # NOTE(review): trash rows are created with utcnow() elsewhere in this
        # file but compared here against now(); on a non-UTC server the
        # cutoff is skewed by the timezone offset — confirm.
        _timestamp = datetime.datetime.now() - datetime.timedelta(days=keep_days)
        del_pages = WikiPageTrash.objects.filter(repo_id=repo_id, delete_time__lt=_timestamp)
        navigation = wiki_config.get('navigation', [])
        pages = wiki_config.get('pages', [])
        # Collect the ids of every page inside each deleted subtree.
        id_list = []
        for del_page in del_pages:
            get_sub_ids_by_page_id([(json.loads(del_page.subpages))], id_list)
        id_set = set(id_list)
        clean_pages, not_del_pages = delete_page(pages, id_set)
        try:
            file_uuids = []
            for del_page in clean_pages:
                # rm dir
                sdoc_dir_path = os.path.dirname(del_page['path'])
                parent_dir = os.path.dirname(sdoc_dir_path)
                dir_name = os.path.basename(sdoc_dir_path)
                seafile_api.del_file(repo_id, parent_dir,
                                     json.dumps([dir_name]), username)
                # rm sdoc fileuuid
                file_uuid = get_seadoc_file_uuid(repo, del_page['path'])
                file_uuids.append(file_uuid)
            FileComment.objects.filter(uuid__in=file_uuids).delete()
            FileUUIDMap.objects.filter(uuid__in=file_uuids).delete()
            SeadocHistoryName.objects.filter(doc_uuid__in=file_uuids).delete()
            SeadocDraft.objects.filter(doc_uuid__in=file_uuids).delete()
            SeadocCommentReply.objects.filter(doc_uuid__in=file_uuids).delete()
        except Exception as e:
            # Best-effort file cleanup: log and continue to history cleanup.
            logger.error(e)
        try:
            # Drop the whole repo history so purged files cannot be recovered.
            seafile_api.clean_up_repo_history(repo_id, 0)
        except Exception as e:
            logger.error(e)
            error_msg = 'Internal Server Error'
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)

        # update wiki_config
        try:
            del_pages.delete()
            wiki_config['navigation'] = navigation
            wiki_config['pages'] = not_del_pages
            wiki_config = json.dumps(wiki_config)
            save_wiki_config(wiki, username, wiki_config)
        except Exception as e:
            logger.error(e)
            error_msg = 'Internal Server Error'
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
        return Response({'success': True})

View File

@@ -24,11 +24,13 @@
config: {
wikiId: "{{ wiki.id }}",
repoName: "{{ wiki.name }}",
isAdmin: {% if is_admin %} true {% else %} false {% endif %},
initial_path: "{{ file_path|escapejs }}",
isWiki2: true,
seadocServerUrl: "{{ seadoc_server_url }}",
seadocAccessToken: "{{ seadoc_access_token }}",
permission: "{{ permission }}",
enableUserCleanTrash: {% if enable_user_clean_trash %} true {% else %} false {% endif %}
}
};
</script>

View File

@@ -205,7 +205,8 @@ from seahub.ocm.settings import OCM_ENDPOINT
from seahub.ai.apis import Search
from seahub.wiki2.views import wiki_view
from seahub.api2.endpoints.wiki2 import Wikis2View, Wiki2View, Wiki2ConfigView, Wiki2PagesView, Wiki2PageView, Wiki2DuplicatePageView
from seahub.api2.endpoints.wiki2 import Wikis2View, Wiki2View, Wiki2ConfigView, Wiki2PagesView, Wiki2PageView, \
Wiki2DuplicatePageView, WikiPageTrashView
from seahub.api2.endpoints.subscription import SubscriptionView, SubscriptionPlansView, SubscriptionLogsView
from seahub.api2.endpoints.metadata_manage import MetadataRecords, MetadataManage, MetadataColumns, MetadataRecordInfo, \
MetadataViews, MetadataViewsMoveView, MetadataViewsDetailView
@@ -541,6 +542,7 @@ urlpatterns = [
re_path(r'^api/v2.1/wiki2/(?P<wiki_id>[-0-9a-f]{36})/pages/$', Wiki2PagesView.as_view(), name='api-v2.1-wiki2-pages'),
re_path(r'^api/v2.1/wiki2/(?P<wiki_id>[-0-9a-f]{36})/page/(?P<page_id>[-0-9a-zA-Z]{4})/$', Wiki2PageView.as_view(), name='api-v2.1-wiki2-page'),
re_path(r'^api/v2.1/wiki2/(?P<wiki_id>[-0-9a-f]{36})/duplicate-page/$', Wiki2DuplicatePageView.as_view(), name='api-v2.1-wiki2-duplicate-page'),
# Fix: anchor the pattern with `$` like the sibling routes above, so the
# trash view does not also match arbitrary sub-paths under .../trash/.
re_path(r'^api/v2.1/wiki2/(?P<wiki_id>[-0-9a-f]{36})/trash/$', WikiPageTrashView.as_view(), name='api-v2.1-wiki2-trash'),
## user::drafts
re_path(r'^api/v2.1/drafts/$', DraftsView.as_view(), name='api-v2.1-drafts'),

View File

@@ -45,6 +45,32 @@ class Wiki2(object):
}
class WikiPageTrash(models.Model):
    """A soft-deleted wiki page, kept so it can be restored or purged."""

    # Library (repo) the wiki lives in; indexed for the per-wiki listing.
    repo_id = models.CharField(max_length=36, db_index=True)
    # UUID string derived from the page file's directory path.
    doc_uuid = models.TextField()
    # Short wiki-internal page id (4 chars).
    page_id = models.CharField(max_length=4)
    # Former parent page id.
    # NOTE(review): the SQL schema declares this column `default NULL`
    # (root-level pages have no parent), but the model lacks null=True —
    # confirm the two are in sync.
    parent_page_id = models.CharField(max_length=4)
    # JSON-encoded navigation subtree rooted at this page.
    subpages = models.TextField()
    name = models.CharField(max_length=255)
    # NOTE(review): auto_now_add=True makes Django stamp the row at creation
    # and ignore any delete_time value passed by callers — confirm intended.
    delete_time = models.DateTimeField(auto_now_add=True, blank=False, null=False)
    # Page file size in bytes.
    size = models.BigIntegerField(blank=False, null=False)

    class Meta:
        db_table = 'WikiPageTrash'

    def to_dict(self):
        # Serialized form returned by the trash-listing API.
        return {
            'id': self.pk,
            'repo_id': self.repo_id,
            'doc_uuid': self.doc_uuid,
            'page_id': self.page_id,
            'parent_page_id': self.parent_page_id,
            'subpages': self.subpages,
            'name': self.name,
            'delete_time': self.delete_time,
            'size': self.size
        }
###### signal handlers
from django.dispatch import receiver
from seahub.signals import repo_deleted

View File

@@ -13,6 +13,8 @@ from seaserv import seafile_api
from seahub.constants import PERMISSION_READ_WRITE
from seahub.utils import gen_inner_file_get_url, gen_file_upload_url
from seahub.group.utils import is_group_admin, is_group_member
from seahub.wiki2.models import WikiPageTrash
logger = logging.getLogger(__name__)
@@ -229,6 +231,9 @@ def delete_page(pages, id_set):
new_pages.append(page)
else:
old_pages.append(page)
for page in pages:
if page['id'] in id_set:
pages.remove(page)
return new_pages, old_pages
@@ -264,3 +269,52 @@ def move_nav(navigation, target_id, moved_nav, move_position):
if 'children' in nav:
move_nav(nav['children'], target_id, moved_nav, move_position)
def revert_nav(navigation, parent_page_id, subpages):
    """Re-attach a restored navigation subtree.

    Walks ``navigation`` depth-first looking for the node whose id equals
    ``parent_page_id`` and appends ``subpages`` to its children. When no
    such node exists (e.g. the former parent was itself deleted), the
    subtree is appended at the root level instead.
    """
    def _attach(nodes):
        for node in nodes:
            if node['id'] == parent_page_id:
                # Append to existing children, or start a new children list.
                if node['children']:
                    node['children'].append(subpages)
                else:
                    node['children'] = [subpages]
                return node
            children = node.get('children')
            if children:
                found = _attach(children)
                if found:
                    return found
        return None

    # Fall back to the root level when the parent is not in the tree.
    if _attach(navigation) is None:
        navigation.append(subpages)
def get_sub_ids_by_page_id(subpages, ids):
    """Append the id of every node in ``subpages`` to ``ids``, depth-first
    preorder (a node's id comes before those of its descendants)."""
    pending = list(subpages)
    while pending:
        node = pending.pop(0)
        ids.append(node['id'])
        if 'children' in node:
            # Prepend children so siblings are visited after the subtree.
            pending = list(node['children']) + pending
def get_parent_id_stack(navigation, page_id):
    """Return the ancestor ids of ``page_id``, ordered root-first.

    Depth-first search over ``navigation``; the last element is the direct
    parent. An empty list means the page sits at the root level (or was not
    found at all).
    """
    path = []

    def _dfs(nodes):
        for node in nodes:
            path.append(node['id'])
            if node['id'] == page_id:
                # Found the page itself: drop its own id, keep ancestors.
                path.pop()
                return True
            children = node.get('children')
            if children and _dfs(children):
                return True
            # Dead end: this node is not an ancestor of page_id.
            path.pop()
        return False

    _dfs(navigation)
    return path

View File

@@ -4,6 +4,7 @@ import logging
import posixpath
from datetime import datetime
from constance import config
from seaserv import seafile_api
from django.http import Http404
@@ -15,7 +16,7 @@ from seahub.utils.file_types import SEADOC
from seahub.auth.decorators import login_required
from seahub.wiki2.utils import check_wiki_permission, get_wiki_config
from seahub.utils.repo import get_repo_owner
from seahub.utils.repo import get_repo_owner, is_repo_admin, is_repo_owner, is_group_repo_staff
from seahub.settings import SEADOC_SERVER_URL
# Get an instance of a logger
@@ -31,7 +32,7 @@ def wiki_view(request, wiki_id):
if not wiki:
raise Http404
username = request.user.username
repo_owner = get_repo_owner(request, wiki_id)
wiki.owner = repo_owner
@@ -39,7 +40,7 @@ def wiki_view(request, wiki_id):
file_path = ''
if page_id:
wiki_config = get_wiki_config(wiki.repo_id, request.user.username)
wiki_config = get_wiki_config(wiki.repo_id, username)
pages = wiki_config.get('pages', [])
page_info = next(filter(lambda t: t['id'] == page_id, pages), {})
file_path = page_info.get('path', '')
@@ -49,31 +50,34 @@ def wiki_view(request, wiki_id):
is_page = True
# perm check
req_user = request.user.username
permission = check_wiki_permission(wiki, req_user)
if not check_wiki_permission(wiki, req_user):
permission = check_wiki_permission(wiki, username)
if not check_wiki_permission(wiki, username):
return render_permission_error(request, 'Permission denied.')
latest_contributor = ''
last_modified = 0
file_type, ext = get_file_type_and_ext(posixpath.basename(file_path))
repo = seafile_api.get_repo(wiki.repo_id)
repo_id = wiki.repo_id
repo = seafile_api.get_repo(repo_id)
if is_page and file_type == SEADOC:
try:
dirent = seafile_api.get_dirent_by_path(wiki.repo_id, file_path)
dirent = seafile_api.get_dirent_by_path(repo_id, file_path)
if dirent:
latest_contributor, last_modified = dirent.modifier, dirent.mtime
except Exception as e:
logger.warning(e)
is_admin = is_repo_admin(username, repo_id)
last_modified = datetime.fromtimestamp(last_modified)
return render(request, "wiki/wiki_edit.html", {
"wiki": wiki,
"is_admin": is_admin,
"file_path": file_path,
"repo_name": repo.name if repo else '',
"modifier": latest_contributor,
"modify_time": last_modified,
"seadoc_server_url": SEADOC_SERVER_URL,
"permission": permission
"permission": permission,
"enable_user_clean_trash": config.ENABLE_USER_CLEAN_TRASH
})

View File

@@ -1494,3 +1494,18 @@ CREATE TABLE IF NOT EXISTS `FileTrash` (
PRIMARY KEY (`id`),
KEY `ix_FileTrash_repo_id` (`repo_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;
-- Trash for deleted wiki pages (backs seahub.wiki2.models.WikiPageTrash).
CREATE TABLE `WikiPageTrash` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`repo_id` varchar(36) NOT NULL,            -- wiki library id
`doc_uuid` text NOT NULL,                  -- uuid of the page's sdoc directory
`page_id` varchar(4) NOT NULL,             -- short wiki-internal page id
`parent_page_id` varchar(4) default NULL,  -- NULL when the page was at root level
`subpages` longtext,                       -- JSON navigation subtree of the page
`name` varchar(255) NOT NULL,
`delete_time` datetime NOT NULL,
`size` bigint(20) NOT NULL,                -- page file size in bytes
PRIMARY KEY (`id`),
KEY `ix_WikiPageTrash_repo_id` (`repo_id`) -- supports the per-wiki trash listing
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci;