From 117cc325ce752c1b41fd27c8b45d6d0b35e7c1f0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E5=AD=99=E6=B0=B8=E5=BC=BA?= <11704063+s-yongqiang@user.noreply.gitee.com>
Date: Mon, 23 Jun 2025 16:22:17 +0800
Subject: [PATCH] update ui

---
 .../dialog/import-confluence-dialog.js | 169 ++++++++++++++++--
 frontend/src/pages/wikis/wikis.js      | 129 +++++++------
 frontend/src/utils/wiki-api.js         |   3 +-
 seahub/api2/endpoints/wiki2.py         |  74 ++++++--
 4 files changed, 294 insertions(+), 81 deletions(-)

diff --git a/frontend/src/components/dialog/import-confluence-dialog.js b/frontend/src/components/dialog/import-confluence-dialog.js
index 75a8804437..f61e2c37ae 100644
--- a/frontend/src/components/dialog/import-confluence-dialog.js
+++ b/frontend/src/components/dialog/import-confluence-dialog.js
@@ -1,36 +1,175 @@
 import React from 'react';
 import PropTypes from 'prop-types';
-import { Modal, ModalBody } from 'reactstrap';
-import { gettext } from '../../utils/constants';
-import SeahubModalHeader from '@/components/common/seahub-modal-header';
-import Loading from '../loading';
-
-import '../../css/seahub-io-dialog.css';
+import { Button, Modal, ModalBody, ModalFooter, Label, Alert } from 'reactstrap';
+import SeahubModalHeader from '../common/seahub-modal-header';
+import { gettext, isPro } from '../../utils/constants';
+import wikiAPI from '../../utils/wiki-api';
+import { Utils } from '../../utils/utils';
+import toaster from '../toast';
+import { SeahubSelect } from '../common/select';
 
 const propTypes = {
-  toggleDialog: PropTypes.func.isRequired,
+  toggleCancel: PropTypes.func.isRequired,
+  importConfluence: PropTypes.func.isRequired,
+  currentDeptID: PropTypes.string,
 };
 
 class ImportConfluenceDialog extends React.Component {
+
   constructor(props) {
     super(props);
+    this.state = {
+      name: '',
+      isSubmitBtnActive: false,
+      selectedOption: null,
+      options: [],
+      selectedFile: null,
+      isUploading: false
+    };
+    this.fileInputRef = React.createRef();
   }
 
-  toggle = () => {
-    this.props.toggleDialog();
+  componentDidMount() {
+    if (!isPro) return;
+    wikiAPI.listWikiDepartments().then(res => {
+      const departments = res.data.sort((a, b) => {
+        return a.name.toLowerCase() < b.name.toLowerCase() ? -1 : 1;
+      });
+      let options = [];
+      for (let i = 0; i < departments.length; i++) {
+        let obj = {};
+        obj.value = departments[i].name;
+        obj.id = departments[i].id;
+        obj.email = departments[i].email;
+        obj.label = departments[i].name;
+        options.push(obj);
+      }
+      this.setState({ options });
+      if (this.props.currentDeptID) {
+        const selectedOption = options.find(op => op.id == this.props.currentDeptID);
+        this.setState({ selectedOption });
+      }
+    }).catch(error => {
+      let errMessage = Utils.getErrorMsg(error);
+      toaster.danger(errMessage);
+    });
+  }
+
+  handleKeyDown = (e) => {
+    if (e.key === 'Enter') {
+      this.handleSubmit();
+    }
   };
 
+  handleFileChange = (e) => {
+    const file = e.target.files[0];
+    if (!file) return;
+
+    if (!file.name.endsWith('.html.zip')) {
+      toaster.danger(gettext('Please select a valid Confluence HTML export file (.html.zip)'));
+      return;
+    }
+
+    this.setState({
+      selectedFile: file,
+      name: file.name.replace('.html.zip', '')
+    });
+  };
+
+  handleSubmit = () => {
+    if (!this.state.selectedFile) {
+      toaster.danger(gettext('Please select a Confluence export file'));
+      return;
+    }
+
+    this.setState({ isUploading: true });
+    let departmentID = this.state.selectedOption ? this.state.selectedOption.id : '';
+
+    this.props.importConfluence(this.state.selectedFile, departmentID)
+      .then((res) => {
+        toaster.success(gettext('Successfully imported Confluence data'));
+        this.props.toggleCancel();
+      })
+      .catch((error) => {
+        let errorMsg = Utils.getErrorMsg(error);
+        toaster.danger(errorMsg || gettext('Failed to import Confluence data'));
+      })
+      .finally(() => {
+        this.setState({ isUploading: false });
+      });
+  };
+
+  triggerFileInput = () => {
+    this.fileInputRef.current.click();
+  };
+
+  toggle = () => {
+    this.props.toggleCancel();
+  };
+
+  handleSelectChange = (option) => {
+    this.setState({ selectedOption: option });
+  };
 
   render() {
+    const { selectedFile, isUploading } = this.state;
+
     return (
-      <Modal isOpen={true} toggle={this.toggle} className="seahub-io-dialog">
-        <SeahubModalHeader toggle={this.toggle}>{gettext('Import Confluence')}</SeahubModalHeader>
-        <ModalBody>
-          <>
-            <Loading />
-            <div className="d-flex justify-content-center mt-2">
-              {gettext('Importing Confluence...')}
-            </div>
-          </>
-        </ModalBody>
-      </Modal>
+      <Modal isOpen={true} toggle={this.toggle}>
+        <SeahubModalHeader toggle={this.toggle}>{gettext('Import Confluence Wiki')}</SeahubModalHeader>
+        <ModalBody>
+          <input
+            type="file"
+            accept=".zip,.html.zip"
+            className="d-none"
+            ref={this.fileInputRef}
+            onChange={this.handleFileChange}
+          />
+          <Button color="secondary" onClick={this.triggerFileInput}>{gettext('Select file')}</Button>
+          <span className="ml-2">{selectedFile ? selectedFile.name : gettext('No file selected')}</span>
+          <p className="tip mt-2">
+            {gettext('Please select a Confluence HTML export file (.html.zip)')}
+          </p>
+          {isPro &&
+            <>
+              <Label className="mt-2">{gettext('Wiki owner')}</Label>
+              <SeahubSelect
+                onChange={this.handleSelectChange}
+                options={this.state.options}
+                placeholder={gettext('Select a department')}
+                value={this.state.selectedOption}
+                noOptionsMessage={() => {return gettext('No options available');}}
+              />
+            </>
+          }
+          {selectedFile &&
+            <Alert color="info" className="mt-3">
+              {gettext('The import process may take several minutes depending on the size of your Confluence export.')}
+            </Alert>
+          }
+        </ModalBody>
+        <ModalFooter>
+          <Button color="secondary" onClick={this.toggle}>{gettext('Cancel')}</Button>
+          <Button color="primary" onClick={this.handleSubmit} disabled={!selectedFile || isUploading}>
+            {isUploading ? gettext('Importing...') : gettext('Submit')}
+          </Button>
+        </ModalFooter>
+      </Modal>
     );
   }
 }
 
diff --git a/frontend/src/pages/wikis/wikis.js b/frontend/src/pages/wikis/wikis.js
index 9db583b5ed..cddc809cfb 100644
--- a/frontend/src/pages/wikis/wikis.js
+++ b/frontend/src/pages/wikis/wikis.js
@@ -102,53 +102,64 @@ class Wikis extends Component {
     }
   };
 
-  toggleImportConfluenceDialog = (value) => {
-    if (value == false) {
+  toggleImportConfluenceDialog = (currentDeptID) => {
+    if (this.state.isShowImportConfluenceDialog) {
       this.setState({
         isShowImportConfluenceDialog: false,
-      });
-    } else if (value == true) {
-      this.setState({
-        isShowImportConfluenceDialog: true,
+        currentDeptID: '',
       });
     } else {
       this.setState({
-        isShowImportConfluenceDialog: !this.state.isShowImportConfluenceDialog,
+        isShowImportConfluenceDialog: true,
+        currentDeptID
      });
    }
  };

-  importWikiFromConfluenceZip = () => {
-    const fileInput = document.createElement('input');
-    fileInput.type = 'file';
-    fileInput.accept = '.zip';
-    fileInput.addEventListener('change', (event) => {
-      const file = event.target.files[0];
-      if (!file) return;
+  importConfluence = (file, currentDeptID) => {
+    if (!file) return;
+    if (!file.name.endsWith('.html.zip')) {
+      toaster.danger(gettext('Please select a valid Confluence HTML export file (.html.zip)'));
+      return;
+    }
 
-      if (file.type !== 'application/zip' && !file.name.endsWith('.zip')) {
-        toaster.danger(gettext('Please select a valid ZIP file'));
-        return;
-      }
-      this.toggleImportConfluenceDialog(true);
-      wikiAPI.importConfluence(file).then((res) => {
-        let wikis = this.state.wikis.slice(0);
-        let new_wiki = res.data;
-        new_wiki['version'] = 'v2';
-        new_wiki['admins'] = new_wiki.group_admins;
+    return wikiAPI.importConfluence(file, currentDeptID).then((res) => {
+      let wikis = this.state.wikis.slice(0);
+      let groupWikis = this.state.groupWikis;
+      let new_wiki = res.data;
+      new_wiki['version'] = 'v2';
+      new_wiki['admins'] = new_wiki.group_admins;
+      let findGroup = false;
+      if (currentDeptID) {
+        groupWikis.filter(group => {
+          if (group.group_id === currentDeptID) {
+            group.wiki_info.push(new_wiki);
+            findGroup = true;
+          }
+          return group;
+        });
+        if (findGroup) {
+          this.setState({
+            groupWikis: groupWikis,
+          });
+        } else {
+          groupWikis.push({
+            group_id: currentDeptID,
+            group_name: new_wiki.group_name,
+            wiki_info: [new_wiki],
+          });
+          this.setState({
+            groupWikis: groupWikis,
+          });
+        }
+      } else {
         wikis.unshift(new_wiki);
         this.setState({
-          wikis,
+          wikis: wikis,
         });
-        toaster.success(gettext('Successfully uploaded Confluence data'));
-      }).catch((error) => {
-        let errorMsg = Utils.getErrorMsg(error);
-        toaster.danger(errorMsg || gettext('Failed to upload Confluence data'));
-      }).finally(() => {
-        this.toggleImportConfluenceDialog(false);
-      });
+      }
+      return res;
     });
-    fileInput.click();
   };
 
   addWiki = (wikiName, currentDeptID) => {
@@ -165,14 +176,18 @@
           }
           return group;
         });
+        this.setState({
+          currentDeptID: '',
+          groupWikis,
+        });
       } else {
         wikis.push(new_wiki);
+        wikis.unshift(new_wiki);
+        this.setState({
+          wikis,
+        });
       }
-      this.setState({
-        wikis,
-        currentDeptID: '',
-        groupWikis,
-      });
+
     }).catch((error) => {
       if (error.response) {
         let errMessage = Utils.getErrorMsg(error);
@@ -182,6 +197,11 @@
   };
 
   deleteWiki = (wiki) => {
+    const owner = wiki.owner;
+    let isGroupWiki = false;
+    if (owner.includes('@seafile_group')) {
+      isGroupWiki = true;
+    }
     if (wiki.version === 'v1') {
       wikiAPI.deleteWiki(wiki.id).then(() => {
         let wikis = this.state.wikis.filter(item => {
@@ -203,17 +223,22 @@
       });
     } else {
       wikiAPI.deleteWiki2(wiki.id).then(() => {
-        let wikis = this.state.wikis.filter(item => {
-          return item.id !== wiki.id;
-        });
-        let groupWikis = this.state.groupWikis.filter(group => {
-          group.wiki_info = group.wiki_info.filter(item => item.name !== wiki.name);
-          return group;
-        });
-        this.setState({
-          wikis: wikis,
-          groupWikis: groupWikis,
-        });
+        if (isGroupWiki) {
+          let groupWikis = this.state.groupWikis.filter(group => {
+            group.wiki_info = group.wiki_info.filter(item => item.name !== wiki.name);
+            return group;
+          });
+          this.setState({
+            groupWikis: groupWikis,
+          });
+        } else {
+          let wikis = this.state.wikis.filter(item => {
+            return item.id !== wiki.id;
+          });
+          this.setState({
+            wikis: wikis,
+          });
+        }
       }).catch((error) => {
         if (error.response) {
           let errorMsg = error.response.data.error_msg;
@@ -378,7 +403,9 @@
         {this.state.isShowImportConfluenceDialog &&
-          <ImportConfluenceDialog toggleDialog={this.toggleImportConfluenceDialog} />
+          <ImportConfluenceDialog toggleCancel={this.toggleImportConfluenceDialog}
+            importConfluence={this.importConfluence}
+            currentDeptID={this.state.currentDeptID} />
         }
@@ -392,7 +419,7 @@
               withPlusIcon={true}
               opList={[
                 { 'text': gettext('Add Wiki'), 'onClick': () => this.toggleAddWikiDialog() },
-                { 'text': gettext('Import Confluence'), 'onClick': () => this.importWikiFromConfluenceZip() }
+                { 'text': gettext('Import Confluence'), 'onClick': () => this.toggleImportConfluenceDialog() }
               ]}
             />
           }
diff --git a/frontend/src/utils/wiki-api.js b/frontend/src/utils/wiki-api.js
index 51a4426b49..3b9ddc85d6 100644
--- a/frontend/src/utils/wiki-api.js
+++ b/frontend/src/utils/wiki-api.js
@@ -302,10 +302,11 @@ class WikiAPI {
     return this._sendPostRequest(url, form);
   }
 
-  importConfluence(file) {
+  importConfluence(file, departmentID) {
     const url = this.server + '/api/v2.1/import-confluence/';
     const formData = new FormData();
     formData.append('file', file);
+    formData.append('group_id', departmentID);
     return this._sendPostRequest(url, formData);
   }
 
diff --git a/seahub/api2/endpoints/wiki2.py b/seahub/api2/endpoints/wiki2.py
index 7e61df38be..4b6983f12a 100644
--- a/seahub/api2/endpoints/wiki2.py
+++ b/seahub/api2/endpoints/wiki2.py
@@ -66,6 +66,7 @@ from seahub.constants import PERMISSION_READ_WRITE
 from seaserv import ccnet_api
 from seahub.share.utils import is_repo_admin
 from seahub.api2.endpoints.utils import confluence_to_wiki
+from seahub.group.utils import group_id_to_name
 
 HTTP_520_OPERATION_FAILED = 520
 
@@ -1658,7 +1659,15 @@ class ImportConfluenceView(APIView):
 
         if not request.user.permissions.can_create_wiki():
             return api_error(status.HTTP_403_FORBIDDEN, 'You do not have permission to create wiki.')
-
+        group_id = request.data.get('group_id', None)
+        if not group_id:
+            wiki_owner = request.user.username
+        else:
+            try:
+                group_id = int(group_id)
+                wiki_owner = "%s@seafile_group" % group_id
+            except:
+                return api_error(status.HTTP_400_BAD_REQUEST, 'group_id invalid')
         # create wiki
         org_id = -1
         if is_org_context(request):
@@ -1673,10 +1682,43 @@
         # The manually exported file name is Confluence-space-export-id.html.zip
         wiki_name = filename[:-len('.html.zip')]
         space_key = filename[len('Confluence-space-export-'):-len('.html.zip')]
-        if org_id and org_id > 0:
-            repo_id = seafile_api.create_org_repo(wiki_name, '', username, org_id)
+
+        permission = PERMISSION_READ_WRITE
+        if group_id:
+            group_id = int(group_id)
+            # only group admin can create wiki
+            if not is_group_admin(group_id, request.user.username):
+                error_msg = 'Permission denied.'
+                return api_error(status.HTTP_403_FORBIDDEN, error_msg)
+
+            group_quota = seafile_api.get_group_quota(group_id)
+            group_quota = int(group_quota)
+            if group_quota <= 0 and group_quota != -2:
+                error_msg = 'No group quota.'
+                return api_error(status.HTTP_403_FORBIDDEN, error_msg)
+
+            # create group owned repo
+            password = None
+            if is_pro_version() and ENABLE_STORAGE_CLASSES:
+                if STORAGE_CLASS_MAPPING_POLICY in ('USER_SELECT', 'ROLE_BASED'):
+                    storage_id = None
+                    repo_id = seafile_api.add_group_owned_repo(group_id,
+                                                               wiki_name,
+                                                               permission,
+                                                               password,
+                                                               enc_version=ENCRYPTED_LIBRARY_VERSION,
+                                                               storage_id=storage_id)
+                else:
+                    repo_id = SeafileAPI.add_group_owned_repo(
+                        group_id, wiki_name, password, permission, org_id=org_id)
+            else:
+                repo_id = SeafileAPI.add_group_owned_repo(
+                    group_id, wiki_name, password, permission, org_id=org_id)
         else:
-            repo_id = seafile_api.create_repo(wiki_name, '', username)
+            if org_id and org_id > 0:
+                repo_id = seafile_api.create_org_repo(wiki_name, '', username, org_id)
+            else:
+                repo_id = seafile_api.create_repo(wiki_name, '', username)
 
         try:
             seafile_db_api = SeafileDB()
@@ -1710,14 +1752,15 @@
             if not os.path.exists(extract_dir):
                 extract_dir = self._extract_html_zip(file, space_key)
             sdoc_output_dir = self._download_sdoc_files(repo_id, space_key, username)
-            sdoc_files = list(Path(sdoc_output_dir).glob('*.sdoc'))
-            self._process_zip_file(wiki, extract_dir, sdoc_files, cf_id_to_cf_title_map, username)
-            # delete server tmp dir
-            seafile_api.del_file(repo_id, '/',
-                                 json.dumps(['tmp']), username)
-            # clean repo trash
-            seafile_api.clean_up_repo_history(repo_id, 0)
-            shutil.rmtree(extract_dir)
+            if sdoc_output_dir:
+                sdoc_files = list(Path(sdoc_output_dir).glob('*.sdoc'))
+                self._process_zip_file(wiki, extract_dir, sdoc_files, cf_id_to_cf_title_map, username)
+                # delete server tmp dir
+                seafile_api.del_file(repo_id, '/',
+                                     json.dumps(['tmp']), username)
+                # clean repo trash
+                seafile_api.clean_up_repo_history(repo_id, 0)
+                shutil.rmtree(extract_dir)
         except Exception as e:
             logger.error(e)
             if os.path.exists(extract_dir):
@@ -1725,10 +1768,11 @@
             return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')
 
         repo = seafile_api.get_repo(repo_id)
-        wiki = Wiki(repo, username)
+        wiki = Wiki(repo, wiki_owner)
         wiki_info = wiki.to_dict()
         wiki_info['owner_nickname'] = email2nickname(wiki.owner)
-
+        if group_id:
+            wiki_info['group_name'] = group_id_to_name(group_id)
         return Response(wiki_info)
 
     def _upload_zip_file(self, repo_id, zip_file, username):
@@ -1792,6 +1836,8 @@
     def _download_sdoc_files(self, repo_id, space_key, username):
         server_wiki_tmp_sdoc_output_dir = 'tmp/sdoc_archive.zip'
         file_id = seafile_api.get_file_id_by_path(repo_id, server_wiki_tmp_sdoc_output_dir)
+        if not file_id:
+            return None
         download_token = seafile_api.get_fileserver_access_token(repo_id, file_id,
                                                                  'download', username)
         download_url = gen_file_get_url(download_token, 'sdoc_archive.zip')
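
Note on the updated endpoint: the dialog now submits the selected export file together with an optional department (group) id, and wikiAPI.importConfluence(file, departmentID) forwards both as multipart form fields ('file' and 'group_id') to /api/v2.1/import-confluence/. Below is a minimal sketch of the equivalent raw request; it assumes seahub's standard Token authentication header and a browser File object, and the helper name is illustrative, not part of this patch.

    // Sketch: POST a Confluence export directly to the new endpoint.
    // Mirrors wikiAPI.importConfluence(file, departmentID): the backend reads
    // the 'file' part and an optional 'group_id' form field.
    async function importConfluenceExport(serverUrl, authToken, file, groupId) {
      const formData = new FormData();
      formData.append('file', file);            // Confluence-space-export-<key>.html.zip
      if (groupId) {
        formData.append('group_id', groupId);   // omit for a personal (non-department) wiki
      }
      const response = await fetch(serverUrl + '/api/v2.1/import-confluence/', {
        method: 'POST',
        headers: { 'Authorization': 'Token ' + authToken },  // assumed auth scheme
        body: formData
      });
      if (!response.ok) {
        throw new Error('Import failed: ' + response.status);
      }
      return response.json();  // wiki info; includes group_name when group_id was sent
    }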
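
Note on the filename convention: the backend derives both the wiki name and the Confluence space key from the export's filename, which it expects to follow the pattern Confluence-space-export-<spaceKey>.html.zip. The sketch below mirrors the slicing the view performs; the function name and the example key are illustrative only.

    // Sketch: mirror the backend's wiki_name / space_key extraction.
    function parseConfluenceExportName(filename) {
      const suffix = '.html.zip';
      const prefix = 'Confluence-space-export-';
      if (!filename.endsWith(suffix)) {
        throw new Error('Not a Confluence HTML export (.html.zip)');
      }
      return {
        wikiName: filename.slice(0, -suffix.length),             // filename[:-len('.html.zip')]
        spaceKey: filename.slice(prefix.length, -suffix.length)  // filename[len(prefix):-len(suffix)]
      };
    }
    // Example: parseConfluenceExportName('Confluence-space-export-DOC.html.zip')
    //   -> { wikiName: 'Confluence-space-export-DOC', spaceKey: 'DOC' }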
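
Note on the owner convention: when a group id is supplied, the backend records the wiki owner as "<group_id>@seafile_group", and the frontend's deleteWiki now checks that marker to decide whether to update the personal list or the department list. A small illustrative check (same logic as the patch, hypothetical helper name):

    // Sketch: distinguish personal wikis from department (group) wikis by owner.
    function isGroupWikiOwner(owner) {
      return typeof owner === 'string' && owner.includes('@seafile_group');
    }
    // e.g. isGroupWikiOwner('12@seafile_group')  -> true
    //      isGroupWikiOwner('user@example.com')  -> false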