
Move wiki page import to the seafevents process

This commit is contained in:
孙永强
2025-07-09 10:05:56 +08:00
committed by r350178982
parent aa9d8da30a
commit eeb41b28c1
5 changed files with 141 additions and 94 deletions

View File: frontend/src/components/dialog/import-wiki-page-dialog.js

@@ -0,0 +1,41 @@
import React from 'react';
import PropTypes from 'prop-types';
import { Modal, ModalBody } from 'reactstrap';
import { gettext } from '../../utils/constants';
import SeahubModalHeader from '@/components/common/seahub-modal-header';
import Loading from '../loading';
import '../../css/seahub-io-dialog.css';
const propTypes = {
toggleDialog: PropTypes.func.isRequired,
};
class ImportWikiPageDialog extends React.Component {
constructor(props) {
super(props);
}
toggle = () => {
this.props.toggleDialog();
};
render() {
return (
<Modal className='seahub-io-dialog' isOpen={true} toggle={this.toggle}>
<SeahubModalHeader toggle={this.toggle}>{gettext('Import page')}</SeahubModalHeader>
<ModalBody>
<>
<Loading/>
<div className="seahub-io-dialog-parsing-text">{gettext('Importing page...')}</div>
</>
</ModalBody>
</Modal>
);
}
}
ImportWikiPageDialog.propTypes = propTypes;
export default ImportWikiPageDialog;

View File: frontend/src/pages/wiki2/side-panel.js

@@ -17,7 +17,8 @@ import { DEFAULT_PAGE_NAME } from './constant';
import Wiki2Search from '../../components/search/wiki2-search';
import CommonUndoTool from '../../components/common/common-undo-tool';
import PublishedWikiExtrance from '../../components/published-wiki-entrance';
import { userAPI } from '../../utils/user-api';
import ImportWikiPageDialog from '../../components/dialog/import-wiki-page-dialog';
import './side-panel.css';
const { repoName, publishUrl } = window.wiki.config;
@@ -107,13 +108,51 @@ class SidePanel extends PureComponent {
});
};
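// Poll the seafevents import task once per second until it reports
// is_finished, then close the progress dialog and add the new page.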
queryImportPageStatus = (task_id, task_type, new_page) => {
userAPI.queryIOStatus(task_id, task_type).then(res => {
if (res.data.is_finished === true) {
toaster.success(gettext('Page imported successfully.'));
this.setState({
isShowImportPageDialog: false
});
this.addPage(new_page, '', null, null, true);
} else {
setTimeout(() => {
this.queryImportPageStatus(task_id, task_type, new_page);
}, 1000);
}
}).catch(err => {
this.setState({
isShowImportPageDialog: false
});
toaster.danger(gettext('Failed to import page.'));
});
};
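// Importing is now asynchronous: upload the source file, receive a
// task_id from the server, then poll seafevents until the conversion
// finishes (see queryImportPageStatus above).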
importPage = async (fromPageConfig, successCallback, errorCallback, jumpToNewPage = true) => {
const { from_page_id, file } = fromPageConfig;
let newPage;
let task_id = '';
this.setState({
isShowImportPageDialog: true
});
wikiAPI.importWiki2Page(wikiId, from_page_id, file).then(res => {
const { page_id, name, path, docUuid } = res.data;
-const newPage = new Page({ id: page_id, name, icon: '', path, docUuid });
-this.addPage(newPage, '', successCallback, errorCallback, jumpToNewPage);
-successCallback && successCallback();
task_id = res.data.task_id;
newPage = new Page({ id: page_id, name, icon: '', path, docUuid });
this.setState({
taskId: task_id
});
return userAPI.queryIOStatus(task_id, 'import');
}).then(res => {
if (res.data.is_finished === true) {
this.setState({
isShowImportPageDialog: false
});
this.addPage(newPage, '', successCallback, errorCallback, jumpToNewPage);
} else {
this.queryImportPageStatus(task_id, 'import', newPage);
}
}).catch((error) => {
let errMessage = Utils.getErrorMsg(error);
toaster.danger(errMessage);
@@ -159,6 +198,10 @@ class SidePanel extends PureComponent {
this.setState({ isShowTrashDialog: !this.state.isShowTrashDialog });
};
toggleImportPageDialog = () => {
this.setState({ isShowImportPageDialog: !this.state.isShowImportPageDialog });
};
renderWikiNav = () => {
const { config, onUpdatePage } = this.props;
const { pages, navigation } = config;
@@ -262,6 +305,11 @@ class SidePanel extends PureComponent {
getWikiConfig={this.props.getWikiConfig}
/>
)}
{this.state.isShowImportPageDialog && (
<ImportWikiPageDialog
toggleDialog={this.toggleImportPageDialog}
/>
)}
{wikiPermission === 'rw' &&
<WikiExternalOperations onAddWikiPage={this.onAddWikiPage.bind(this, false)} />
}

View File: frontend/src/utils/wiki-api.js

@@ -250,7 +250,7 @@ class WikiAPI {
importWiki2Page(wikiId, pageId, file) {
const url = this.server + '/api/v2.1/wiki2/' + wikiId + '/import-page/';
let form = new FormData();
-form.append('page_id', pageId);
form.append('from_page_id', pageId);
if (file) {
form.append('file', file);
}
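Note: the request this method builds can be reproduced with any HTTP client. A minimal sketch in Python with requests, assuming a valid user API token (the server URL, token, and ids below are hypothetical placeholders); the endpoint path and form fields come from the diff above:

import requests

SERVER = 'https://seafile.example.com'  # hypothetical server URL
TOKEN = 'xxxx'                          # hypothetical user API token
WIKI_ID = '42'                          # hypothetical wiki id

# Mirrors the FormData built by importWiki2Page: a 'from_page_id' field
# plus the file to import, POSTed to the import-page endpoint.
with open('page.md', 'rb') as fp:
    resp = requests.post(
        f'{SERVER}/api/v2.1/wiki2/{WIKI_ID}/import-page/',
        headers={'Authorization': f'Token {TOKEN}'},
        data={'from_page_id': 'aB3dE6fG'},  # hypothetical existing page id
        files={'file': fp},
    )
resp.raise_for_status()
print(resp.json()['task_id'])  # poll this id until the import finishes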

View File: seahub/api2/endpoints/wiki2.py

@@ -7,7 +7,6 @@ import posixpath
import datetime
import uuid
import re
-import requests
from copy import deepcopy
from constance import config
from urllib.parse import quote
@@ -24,7 +23,7 @@ from django.http import HttpResponse, HttpResponseRedirect
from django.urls import reverse
from seahub.api2.authentication import TokenAuthentication
-from seahub.api2.endpoints.utils import sdoc_export_to_md, convert_file
from seahub.api2.endpoints.utils import sdoc_export_to_md
from seahub.api2.throttling import UserRateThrottle
from seahub.api2.utils import api_error, is_wiki_repo
from seahub.utils.db_api import SeafileDB
@@ -35,11 +34,10 @@ from seahub.wiki2.utils import is_valid_wiki_name, get_wiki_config, WIKI_PAGES_D
check_wiki_admin_permission, check_wiki_permission, get_all_wiki_ids, get_and_gen_page_nav_by_id, \
get_current_level_page_ids, save_wiki_config, gen_unique_id, gen_new_page_nav_by_id, pop_nav, \
delete_page, move_nav, revert_nav, get_sub_ids_by_page_id, get_parent_id_stack, add_convert_wiki_task, \
-import_conflunece_to_wiki
import_conflunece_to_wiki, import_wiki_page
from seahub.utils import is_org_context, get_user_repos, is_pro_version, is_valid_dirent_name, \
-get_no_duplicate_obj_name, HAS_FILE_SEARCH, HAS_FILE_SEASEARCH, gen_file_get_url, get_service_url, \
-gen_file_upload_url
get_no_duplicate_obj_name, HAS_FILE_SEARCH, HAS_FILE_SEASEARCH, gen_file_get_url, get_service_url
if HAS_FILE_SEARCH or HAS_FILE_SEASEARCH:
from seahub.search.utils import search_wikis, ai_search_wikis
@@ -62,7 +60,6 @@ from seahub.constants import PERMISSION_READ_WRITE
from seaserv import ccnet_api
from seahub.share.utils import is_repo_admin
from seahub.group.utils import group_id_to_name
-from seahub.seadoc.apis import batch_upload_sdoc_images
HTTP_520_OPERATION_FAILED = 520
@@ -1583,6 +1580,8 @@ class WikiPageExport(APIView):
export_type = request.GET.get('export_type')
if export_type not in WIKI_PAGE_EXPORT_TYPES:
return api_error(status.HTTP_400_BAD_REQUEST, 'Invalid export type')
# resource check
wiki = Wiki.objects.get(wiki_id=wiki_id)
if not wiki:
error_msg = "Wiki not found."
@@ -1754,12 +1753,13 @@ class Wiki2ImportPageView(APIView):
authentication_classes = (TokenAuthentication, SessionAuthentication)
permission_classes = (IsAuthenticated,)
throttle_classes = (UserRateThrottle,)
def post(self, request, wiki_id):
-page_id = request.data.get('page_id', None)
from_page_id = request.data.get('from_page_id', None)
file = request.data.get('file', None)
-if not page_id:
-error_msg = 'page_id invalid.'
if not from_page_id:
error_msg = 'from_page_id invalid.'
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
if not file:
error_msg = 'file invalid.'
@@ -1775,7 +1775,7 @@ class Wiki2ImportPageView(APIView):
if not wiki:
error_msg = "Wiki not found."
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
repo_id = wiki.repo_id
wiki.owner = get_repo_owner(request, wiki_id)
username = request.user.username
if not check_wiki_admin_permission(wiki, username):
@@ -1786,98 +1786,48 @@ class Wiki2ImportPageView(APIView):
if check_quota(repo_id) < 0:
return api_error(443, _("Out of quota."))
-wiki_config = get_wiki_config(wiki_id, request.user.username)
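# Stage the uploaded file on local disk; the seafevents task receives
# this path as 'file_path' in the payload built below.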
tmp_wiki_path = '/tmp/wiki/page'
if not os.path.exists(tmp_wiki_path):
os.makedirs(tmp_wiki_path)
local_file_path = os.path.join(tmp_wiki_path, filename)
with open(local_file_path, 'wb') as f:
f.write(file.read())
wiki_config = get_wiki_config(repo_id, username)
navigation = wiki_config.get('navigation', [])
page_ids = []
-get_current_level_page_ids(navigation, page_id, page_ids)
get_current_level_page_ids(navigation, from_page_id, page_ids)
pages = wiki_config.get('pages', [])
exist_page_names = [page.get('name') for page in pages if page.get('id') in page_ids]
page_name = os.path.splitext(filename)[0]
page_name = get_no_duplicate_obj_name(page_name, exist_page_names)
-sdoc_uuid = uuid.uuid4()
-parent_dir = os.path.join(WIKI_PAGES_DIR, str(sdoc_uuid))
-file_path = os.path.join(parent_dir, filename)
-if extension == 'docx':
-uuid_filename = f'{filename.split(extension)[0]}sdoc'
-FileUUIDMap.objects.create_fileuuidmap_by_uuid(sdoc_uuid, repo_id, parent_dir, uuid_filename, is_dir=False)
-elif extension == 'md':
-uuid_filename = f'{filename.split(extension)[0]}sdoc'
-FileUUIDMap.objects.create_fileuuidmap_by_uuid(sdoc_uuid, repo_id, parent_dir, uuid_filename, is_dir=False)
sdoc_uuid_str = str(uuid.uuid4())
parent_dir = os.path.join(WIKI_PAGES_DIR, sdoc_uuid_str)
id_set = get_all_wiki_ids(navigation)
new_page_id = gen_unique_id(id_set)
-dir_id = seafile_api.get_dir_id_by_path(repo_id, parent_dir)
-if not dir_id:
-seafile_api.mkdir_with_parents(repo_id, '/', parent_dir.strip('/'), request.user.username)
-obj_id = json.dumps({'parent_dir': parent_dir})
-try:
-token = seafile_api.get_fileserver_access_token(repo_id,
-obj_id, 'upload', request.user.username, use_onetime=False)
-except Exception as e:
-if str(e) == 'Too many files in library.':
-error_msg = _("The number of files in library exceeds the limit")
-return api_error(HTTP_447_TOO_MANY_FILES_IN_LIBRARY, error_msg)
-else:
-logger.error(e)
-error_msg = 'Internal Server Error'
-return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
-if not token:
-error_msg = 'Internal Server Error'
-return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
if extension == 'docx':
uuid_filename = f'{filename.split(extension)[0]}sdoc'
elif extension == 'md':
uuid_filename = f'{filename.split(extension)[0]}sdoc'
-upload_link = gen_file_upload_url(token, 'upload-api')
-upload_link += '?ret-json=1'
-if extension == 'md' or extension == 'docx':
-src_type = 'docx' if extension == 'docx' else 'markdown'
-files = {'file': file}
-data = {'parent_dir': parent_dir, 'replace': 1}
-resp = requests.post(upload_link, files=files, data=data)
-if not resp.ok:
-logger.error('save file: %s failed: %s' % (filename, resp.text))
-return api_error(resp.status_code, resp.content)
-file_id = seafile_api.get_file_id_by_path(repo_id, file_path)
-download_token = seafile_api.get_fileserver_access_token(repo_id, file_id, 'download', username)
-download_url = gen_file_get_url(download_token, filename)
-convert_file(file_path, username, str(sdoc_uuid), download_url, upload_link, src_type, 'sdoc')
-file_path = os.path.join(parent_dir, uuid_filename)
sdoc_file_path = os.path.join(parent_dir, uuid_filename)
-new_page = {
-'id': new_page_id,
-'name': page_name,
-'path': file_path,
-'icon': '',
-'docUuid': str(sdoc_uuid),
-'locked': False
-}
-pages.append(new_page)
-if len(wiki_config) == 0:
-wiki_config['version'] = 1
-new_nav = {
-'id': new_page_id,
-'type': 'page',
-}
-navigation.append(new_nav)
-wiki_config['navigation'] = navigation
-wiki_config['pages'] = pages
-wiki_config = json.dumps(wiki_config)
-save_wiki_config(wiki, request.user.username, wiki_config)
-try:
-# remove tmp md/docx
-if extension in ['md', 'docx']:
-seafile_api.del_file(repo_id, parent_dir,
-json.dumps([filename]), username)
-except SearpcError as e:
-logger.warning(e)
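# Hand conversion and upload off to the seafevents process; the client
# polls the returned task_id until the import finishes.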
task_id = import_wiki_page({
'repo_id': repo_id,
'file_path': local_file_path,
'username': username,
'page_id': new_page_id,
'page_name': page_name,
'sdoc_uuid_str': sdoc_uuid_str,
'parent_dir': parent_dir,
})
return Response({
'page_id': new_page_id,
-'path': file_path,
'path': sdoc_file_path,
'name': page_name,
'icon': '',
-'docUuid': str(sdoc_uuid),
-'locked': False
'docUuid': sdoc_uuid_str,
'task_id': task_id
})
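Because the view now returns a task_id rather than a finished page, callers must poll for completion, as the frontend does at one-second intervals. A minimal, framework-agnostic sketch of that loop (the helper name and the status callable are assumptions, not part of this commit):

import time

def wait_for_task(query_status, task_id, interval=1.0, timeout=300):
    """Poll query_status(task_id) until it reports completion.

    query_status is any callable returning a dict with an 'is_finished'
    key, mirroring the IO-status endpoint the frontend polls.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if query_status(task_id).get('is_finished'):
            return
        time.sleep(interval)
    raise TimeoutError(f'task {task_id} did not finish within {timeout}s')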

View File: seahub/wiki2/utils.py

@@ -361,3 +361,11 @@ def import_conflunece_to_wiki(params):
url = urljoin(SEAFEVENTS_SERVER_URL, '/import-confluence-to-wiki')
resp = requests.post(url, json=params, headers=headers, timeout=30)
return json.loads(resp.content)['task_id']
def import_wiki_page(params):
payload = {'exp': int(time.time()) + 300, }
token = jwt.encode(payload, SECRET_KEY, algorithm='HS256')
headers = {"Authorization": "Token %s" % token}
url = urljoin(SEAFEVENTS_SERVER_URL, '/import-wiki-page')
resp = requests.post(url, json=params, headers=headers, timeout=30)
return json.loads(resp.content)['task_id']
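import_wiki_page authenticates to seafevents with a short-lived HS256 JWT (300-second expiry) signed with the shared SECRET_KEY. A minimal sketch of the check the receiving service would perform, assuming PyJWT and the same shared secret (the helper name is illustrative):

import jwt  # PyJWT

def verify_request_token(auth_header, secret_key):
    # auth_header is the "Token <jwt>" value produced above; jwt.decode
    # checks the HS256 signature and rejects tokens whose 'exp' claim
    # (set to now + 300s by import_wiki_page) has already passed.
    token = auth_header.split('Token ', 1)[1]
    return jwt.decode(token, secret_key, algorithms=['HS256'])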