mirror of https://github.com/haiwen/seahub.git synced 2025-09-16 15:19:06 +00:00

Merge pull request #7926 from haiwen/import-confluence2wiki

Import Confluence v1
Ranjiwei authored on 2025-07-07 16:43:54 +08:00; committed by GitHub.
10 changed files with 459 additions and 27 deletions

View File

@@ -0,0 +1,180 @@
import React from 'react';
import PropTypes from 'prop-types';
import { Button, Modal, ModalBody, ModalFooter, Label, Alert } from 'reactstrap';
import SeahubModalHeader from '../common/seahub-modal-header';
import { gettext, isPro } from '../../utils/constants';
import wikiAPI from '../../utils/wiki-api';
import { Utils } from '../../utils/utils';
import toaster from '../toast';
import { SeahubSelect } from '../common/select';
const propTypes = {
toggleCancel: PropTypes.func.isRequired,
importConfluence: PropTypes.func.isRequired,
currentDeptID: PropTypes.string,
};
class ImportConfluenceDialog extends React.Component {
constructor(props) {
super(props);
this.state = {
name: '',
isSubmitBtnActive: false,
selectedOption: null,
options: [],
selectedFile: null,
isUploading: false
};
this.fileInputRef = React.createRef();
}
componentDidMount() {
if (!isPro) return;
wikiAPI.listWikiDepartments().then(res => {
const departments = res.data.sort((a, b) => {
return a.name.toLowerCase() < b.name.toLowerCase() ? -1 : 1;
});
let options = [];
for (let i = 0 ; i < departments.length; i++) {
let obj = {};
obj.value = departments[i].name;
obj.id = departments[i].id;
obj.email = departments[i].email;
obj.label = departments[i].name;
options.push(obj);
}
this.setState({ options });
if (this.props.currentDeptID) {
const selectedOption = options.find(op => op.id == this.props.currentDeptID);
this.setState({ selectedOption });
}
}).catch(error => {
let errMessage = Utils.getErrorMsg(error);
toaster.danger(errMessage);
});
}
handleKeyDown = (e) => {
if (e.key === 'Enter') {
this.handleSubmit();
}
};
handleFileChange = (e) => {
const file = e.target.files[0];
if (!file) return;
if (!file.name.endsWith('.html.zip')) {
toaster.danger(gettext('Please select a valid Confluence HTML export file (.html.zip)'));
return;
}
this.setState({
selectedFile: file,
name: file.name.replace('.html.zip', '')
});
};
handleSubmit = () => {
if (!this.state.selectedFile) {
toaster.danger(gettext('Please select a Confluence export file'));
return;
}
this.setState({ isUploading: true });
let departmentID = this.state.selectedOption ? this.state.selectedOption.id : '';
this.props.importConfluence(this.state.selectedFile, departmentID)
.then((res) => {
toaster.success(gettext('Successfully imported Confluence data'));
this.props.toggleCancel();
})
.catch((error) => {
let errorMsg = Utils.getErrorMsg(error);
toaster.danger(errorMsg || gettext('Failed to import Confluence data'));
})
.finally(() => {
this.setState({ isUploading: false });
});
};
triggerFileInput = () => {
this.fileInputRef.current.click();
};
toggle = () => {
this.props.toggleCancel();
};
handleSelectChange = (option) => {
this.setState({ selectedOption: option });
};
render() {
const { selectedFile, isUploading } = this.state;
return (
<Modal isOpen={true} autoFocus={false} toggle={this.toggle}>
<SeahubModalHeader toggle={this.toggle}>{gettext('Import Confluence Wiki')}</SeahubModalHeader>
<ModalBody>
<Label>{gettext('Confluence Export File')}</Label>
<div className="d-flex align-items-center">
<input
type="file"
ref={this.fileInputRef}
style={{ display: 'none' }}
accept=".zip"
onChange={this.handleFileChange}
/>
<Button color="primary" onClick={this.triggerFileInput} disabled={isUploading}>
{gettext('Select File')}
</Button>
<span className="ml-2">
{selectedFile ? selectedFile.name : gettext('No file selected')}
</span>
</div>
<small className="form-text text-muted">
{gettext('Please select a Confluence HTML export file (.html.zip)')}
</small>
<br />
{isPro &&
<>
<Label className='mt-4'>{gettext('Wiki owner')} ({gettext('Optional')})</Label>
<SeahubSelect
onChange={this.handleSelectChange}
options={this.state.options}
hideSelectedOptions={true}
placeholder={gettext('Select a department')}
maxMenuHeight={200}
value={this.state.selectedOption}
noOptionsMessage={() => {return gettext('No options available');}}
/>
</>
}
{selectedFile &&
<Alert color="info" className="mt-3">
{gettext('The import process may take several minutes depending on the size of your Confluence export.')}
</Alert>
}
</ModalBody>
<ModalFooter>
<Button color="secondary" onClick={this.toggle} disabled={isUploading}>{gettext('Cancel')}</Button>
<Button
color="primary"
onClick={this.handleSubmit}
disabled={!this.state.selectedFile || !this.state.name.trim() || isUploading}
>
{isUploading ? gettext('Importing...') : gettext('Import')}
</Button>
</ModalFooter>
</Modal>
);
}
}
ImportConfluenceDialog.propTypes = propTypes;
export default ImportConfluenceDialog;
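
Note that the dialog's validation is purely client-side and minimal: it accepts only filenames ending in .html.zip and pre-fills the wiki name by stripping that suffix (the server re-checks the extension and size later in this patch). A rough Python equivalent of that rule, for illustration only:

def default_wiki_name(filename):
    # Mirrors handleFileChange above: reject anything that is not a
    # Confluence HTML export, then derive the default name from the filename.
    suffix = '.html.zip'
    if not filename.endswith(suffix):
        raise ValueError('Please select a valid Confluence HTML export file (.html.zip)')
    return filename[:-len(suffix)]

# default_wiki_name('MySpace_123456.html.zip') -> 'MySpace_123456'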

View File

@@ -12,6 +12,7 @@ import { seafileAPI } from '../../utils/seafile-api';
import { userAPI } from '../../utils/user-api';
import WikiConvertStatusDialog from '../../components/dialog/wiki-convert-status-dialog';
import SingleDropdownToolbar from '../../components/toolbar/single-dropdown-toolbar';
import ImportConfluenceDialog from '../../components/dialog/import-confluence-dialog';
const propTypes = {
@@ -31,6 +32,7 @@ class Wikis extends Component {
isShowAddWikiMenu: false,
isShowAddDialog: false,
isShowConvertStatusDialog: false,
isShowImportConfluenceDialog: false,
};
}
@@ -100,6 +102,75 @@ class Wikis extends Component {
}
};
toggleImportConfluenceDialog = (currentDeptID) => {
if (this.state.isShowImportConfluenceDialog) {
this.setState({
isShowImportConfluenceDialog: false,
currentDeptID: '',
});
} else {
this.setState({
isShowImportConfluenceDialog: true,
currentDeptID
});
}
};
queryImportConfluenceStatus = (task_id, task_type) => {
userAPI.queryIOStatus(task_id, task_type).then(res => {
if (res.data.is_finished === true) {
toaster.success('Import confluence success.');
this.setState({
isShowImportConfluenceDialog: false
});
this.getWikis();
} else {
setTimeout(() => {
this.queryImportConfluenceStatus(task_id, task_type);
}, 1000);
}
}).catch(err => {
this.setState({
isShowImportConfluenceDialog: false
});
toaster.danger(gettext('Failed to import confluence. '));
});
};
importConfluence = (file, currentDeptID) => {
let task_id = '';
wikiAPI.importConfluence(file, currentDeptID).then((res) => {
task_id = res.data.task_id;
this.setState({
taskId: task_id
});
return userAPI.queryIOStatus(task_id, 'import');
}).then(res => {
if (res.data.is_finished === true) {
this.setState({
isShowImportConfluenceDialog: false
});
this.getWikis();
} else {
this.queryImportConfluenceStatus(task_id, 'import');
}
}).catch(error => {
if (error.response && error.response.status === 500) {
const error_msg = error.response.data ? error.response.data['error_msg'] : null;
if (error_msg && error_msg !== 'Internal Server Error') {
toaster.danger(error_msg);
} else {
toaster.danger(gettext('Internal Server Error'));
}
} else {
let errMessage = Utils.getErrorMsg(error);
toaster.danger(errMessage);
}
this.toggleImportConfluenceDialog();
});
};
addWiki = (wikiName, currentDeptID) => {
wikiAPI.addWiki2(wikiName, currentDeptID).then((res) => {
let wikis = this.state.wikis.slice(0);
@@ -114,14 +185,18 @@ class Wikis extends Component {
}
return group;
});
+ this.setState({
+ currentDeptID: '',
+ groupWikis,
+ });
} else {
- wikis.push(new_wiki);
+ wikis.unshift(new_wiki);
+ this.setState({
+ wikis,
+ });
}
- this.setState({
- wikis,
- currentDeptID: '',
- groupWikis,
- });
}).catch((error) => {
if (error.response) {
let errMessage = Utils.getErrorMsg(error);
@@ -131,6 +206,11 @@ class Wikis extends Component {
};
deleteWiki = (wiki) => {
const owner = wiki.owner;
let isGroupWiki = false;
if (owner.includes('@seafile_group')) {
isGroupWiki = true;
}
if (wiki.version === 'v1') {
wikiAPI.deleteWiki(wiki.id).then(() => {
let wikis = this.state.wikis.filter(item => {
@@ -152,17 +232,22 @@ class Wikis extends Component {
});
} else {
wikiAPI.deleteWiki2(wiki.id).then(() => {
- let wikis = this.state.wikis.filter(item => {
- return item.id !== wiki.id;
- });
- let groupWikis = this.state.groupWikis.filter(group => {
- group.wiki_info = group.wiki_info.filter(item => item.name !== wiki.name);
- return group;
- });
- this.setState({
- wikis: wikis,
- groupWikis: groupWikis,
- });
+ if (isGroupWiki) {
+ let groupWikis = this.state.groupWikis.filter(group => {
+ group.wiki_info = group.wiki_info.filter(item => item.name !== wiki.name);
+ return group;
+ });
+ this.setState({
+ groupWikis: groupWikis,
+ });
+ } else {
+ let wikis = this.state.wikis.filter(item => {
+ return item.id !== wiki.id;
+ });
+ this.setState({
+ wikis: wikis,
+ });
+ }
}).catch((error) => {
if (error.response) {
let errorMsg = error.response.data.error_msg;
@@ -324,6 +409,15 @@ class Wikis extends Component {
/>
</ModalPortal>
}
{this.state.isShowImportConfluenceDialog &&
<ModalPortal>
<ImportConfluenceDialog
toggleCancel={this.toggleImportConfluenceDialog}
importConfluence={this.importConfluence}
currentDeptID={this.state.currentDeptID}
/>
</ModalPortal>
}
<div className="main-panel-center">
<div className="cur-view-container" id="wikis">
<div className="cur-view-path">
@@ -332,7 +426,10 @@ class Wikis extends Component {
{canCreateWiki &&
<SingleDropdownToolbar
withPlusIcon={true}
- opList={[{ 'text': gettext('Add Wiki'), 'onClick': () => this.toggleAddWikiDialog() }]}
+ opList={[
+ { 'text': gettext('Add Wiki'), 'onClick': () => this.toggleAddWikiDialog() },
+ { 'text': gettext('Import Confluence'), 'onClick': () => this.toggleImportConfluenceDialog() }
+ ]}
/>
}
</div>
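
Taken together, the page logic above is: upload the export, keep the returned task_id, then poll the IO-status endpoint roughly once per second until is_finished is true, and finally refresh the wiki list. A minimal stand-alone sketch of the same polling loop, assuming a Seahub API token and base URL (SERVER and TOKEN are placeholders, not part of this patch):

import time

import requests

SERVER = 'https://seafile.example.com'  # assumption: your Seahub base URL
TOKEN = 'your-api-token'                # assumption: a valid Seahub API token

def wait_for_confluence_import(task_id, interval=1, timeout=600):
    # Mirrors queryImportConfluenceStatus above: poll the IO-status endpoint
    # with task_type=import until the task reports is_finished.
    url = SERVER + '/api/v2.1/query-io-status/'
    headers = {'Authorization': 'Token ' + TOKEN}
    deadline = time.time() + timeout
    while time.time() < deadline:
        resp = requests.get(url, params={'task_id': task_id, 'task_type': 'import'},
                            headers=headers)
        resp.raise_for_status()
        if resp.json().get('is_finished'):
            return True
        time.sleep(interval)
    raise TimeoutError('Confluence import task %s did not finish in time' % task_id)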

View File

@@ -43,8 +43,11 @@ class UserAPI {
return this.req.post(url, data);
}
- queryIOStatus(task_id) {
- const url = this.server + '/api/v2.1/query-io-status/?task_id=' + task_id;
+ queryIOStatus(task_id, task_type) {
+ let url = this.server + '/api/v2.1/query-io-status/?task_id=' + task_id;
+ if (task_type == 'import') {
+ url = url + '&task_type=import';
+ }
return this.req.get(url);
}

View File

@@ -301,6 +301,15 @@ class WikiAPI {
}
return this._sendPostRequest(url, form);
}
importConfluence(file, departmentID) {
const url = this.server + '/api/v2.1/import-confluence/';
const formData = new FormData();
formData.append('file', file);
formData.append('group_id', departmentID);
return this._sendPostRequest(url, formData);
}
}
let wikiAPI = new WikiAPI();
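
importConfluence is an ordinary multipart POST with two fields, file and group_id. For reference, a hedged sketch of the equivalent request made outside the browser with Python requests (the token and file path are placeholders; the URL and field names are taken from the code above):

import requests

SERVER = 'https://seafile.example.com'  # assumption: your Seahub base URL
TOKEN = 'your-api-token'                # assumption: a valid Seahub API token

def import_confluence(zip_path, group_id=''):
    # Same two fields the FormData above carries: the export file and an
    # optional group_id for a department-owned wiki.
    url = SERVER + '/api/v2.1/import-confluence/'
    headers = {'Authorization': 'Token ' + TOKEN}
    with open(zip_path, 'rb') as f:
        resp = requests.post(url, headers=headers,
                             files={'file': f},
                             data={'group_id': group_id})
    resp.raise_for_status()
    return resp.json()['task_id']

# task_id = import_confluence('MySpace_123456.html.zip')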

View File

@@ -7,7 +7,7 @@ from rest_framework.views import APIView
from rest_framework import status
from seahub.api2.authentication import TokenAuthentication
- from seahub.api2.endpoints.utils import event_export_status
+ from seahub.api2.endpoints.utils import event_export_status, event_import_status
from seahub.api2.permissions import IsProVersion
from seahub.api2.throttling import UserRateThrottle
from seahub.api2.utils import api_error
@@ -25,13 +25,16 @@ class SeahubIOStatus(APIView):
Get task status by task id
"""
task_id = request.GET.get('task_id', '')
+ task_type = request.GET.get('task_type')
if not task_id:
error_msg = 'task_id invalid.'
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
- resp = event_export_status(task_id)
+ if task_type and task_type == 'import':
+ resp = event_import_status(task_id)
+ else:
+ resp = event_export_status(task_id)
if resp.status_code == 500:
- logger.error('query export status error: %s, %s' % (task_id, resp.content))
+ logger.error('query export or import status error: %s, %s' % (task_id, resp.content))
return api_error(500, 'Internal Server Error')
if not resp.status_code == 200:
return api_error(resp.status_code, resp.content)

View File

@@ -338,6 +338,17 @@ def event_export_status(task_id):
    return resp


def event_import_status(task_id):
    payload = {'exp': int(time.time()) + 300, }
    token = jwt.encode(payload, SECRET_KEY, algorithm='HS256')
    headers = {"Authorization": "Token %s" % token}
    url = urljoin(SEAFEVENTS_SERVER_URL, '/query-import-status')
    params = {'task_id': task_id}
    resp = requests.get(url, params=params, headers=headers)
    return resp


def delete_user_monitored_cache(params):
    payload = {'exp': int(time.time()) + 300, }
    token = jwt.encode(payload, SECRET_KEY, algorithm='HS256')
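
The only field the frontend reads out of this proxied seafevents response is the boolean is_finished, so the contract a caller relies on can be summarized in a few lines (response shape inferred from the frontend code above, not a full schema):

def import_is_finished(resp):
    # resp is the requests.Response returned by event_import_status();
    # queryImportConfluenceStatus only checks the is_finished flag.
    resp.raise_for_status()
    return resp.json().get('is_finished') is True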

View File

@@ -7,7 +7,6 @@ import posixpath
import datetime
import uuid
import re
- import requests
from copy import deepcopy
from constance import config
from urllib.parse import quote
@@ -35,7 +34,8 @@ from seahub.wiki2.models import WikiPageTrash, Wiki2Publish
from seahub.wiki2.utils import is_valid_wiki_name, get_wiki_config, WIKI_PAGES_DIR, is_group_wiki, \
check_wiki_admin_permission, check_wiki_permission, get_all_wiki_ids, get_and_gen_page_nav_by_id, \
get_current_level_page_ids, save_wiki_config, gen_unique_id, gen_new_page_nav_by_id, pop_nav, \
- delete_page, move_nav, revert_nav, get_sub_ids_by_page_id, get_parent_id_stack, add_convert_wiki_task
+ delete_page, move_nav, revert_nav, get_sub_ids_by_page_id, get_parent_id_stack, add_convert_wiki_task, \
+ import_conflunece_to_wiki
from seahub.utils import is_org_context, get_user_repos, is_pro_version, is_valid_dirent_name, \
get_no_duplicate_obj_name, HAS_FILE_SEARCH, HAS_FILE_SEASEARCH, gen_file_get_url, get_service_url
@@ -48,7 +48,7 @@ from seahub.utils.file_op import check_file_lock
from seahub.utils.repo import get_repo_owner, is_valid_repo_id_format, is_group_repo_staff, is_repo_owner
from seahub.seadoc.utils import get_seadoc_file_uuid, gen_seadoc_access_token, copy_sdoc_images_with_sdoc_uuid
from seahub.settings import ENABLE_STORAGE_CLASSES, STORAGE_CLASS_MAPPING_POLICY, \
- ENCRYPTED_LIBRARY_VERSION
+ ENCRYPTED_LIBRARY_VERSION, SERVICE_URL, MAX_CONFLUENCE_FILE_SIZE
from seahub.utils.timeutils import timestamp_to_isoformat_timestr
from seahub.utils.ccnet_db import CcnetDB
from seahub.tags.models import FileUUIDMap
@@ -60,6 +60,7 @@ from seahub.utils.rpc import SeafileAPI
from seahub.constants import PERMISSION_READ_WRITE
from seaserv import ccnet_api
from seahub.share.utils import is_repo_admin
from seahub.group.utils import group_id_to_name
HTTP_520_OPERATION_FAILED = 520
@@ -1629,3 +1630,121 @@ class WikiPageExport(APIView):
        response['Content-Disposition'] = 'attachment;filename*=utf-8''%s;filename="%s"' % (encoded_filename, encoded_filename)
        return response


class ImportConfluenceView(APIView):
    authentication_classes = (TokenAuthentication, SessionAuthentication)
    permission_classes = (IsAuthenticated,)
    throttle_classes = (UserRateThrottle,)

    def post(self, request):
        # file check
        file = request.FILES.get('file')
        if not file:
            return api_error(status.HTTP_400_BAD_REQUEST, 'File is required')
        filename = file.name
        if not filename.endswith('.html.zip'):
            return api_error(status.HTTP_400_BAD_REQUEST, 'File must be a zip file with .html.zip extension')
        file_size = file.size
        if file_size > MAX_CONFLUENCE_FILE_SIZE:
            return api_error(status.HTTP_400_BAD_REQUEST, 'File is too large')

        # permission check
        username = request.user.username
        if not request.user.permissions.can_add_repo():
            return api_error(status.HTTP_403_FORBIDDEN, 'You do not have permission to create library.')
        if not request.user.permissions.can_create_wiki():
            return api_error(status.HTTP_403_FORBIDDEN, 'You do not have permission to create wiki.')

        group_id = request.data.get('group_id', None)
        org_id = request.user.org.org_id if is_org_context(request) else -1

        underscore_index = filename.rfind('_')
        if underscore_index != -1:
            # The file name for exporting the script is spaceName_spaceId.html.zip
            wiki_name = filename[:underscore_index]
            space_key = filename[underscore_index + 1:-len('.html.zip')]
        elif 'Confluence-space-export-' in filename:
            # The manually exported file name is Confluence-space-export-id.html.zip
            wiki_name = filename[:-len('.html.zip')]
            space_key = filename[len('Confluence-space-export-'):-len('.html.zip')]
        else:
            wiki_name = filename[:-len('.html.zip')]
            space_key = wiki_name

        # create wiki
        try:
            repo_id = self._create_wiki(group_id, wiki_name, org_id, username)
            seafile_db_api = SeafileDB()
            seafile_db_api.set_repo_type(repo_id, 'wiki')
        except Exception as e:
            logger.error(e)
            msg = 'Internal Server Error'
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, msg)

        seafile_server_url = f'{SERVICE_URL}/lib/{repo_id}/file/'
        wiki = Wiki.objects.get(wiki_id=repo_id)
        if not wiki:
            return api_error(status.HTTP_404_NOT_FOUND, 'Wiki not found')

        extract_dir = '/tmp/wiki'
        space_dir = os.path.join(extract_dir, space_key)
        if not os.path.exists(space_dir):
            os.makedirs(space_dir)
        try:
            tmp_zip_file = os.path.join(space_dir, filename)
            with open(tmp_zip_file, 'wb') as f:
                for chunk in file.chunks():
                    f.write(chunk)
        except Exception as e:
            logger.error(e)
            msg = 'Internal Server Error'
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, msg)

        task_id = import_conflunece_to_wiki({
            'repo_id': repo_id,
            'space_key': space_key,
            'file_path': tmp_zip_file,
            'tmp_zip_file': tmp_zip_file,
            'username': username,
            'seafile_server_url': seafile_server_url
        })
        return Response({'task_id': task_id})

    def _create_wiki(self, group_id, wiki_name, org_id, username):
        permission = PERMISSION_READ_WRITE
        if group_id:
            group_id = int(group_id)
            # only group admin can create wiki
            if not is_group_admin(group_id, username):
                error_msg = 'Permission denied.'
                return api_error(status.HTTP_403_FORBIDDEN, error_msg)

            group_quota = seafile_api.get_group_quota(group_id)
            group_quota = int(group_quota)
            if group_quota <= 0 and group_quota != -2:
                error_msg = 'No group quota.'
                return api_error(status.HTTP_403_FORBIDDEN, error_msg)

            # create group owned repo
            password = None
            if is_pro_version() and ENABLE_STORAGE_CLASSES:
                if STORAGE_CLASS_MAPPING_POLICY in ('USER_SELECT', 'ROLE_BASED'):
                    storage_id = None
                    repo_id = seafile_api.add_group_owned_repo(group_id,
                                                               wiki_name,
                                                               permission,
                                                               password,
                                                               enc_version=ENCRYPTED_LIBRARY_VERSION,
                                                               storage_id=storage_id)
                else:
                    repo_id = SeafileAPI.add_group_owned_repo(
                        group_id, wiki_name, password, permission, org_id=org_id)
            else:
                repo_id = SeafileAPI.add_group_owned_repo(
                    group_id, wiki_name, password, permission, org_id=org_id)
        else:
            if org_id and org_id > 0:
                repo_id = seafile_api.create_org_repo(wiki_name, '', username, org_id)
            else:
                repo_id = seafile_api.create_repo(wiki_name, '', username)

        return repo_id
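
The wiki name and space key are recovered entirely from the uploaded filename: script exports are expected to be named spaceName_spaceId.html.zip, manual exports Confluence-space-export-id.html.zip, and anything else falls back to using the whole base name for both values. The same rule isolated as a small sketch (logic copied from the view above; the function name is ours, not the patch's):

def parse_confluence_filename(filename):
    # spaceName_spaceId.html.zip          -> ('spaceName', 'spaceId')
    # Confluence-space-export-id.html.zip -> ('Confluence-space-export-id', 'id')
    # anything-else.html.zip              -> ('anything-else', 'anything-else')
    suffix = '.html.zip'
    base = filename[:-len(suffix)]
    underscore_index = filename.rfind('_')
    if underscore_index != -1:
        wiki_name = filename[:underscore_index]
        space_key = filename[underscore_index + 1:-len(suffix)]
    elif 'Confluence-space-export-' in filename:
        wiki_name = base
        space_key = filename[len('Confluence-space-export-'):-len(suffix)]
    else:
        wiki_name = base
        space_key = base
    return wiki_name, space_key

# parse_confluence_filename('MySpace_123456.html.zip') -> ('MySpace', '123456')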

View File

@@ -1002,6 +1002,7 @@ ENABLE_FORCE_2FA_TO_ALL_USERS = False
# Enable wiki
ENABLE_WIKI = True
MAX_CONFLUENCE_FILE_SIZE = 100 * 1024 * 1024
# Enable 'repo snapshot label' feature
ENABLE_REPO_SNAPSHOT_LABEL = False
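
MAX_CONFLUENCE_FILE_SIZE caps accepted uploads at 100 MB. Assuming the usual Seahub pattern of overriding defaults in seahub_settings.py, a deployment that needs larger exports could raise it like this (a sketch, not part of the patch):

# seahub_settings.py (assumption: the standard Seahub settings override file)
MAX_CONFLUENCE_FILE_SIZE = 200 * 1024 * 1024   # accept exports up to 200 MB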

View File

@@ -217,7 +217,7 @@ from seahub.ocm.settings import OCM_ENDPOINT
from seahub.wiki2.views import wiki_view, wiki_publish_view, wiki_history_view
from seahub.api2.endpoints.wiki2 import Wikis2View, Wiki2View, Wiki2ConfigView, Wiki2PagesView, Wiki2PageView, \
Wiki2DuplicatePageView, WikiPageTrashView, Wiki2PublishView, Wiki2PublishConfigView, Wiki2PublishPageView, \
- WikiSearch, WikiConvertView, WikiPageExport
+ WikiSearch, WikiConvertView, WikiPageExport, ImportConfluenceView
from seahub.api2.endpoints.subscription import SubscriptionView, SubscriptionPlansView, SubscriptionLogsView
from seahub.api2.endpoints.user_list import UserListView
from seahub.api2.endpoints.seahub_io import SeahubIOStatus
@@ -610,6 +610,7 @@ urlpatterns = [
re_path(r'^api/v2.1/wiki2/(?P<wiki_id>[-0-9a-f]{36})/publish/$', Wiki2PublishView.as_view(), name='api-v2.1-wiki2-publish'),
re_path(r'^api/v2.1/wiki2/search/$', WikiSearch.as_view(), name='api-v2.1-wiki2-search'),
re_path(r'^api/v2.1/convert-wiki/$', WikiConvertView.as_view(), name='api-v2.1-wiki-convert'),
re_path(r'^api/v2.1/import-confluence/$', ImportConfluenceView.as_view(), name='api-v2.1-import-confluence'),
## user::drafts

View File

@@ -353,3 +353,11 @@ def add_convert_wiki_task(params):
    url = urljoin(SEAFEVENTS_SERVER_URL, '/add-convert-wiki-task')
    resp = requests.get(url, params=params, headers=headers)
    return json.loads(resp.content)['task_id']


def import_conflunece_to_wiki(params):
    payload = {'exp': int(time.time()) + 300, }
    token = jwt.encode(payload, SECRET_KEY, algorithm='HS256')
    headers = {"Authorization": "Token %s" % token}
    url = urljoin(SEAFEVENTS_SERVER_URL, '/import-confluence-to-wiki')
    resp = requests.post(url, json=params, headers=headers, timeout=30)
    return json.loads(resp.content)['task_id']
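
import_conflunece_to_wiki assumes seafevents answers within 30 seconds and always returns a task_id. A defensive wrapper like the following (a sketch, not part of the patch) would turn transport failures or an unexpected payload into a single readable error for the caller to log:

import json

import requests

from seahub.wiki2.utils import import_conflunece_to_wiki  # the helper defined above


def dispatch_confluence_import(params):
    # Wrap the dispatch so the caller sees one exception type instead of
    # raw requests/JSON errors when seafevents is down or misbehaving.
    try:
        return import_conflunece_to_wiki(params)
    except requests.RequestException as e:
        raise RuntimeError('seafevents is unreachable: %s' % e)
    except (KeyError, ValueError) as e:
        raise RuntimeError('unexpected reply from seafevents: %s' % e)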