Mirror of https://github.com/haiwen/seahub.git, synced 2025-09-24 12:58:34 +00:00
remove support for sdoczip
@@ -65,7 +65,7 @@ export default class PageDropdownMenu extends Component {
     const { page } = this.props;
     const fileInput = document.createElement('input');
     fileInput.type = 'file';
-    fileInput.accept = '.sdoczip,.docx,.md';
+    fileInput.accept = '.docx,.md';
     fileInput.style.display = 'none';

     fileInput.addEventListener('change', (e) => {
@@ -8,11 +8,9 @@ import datetime
 import uuid
 import re
 import requests
-import shutil
 from copy import deepcopy
 from constance import config
 from urllib.parse import quote
-from zipfile import ZipFile

 from rest_framework import status
 from rest_framework.authentication import SessionAuthentication
@@ -29,9 +27,6 @@ from seahub.api2.authentication import TokenAuthentication
 from seahub.api2.endpoints.utils import sdoc_export_to_md, convert_file
 from seahub.api2.throttling import UserRateThrottle
 from seahub.api2.utils import api_error, is_wiki_repo
-from seahub.utils import HAS_FILE_SEARCH, HAS_FILE_SEASEARCH, get_service_url, gen_file_upload_url
-if HAS_FILE_SEARCH or HAS_FILE_SEASEARCH:
-    from seahub.search.utils import search_wikis, ai_search_wikis
 from seahub.utils.db_api import SeafileDB
 from seahub.wiki2.models import Wiki2 as Wiki
 from seahub.wiki.models import Wiki as OldWiki
@@ -43,7 +38,8 @@ from seahub.wiki2.utils import is_valid_wiki_name, get_wiki_config, WIKI_PAGES_D
     import_conflunece_to_wiki

 from seahub.utils import is_org_context, get_user_repos, is_pro_version, is_valid_dirent_name, \
-    get_no_duplicate_obj_name, HAS_FILE_SEARCH, HAS_FILE_SEASEARCH, gen_file_get_url, get_service_url
+    get_no_duplicate_obj_name, HAS_FILE_SEARCH, HAS_FILE_SEASEARCH, gen_file_get_url, get_service_url, \
+    gen_file_upload_url
 if HAS_FILE_SEARCH or HAS_FILE_SEASEARCH:
     from seahub.search.utils import search_wikis, ai_search_wikis

@@ -51,8 +47,7 @@ from seahub.views import check_folder_permission
 from seahub.base.templatetags.seahub_tags import email2nickname
 from seahub.utils.file_op import check_file_lock
 from seahub.utils.repo import get_repo_owner, is_valid_repo_id_format, is_group_repo_staff, is_repo_owner
-from seahub.seadoc.utils import get_seadoc_file_uuid, gen_seadoc_access_token, copy_sdoc_images_with_sdoc_uuid, \
-    ZSDOC
+from seahub.seadoc.utils import get_seadoc_file_uuid, gen_seadoc_access_token, copy_sdoc_images_with_sdoc_uuid
 from seahub.settings import ENABLE_STORAGE_CLASSES, STORAGE_CLASS_MAPPING_POLICY, \
     ENCRYPTED_LIBRARY_VERSION, SERVICE_URL, MAX_CONFLUENCE_FILE_SIZE
 from seahub.utils.timeutils import timestamp_to_isoformat_timestr
@@ -1772,7 +1767,7 @@ class Wiki2ImportPageView(APIView):

         filename = file.name
         extension = filename.split('.')[-1].lower()
-        if extension not in ['sdoczip', 'docx', 'md']:
+        if extension not in ['docx', 'md']:
             error_msg = 'file invalid.'
             return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

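For reference, after this change the server-side validation reduces to a plain whitelist check on the lowercased last extension. A minimal standalone sketch of that check (the helper name and constant are illustrative, not part of seahub):

ALLOWED_IMPORT_EXTENSIONS = ('docx', 'md')  # 'sdoczip' is no longer accepted after this commit

def is_importable(filename):
    """Return True if the filename's extension is in the import whitelist."""
    extension = filename.split('.')[-1].lower()
    return extension in ALLOWED_IMPORT_EXTENSIONS

assert is_importable('notes.md')
assert is_importable('Report.DOCX')
assert not is_importable('page.sdoczip')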
@@ -1804,9 +1799,7 @@ class Wiki2ImportPageView(APIView):
         parent_dir = os.path.join(WIKI_PAGES_DIR, str(sdoc_uuid))
         file_path = os.path.join(parent_dir, filename)

-        if extension == 'sdoczip':
-            FileUUIDMap.objects.create_fileuuidmap_by_uuid(sdoc_uuid, repo_id, parent_dir, filename[:-3], is_dir=False)
-        elif extension == 'docx':
+        if extension == 'docx':
             uuid_filename = f'{filename.split(extension)[0]}sdoc'
             FileUUIDMap.objects.create_fileuuidmap_by_uuid(sdoc_uuid, repo_id, parent_dir, uuid_filename, is_dir=False)
         elif extension == 'md':
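The retained branch derives the target .sdoc name with filename.split(extension)[0] + 'sdoc', as the unchanged line above shows. A small sketch that just exercises that expression (the helper name is illustrative); note that str.split() on the extension substring also matches it elsewhere in the name:

def to_sdoc_name(filename):
    """Derive the .sdoc name the same way the diff above does."""
    extension = filename.split('.')[-1].lower()
    return f"{filename.split(extension)[0]}sdoc"

assert to_sdoc_name('report.docx') == 'report.sdoc'
assert to_sdoc_name('notes.md') == 'notes.sdoc'
# str.split('md') also matches the 'md' inside 'readme':
assert to_sdoc_name('readme.md') == 'readsdoc'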
@@ -1850,43 +1843,6 @@ class Wiki2ImportPageView(APIView):
             convert_file(file_path, username, str(sdoc_uuid), download_url, upload_link, src_type, 'sdoc')
             file_path = os.path.join(parent_dir, uuid_filename)

-        elif extension == 'sdoczip':
-            file_path = file_path[:-3]
-            tmp_dir = str(uuid.uuid4())
-            tmp_root_dir = os.path.join('/tmp/seahub', str(repo_id))
-            tmp_extracted_path = os.path.join(tmp_root_dir, 'sdoc_zip_extracted/', tmp_dir)
-            tmp_extracted_path = os.path.normpath(tmp_extracted_path)
-            try:
-                with ZipFile(file) as zip_file:
-                    zip_file.extractall(tmp_extracted_path)
-            except Exception as e:
-                logger.error(e)
-                return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')
-
-            sdoc_file_path = os.path.join(tmp_extracted_path, 'content.json')
-            sdoc_file_path = os.path.normpath(sdoc_file_path)
-            sdoc_file_name = filename.replace(ZSDOC, 'sdoc')
-            new_sdoc_file_path = os.path.join(tmp_extracted_path, sdoc_file_name)
-            new_sdoc_file_path = os.path.normpath(new_sdoc_file_path)
-
-            os.rename(sdoc_file_path, new_sdoc_file_path)
-            print(new_sdoc_file_path)
-            files = {'file': open(new_sdoc_file_path, 'rb')}
-            data = {'parent_dir': parent_dir, 'replace': 1}
-            resp = requests.post(upload_link, files=files, data=data)
-            if not resp.ok:
-                logger.error('save file: %s failed: %s' % (filename, resp.text))
-                return api_error(resp.status_code, resp.content)
-
-            # upload sdoc images
-            image_dir = os.path.join(tmp_extracted_path, 'images/')
-            image_dir = os.path.normpath(image_dir)
-            if os.path.exists(image_dir):
-                batch_upload_sdoc_images(str(sdoc_uuid), repo_id, username, image_dir)
-            # remove tmp dir
-            if os.path.exists(tmp_root_dir):
-                shutil.rmtree(tmp_root_dir)
-
         new_page = {
             'id': new_page_id,
             'name': page_name,
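For readers wondering what the deleted branch did: it extracted the uploaded zip into a scratch directory under /tmp/seahub, renamed the inner content.json to a .sdoc file, posted it to an upload link, copied any bundled images, and removed the scratch directory. A simplified, self-contained sketch of that extract/rename/clean-up pattern, with the upload step replaced by reading the file back (all names and paths here are illustrative, not seahub's API):

import os
import shutil
import tempfile
import uuid
from zipfile import ZipFile

def import_zip(zip_path, inner_name='content.json', target_name='page.sdoc'):
    """Extract a zip to a scratch dir, rename the payload, and always clean up."""
    tmp_root = os.path.join(tempfile.gettempdir(), 'zip_import_demo', str(uuid.uuid4()))
    try:
        with ZipFile(zip_path) as zf:
            zf.extractall(tmp_root)
        src = os.path.join(tmp_root, inner_name)
        dst = os.path.join(tmp_root, target_name)
        os.rename(src, dst)
        with open(dst, 'rb') as fh:
            return fh.read()  # stand-in for POSTing the file to an upload link
    finally:
        if os.path.exists(tmp_root):
            shutil.rmtree(tmp_root)  # remove the scratch directory in all cases

if __name__ == '__main__':
    with tempfile.TemporaryDirectory() as d:
        demo_zip = os.path.join(d, 'demo.sdoczip')
        with ZipFile(demo_zip, 'w') as zf:
            zf.writestr('content.json', '{"children": []}')
        print(import_zip(demo_zip))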
@@ -1274,7 +1274,7 @@ if os.environ.get('ENABLE_SEADOC', ''):
     ENABLE_SEADOC = os.environ.get('ENABLE_SEADOC', '').lower() == 'true'
     SEADOC_PRIVATE_KEY = JWT_PRIVATE_KEY
     SEADOC_SERVER_URL = os.environ.get('SEADOC_SERVER_URL', '') or SEADOC_SERVER_URL
-    # FILE_CONVERTER_SERVER_URL = SEADOC_SERVER_URL.rstrip('/') + '/converter'
+    FILE_CONVERTER_SERVER_URL = SEADOC_SERVER_URL.rstrip('/') + '/converter'

 if os.environ.get('ENABLE_EXCALIDRAW', ''):
     ENABLE_EXCALIDRAW = os.environ.get('ENABLE_EXCALIDRAW', '').lower() == 'true'
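This hunk re-enables deriving the converter endpoint from SEADOC_SERVER_URL; rstrip('/') keeps the result free of a double slash whether or not the configured URL ends with one. A quick illustration of that expression (the URLs and helper name are placeholders):

def converter_url(seadoc_server_url):
    # rstrip('/') normalizes a trailing slash before appending the path
    return seadoc_server_url.rstrip('/') + '/converter'

assert converter_url('https://example.com/sdoc-server/') == 'https://example.com/sdoc-server/converter'
assert converter_url('https://example.com/sdoc-server') == 'https://example.com/sdoc-server/converter'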