mirror of https://github.com/haiwen/seahub.git synced 2025-09-25 14:50:29 +00:00

Export/Import tags (#7888)

* export tags

* remove child links

* update

* optimize code and update import tags loading

* optimize code

* optimize var

---------

Co-authored-by: 孙永强 <11704063+s-yongqiang@user.noreply.gitee.com>
Authored by awu0403 on 2025-06-10 17:59:31 +08:00, committed by GitHub
parent 355febb30d, commit e8af55a1a4
16 changed files with 408 additions and 15 deletions

View File

@@ -161,7 +161,7 @@ class DirPath extends React.Component {
<span className="path-split">/</span>
<span className="path-item path-item-read-only">{gettext('Tags')}</span>
<span className="path-split">/</span>
<TagViewName id={tagId} canSelectAllTags={canSelectAllTags} />
<TagViewName id={tagId} canSelectAllTags={canSelectAllTags} repoID={this.props.repoID} />
{children && (
<>
<span className="path-split">/</span>

View File

@@ -0,0 +1,41 @@
import React from 'react';
import PropTypes from 'prop-types';
import { Modal, ModalBody } from 'reactstrap';
import { gettext } from '../../utils/constants';
import SeahubModalHeader from '@/components/common/seahub-modal-header';
import Loading from '../loading';
import '../../css/seahub-io-dialog.css';
const propTypes = {
toggleDialog: PropTypes.func.isRequired,
};
class ImportTagsDialog extends React.Component {
constructor(props) {
super(props);
}
toggle = () => {
this.props.toggleDialog();
};
render() {
return (
<Modal className='seahub-io-dialog' isOpen={true} toggle={this.toggle}>
<SeahubModalHeader toggle={this.toggle}>{gettext('Import tags')}</SeahubModalHeader>
<ModalBody>
<>
<Loading/>
<div className="seahub-io-dialog-parsing-text">{gettext('Importing tags...')}</div>
</>
</ModalBody>
</Modal>
);
}
}
ImportTagsDialog.propTypes = propTypes;
export default ImportTagsDialog;

View File

@@ -6,6 +6,7 @@ export const OPERATION = {
NEW_SUB_TAG: 'new_sub_tag',
MERGE_TAGS: 'merge_tags',
ADD_CHILD_TAGS: 'add_child_tags',
EXPORT_TAGS: 'export_tags',
};
export const POPUP_EDITOR_OPERATION_KEYS = [OPERATION.SET_SUB_TAGS, OPERATION.ADD_CHILD_TAGS];

View File

@@ -230,7 +230,7 @@ const style = `
padding: 0;
}
.loading-tip {
.metrics-container .loading-tip {
margin: 100px auto;
text-align: center;
}

View File

@@ -143,6 +143,20 @@ class TagsManagerAPI {
return this.req.post(url);
};
exportTags = (repoID, tagsIds) => {
const url = this.server + '/api/v2.1/repos/' + repoID + '/metadata/export-tags/';
const params = {
tags_ids: tagsIds,
};
return this.req.post(url, params);
};
importTags = (repoID, file) => {
const url = this.server + '/api/v2.1/repos/' + repoID + '/metadata/import-tags/';
const formData = new FormData();
formData.append('file', file);
return this._sendPostRequest(url, formData);
};
}
const tagsAPI = new TagsManagerAPI();
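
For orientation, a minimal sketch of how the two new helpers might be called from the frontend. The repoID, the tag ids, and the file variable are illustrative placeholders, not part of the diff:

// Hypothetical usage of the new helpers (not part of this commit).
// Export two tags and inspect the JSON payload the server returns.
tagsAPI.exportTags(repoID, ['tag-id-1', 'tag-id-2']).then((res) => {
  console.log(res.data); // array of exported tag objects
});

// Re-import a previously exported file, e.g. one picked via <input type="file">.
tagsAPI.importTags(repoID, file).then(() => {
  console.log('tags imported');
}).catch((error) => {
  console.log(error);
});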

View File

@@ -5,14 +5,18 @@ import EditTagDialog from '../../dialog/edit-tag-dialog';
import { isEnter, isSpace } from '../../../../utils/hotkey';
import { gettext } from '../../../../utils/constants';
import { useTags } from '../../../hooks';
import tagsAPI from '../../../api';
import ImportTagsDialog from '../../../../components/dialog/import-tags-dialog';
import toaster from '../../../../components/toast';
import { Utils } from '../../../../utils/utils';
import './index.css';
const AllTagsOperationToolbar = () => {
const AllTagsOperationToolbar = ({ repoID }) => {
const [isMenuOpen, setMenuOpen] = useState(false);
const [isShowEditTagDialog, setShowEditTagDialog] = useState(false);
const { tagsData, addTag } = useTags();
const [isShowImportLoadingDialog, setShowImportLoadingDialog] = useState(false);
const { tagsData, addTag, reloadTags } = useTags();
const tags = useMemo(() => {
if (!tagsData) return [];
@@ -41,6 +45,28 @@ const AllTagsOperationToolbar = () => {
addTag(tag, callback);
}, [addTag]);
const handleImportTags = useCallback(() => {
const fileInput = document.createElement('input');
fileInput.type = 'file';
fileInput.accept = '.json';
fileInput.onchange = async (e) => {
const file = e.target.files[0];
setShowImportLoadingDialog(true);
tagsAPI.importTags(repoID, file).then(res => {
toaster.success(gettext('Successfully imported tags.'));
setTimeout(() => {
reloadTags(true);
}, 10);
}).catch(error => {
const errorMsg = Utils.getErrorMsg(error);
toaster.danger(errorMsg || gettext('Failed to import tags'));
}).finally(() => {
setShowImportLoadingDialog(false);
});
};
fileInput.click();
}, [reloadTags, repoID]);
return (
<>
<div className="dir-operation">
@@ -62,12 +88,19 @@ const AllTagsOperationToolbar = () => {
<i className="sf3-font sf3-font-new mr-2 dropdown-item-icon"></i>
{gettext('New tag')}
</DropdownItem>
<DropdownItem onClick={handleImportTags}>
<i className="sf3-font-import-sdoc sf3-font mr-2 dropdown-item-icon"></i>
{gettext('Import tags')}
</DropdownItem>
</DropdownMenu>
</Dropdown>
</div>
{isShowEditTagDialog && (
<EditTagDialog tags={tags} title={gettext('New tag')} onToggle={closeAddTag} onSubmit={handelAddTags} />
)}
{isShowImportLoadingDialog && (
<ImportTagsDialog toggleDialog={() => setShowImportLoadingDialog(false)} />
)}
</>
);
};

View File

@@ -8,7 +8,7 @@ import { gettext } from '../../../utils/constants';
import AllTagsOperationToolbar from './all-tags-operation-toolbar';
import { EVENT_BUS_TYPE } from '../../../metadata/constants';
const TagViewName = ({ id, canSelectAllTags }) => {
const TagViewName = ({ id, canSelectAllTags, repoID }) => {
const { tagsData, context } = useTags();
const selectAllTags = () => {
@@ -31,7 +31,7 @@ const TagViewName = ({ id, canSelectAllTags }) => {
);
}
return (
<AllTagsOperationToolbar/>
<AllTagsOperationToolbar repoID={repoID} />
);
}
const tag = getRowById(tagsData, id);
@@ -43,6 +43,7 @@ const TagViewName = ({ id, canSelectAllTags }) => {
TagViewName.propTypes = {
id: PropTypes.string,
repoID: PropTypes.string,
canSelectAllTags: PropTypes.bool,
};

View File

@@ -98,6 +98,11 @@ class Context {
return true;
};
checkCanExportTags = () => {
if (this.permission === 'r') return false;
return true;
};
canModifyTags = () => {
if (this.permission === 'r') return false;
return true;
@@ -131,6 +136,14 @@ class Context {
mergeTags = (target_tag_id, merged_tags_ids) => {
return this.api.mergeTags(this.repoId, target_tag_id, merged_tags_ids);
};
exportTags = (tagsIds) => {
return this.api.exportTags(this.repoId, tagsIds);
};
importTags = (file) => {
return this.api.importTags(this.repoId, file);
};
}
export default Context;
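
A hedged sketch of how callers are expected to combine the new permission check with these wrappers; the context instance and selectedTagIds are assumptions for illustration, not code from the commit:

// Illustrative only: gate the action on permission, then delegate to the wrapper,
// which forwards the repo id stored on the context.
if (context.checkCanExportTags()) {
  context.exportTags(selectedTagIds).then((res) => {
    // res.data holds the exported tags; the UI turns it into a downloadable file
  });
}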

View File

@@ -42,9 +42,9 @@ export const TagsProvider = ({ repoID, currentPath, selectTagsView, tagsChangedC
setTagsData(data);
}, []);
const reloadTags = useCallback(() => {
const reloadTags = useCallback((force = false) => {
setReloading(true);
storeRef.current.reload(PER_LOAD_NUMBER).then(() => {
storeRef.current.reload(PER_LOAD_NUMBER, force).then(() => {
setTagsData(storeRef.current.data);
setReloading(false);
}).catch(error => {
@@ -326,6 +326,7 @@ export const TagsProvider = ({ repoID, currentPath, selectTagsView, tagsChangedC
modifyColumnWidth,
modifyLocalFileTags,
modifyTagsSort,
reloadTags,
}}>
{children}
</TagsContext.Provider>
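
The force flag exposed through reloadTags is what the import flow relies on; a minimal illustration of the two call styles, assuming reloadTags is obtained from useTags() as in the toolbar above:

// Illustrative: a plain reload is throttled by the store's one-hour window,
// while the forced variant (used right after importing tags) always refetches.
reloadTags();      // may be skipped if data was loaded within the last hour
reloadTags(true);  // bypasses the throttle and reloads immediately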

View File

@@ -66,9 +66,13 @@ class Store {
await this.loadTagsData(limit);
}
async reload(limit = PER_LOAD_NUMBER) {
async reload(limit = PER_LOAD_NUMBER, force = false) {
const currentTime = new Date();
if (dayjs(currentTime).diff(this.loadTime, 'hours') > 1) {
if (force) {
this.loadTime = currentTime;
this.startIndex = 0;
await this.loadTagsData(limit);
} else if (dayjs(currentTime).diff(this.loadTime, 'hours') > 1) {
this.loadTime = currentTime;
this.startIndex = 0;
await this.loadTagsData(limit);

View File

@@ -9,7 +9,7 @@ export const OPERATION_TYPE = {
DELETE_TAG_LINKS: 'delete_tag_links',
DELETE_TAGS_LINKS: 'delete_tags_links',
MERGE_TAGS: 'merge_tags',
EXPORT_TAGS: 'export_tags',
MODIFY_LOCAL_RECORDS: 'modify_local_records',
MODIFY_LOCAL_FILE_TAGS: 'modify_local_file_tags',
@@ -32,6 +32,7 @@ export const OPERATION_ATTRIBUTES = {
[OPERATION_TYPE.MODIFY_LOCAL_FILE_TAGS]: ['file_id', 'tags_ids'],
[OPERATION_TYPE.MODIFY_COLUMN_WIDTH]: ['column_key', 'new_width', 'old_width'],
[OPERATION_TYPE.MODIFY_TAGS_SORT]: ['sort'],
[OPERATION_TYPE.EXPORT_TAGS]: ['repo_id', 'tags_ids'],
};
export const UNDO_OPERATION_TYPE = [

View File

@@ -3,6 +3,7 @@ import { OPERATION_TYPE } from './operations';
import { getColumnByKey } from '../../metadata/utils/column';
import ObjectUtils from '../../utils/object';
import { PRIVATE_COLUMN_KEY } from '../constants';
import tagsAPI from '../api';
const MAX_LOAD_RECORDS = 100;
@@ -115,6 +116,37 @@ class ServerOperator {
});
break;
}
case OPERATION_TYPE.EXPORT_TAGS: {
const { repo_id, tags_ids } = operation;
tagsAPI.exportTags(repo_id, tags_ids).then((res) => {
let fileName;
if (res.data && Array.isArray(res.data)) {
if (res.data.length === 1) {
fileName = `${res.data[0]._tag_name}.json`;
} else {
const now = new Date();
const dateStr = now.toISOString().split('T')[0];
fileName = `tags-export-${dateStr}.json`;
}
} else {
fileName = 'tags.json';
}
const blob = new Blob([JSON.stringify(res.data, null, 2)], { type: 'application/json' });
const url = window.URL.createObjectURL(blob);
const a = document.createElement('a');
a.href = url;
a.download = fileName;
document.body.appendChild(a);
a.click();
window.URL.revokeObjectURL(url);
document.body.removeChild(a);
callback({ operation });
}).catch((error) => {
callback({ error: gettext('Failed to export tags') });
});
break;
}
case OPERATION_TYPE.RESTORE_RECORDS: {
const { repo_id, rows_data } = operation;
if (!Array.isArray(rows_data) || rows_data.length === 0) {
@@ -194,7 +226,6 @@ class ServerOperator {
}).catch (error => {
// for debug
// eslint-disable-next-line no-console
console.log(error);
this.asyncReloadRecords(restRowsIds, repoId, relatedColumnKeyMap, callback);
});
}

View File

@@ -27,6 +27,7 @@ export const createContextMenuOptions = ({
onMergeTags,
}) => {
const canDeleteTag = context.checkCanDeleteTag();
const canExportTags = context.checkCanExportTags();
const canAddTag = context.canAddTag();
const eventBus = EventBus.getInstance();
@@ -58,6 +59,10 @@ export const createContextMenuOptions = ({
eventBus.dispatch(EVENT_BUS_TYPE.OPEN_EDITOR, null, option.value);
break;
}
case OPERATION.EXPORT_TAGS: {
eventBus.dispatch(EVENT_BUS_TYPE.EXPORT_TAGS, option.tagsIds);
break;
}
default: {
break;
}
@@ -78,6 +83,13 @@ export const createContextMenuOptions = ({
tagsIds.push(tag._id);
}
}
if (canExportTags && tagsIds.length > 0) {
options.push({
label: gettext('Export tags'),
value: OPERATION.EXPORT_TAGS,
tagsIds,
});
}
if (canDeleteTag && tagsIds.length > 0) {
if (tagsIds.length === 1) {
options.push({
@@ -125,6 +137,13 @@ export const createContextMenuOptions = ({
tagsIds,
});
}
if (canExportTags && tagsIds.length > 0) {
options.push({
label: gettext('Export tags'),
value: OPERATION.EXPORT_TAGS,
tagsIds,
});
}
return options;
}
@@ -169,6 +188,13 @@ export const createContextMenuOptions = ({
}
);
}
if (isNameColumn && canExportTags) {
options.push({
label: gettext('Export tags'),
value: OPERATION.EXPORT_TAGS,
tagsIds: [tag._id],
});
}
return options;
};
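
Because the menu handler only dispatches an event, any code holding the shared event bus can trigger the same export; a small sketch, assuming EventBus and EVENT_BUS_TYPE are imported as in the files above and with the tag ids invented for illustration:

// Illustrative: dispatch the new event; TagsTable's subscriber builds the
// EXPORT_TAGS operation and hands it to ServerOperator.
const eventBus = EventBus.getInstance();
eventBus.dispatch(EVENT_BUS_TYPE.EXPORT_TAGS, ['tag-id-1', 'tag-id-2']);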

View File

@@ -17,6 +17,9 @@ import { isNumber } from '../../../../utils/number';
import { getTreeNodeByKey, getTreeNodeId } from '../../../../components/sf-table/utils/tree';
import { getRowById } from '../../../../components/sf-table/utils/table';
import { getParentLinks } from '../../../utils/cell';
import ServerOperator from '../../../store/server-operator';
import toaster from '../../../../components/toast';
import { OPERATION_TYPE } from '../../../store/operations/constants';
import './index.css';
@@ -269,18 +272,35 @@ const TagsTable = ({
}, 0);
}, [eventBus, toggleShowDirentToolbar]);
const onExportTags = useCallback((tagsIds) => {
const operation = {
op_type: OPERATION_TYPE.EXPORT_TAGS,
repo_id: context.repoId,
tags_ids: tagsIds
};
const serverOperator = new ServerOperator(context);
serverOperator.applyOperation(operation, null, ({ error }) => {
if (error) {
toaster.danger(error);
}
});
}, [context]);
useEffect(() => {
const unsubscribeUpdateSearchResult = eventBus.subscribe(EVENT_BUS_TYPE.UPDATE_SEARCH_RESULT, updateSearchResult);
const unsubscribeDeleteTags = eventBus.subscribe(EVENT_BUS_TYPE.DELETE_TAGS, onDeleteTags);
const unsubscribeMergeTags = eventBus.subscribe(EVENT_BUS_TYPE.MERGE_TAGS, onMergeTags);
const unsubscribeNewSubTag = eventBus.subscribe(EVENT_BUS_TYPE.NEW_SUB_TAG, onNewSubTag);
const unsubscribeExportTags = eventBus.subscribe(EVENT_BUS_TYPE.EXPORT_TAGS, onExportTags);
return () => {
unsubscribeUpdateSearchResult();
unsubscribeDeleteTags();
unsubscribeMergeTags();
unsubscribeNewSubTag();
unsubscribeExportTags();
};
}, [eventBus, updateSearchResult, onDeleteTags, onMergeTags, onNewSubTag, updateSelectedTagIds]);
}, [eventBus, updateSearchResult, onDeleteTags, onMergeTags, onNewSubTag, onExportTags]);
return (
<>

View File

@@ -9,6 +9,7 @@ from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework import status
from rest_framework.views import APIView
from django.http import HttpResponse
from seahub.api2.utils import api_error
from seahub.api2.throttling import UserRateThrottle
from seahub.api2.authentication import TokenAuthentication
@@ -2964,3 +2965,207 @@ class MetadataMigrateTags(APIView):
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
return Response({'success': True})
class MetadataExportTags(APIView):
authentication_classes = (TokenAuthentication, SessionAuthentication)
permission_classes = (IsAuthenticated,)
throttle_classes = (UserRateThrottle,)
def post(self, request, repo_id):
tags_ids = request.data.get('tags_ids', None)
if not tags_ids:
return api_error(status.HTTP_400_BAD_REQUEST, 'tags_ids invalid')
metadata = RepoMetadata.objects.filter(repo_id=repo_id).first()
if not metadata or not metadata.enabled or not metadata.tags_enabled:
error_msg = f'Tags are disabled for repo {repo_id}.'
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
repo = seafile_api.get_repo(repo_id)
if not repo:
error_msg = 'Library %s not found.' % repo_id
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
if not can_read_metadata(request, repo_id):
error_msg = 'Permission denied.'
return api_error(status.HTTP_403_FORBIDDEN, error_msg)
metadata_server_api = MetadataServerAPI(repo_id, request.user.username)
from seafevents.repo_metadata.constants import TAGS_TABLE
export_data = []
tags_ids_str = ', '.join([f'"{id}"' for id in tags_ids])
sql = f'SELECT * FROM {TAGS_TABLE.name} WHERE `{TAGS_TABLE.columns.id.name}` in ({tags_ids_str})'
try:
query_result = metadata_server_api.query_rows(sql).get('results')
for tag in query_result:
tag_parent_links = tag.get(TAGS_TABLE.columns.parent_links.key, [])
tag_sub_links = tag.get(TAGS_TABLE.columns.sub_links.key, [])
export_data.append({
'_id': tag.get(TAGS_TABLE.columns.id.name, ''),
'_tag_name': tag.get(TAGS_TABLE.columns.name.name, ''),
'_tag_color': tag.get(TAGS_TABLE.columns.color.name, ''),
'_tag_parent_links': [link_info.get('row_id', '') for link_info in tag_parent_links],
'_tag_sub_links': [link_info.get('row_id', '') for link_info in tag_sub_links],
})
response = HttpResponse(
json.dumps(export_data, ensure_ascii=False),
content_type='application/json'
)
response['Content-Disposition'] = 'attachment; filename="tags.json"'
return response
except Exception as e:
logger.error(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
class MetadataImportTags(APIView):
authentication_classes = (TokenAuthentication, SessionAuthentication)
permission_classes = (IsAuthenticated,)
throttle_classes = (UserRateThrottle,)
def _handle_tag_links(self, new_tags, existing_tags, exist_tags_id_map, imported_existing_tags, resp, tags_table):
exist_tags_ids = [tag.get(tags_table.columns.id.name, '') for tag in existing_tags]
all_tags = new_tags + imported_existing_tags
tags_id_map = {}
imported_tags_ids = [tag_data.get(tags_table.columns.id.name, '') for tag_data in all_tags]
for index, tag in enumerate(new_tags):
old_tag_id = tag.get(tags_table.columns.id.name, '')
tag[tags_table.columns.id.name] = resp.get('row_ids', [])[index]
tags_id_map[old_tag_id] = tag.get(tags_table.columns.id.name, '')
tags_id_map.update(exist_tags_id_map)
processed_tags = []  # drop child links that point to tags not present in this import
for tag in new_tags:
child_tags_ids = tag.get(tags_table.columns.sub_links.key, [])
new_child_tags_ids = list(set(child_tags_ids) & set(imported_tags_ids))
tag[tags_table.columns.sub_links.key] = new_child_tags_ids
processed_tags.append(tag)
for tag in imported_existing_tags:
child_tags_ids = tag.get(tags_table.columns.sub_links.key, [])
new_child_tags_ids = list(set(child_tags_ids) & set(imported_tags_ids))
tag[tags_table.columns.sub_links.key] = new_child_tags_ids
# Update the imported tag ID to an existing tag ID on the server
tag[tags_table.columns.id.name] = tags_id_map[tag.get(tags_table.columns.id.name, '')]
processed_tags.append(tag)
child_links_map = {}
# map old child link ids to their new server-side ids and drop links to tags that already existed on the server
for tag in processed_tags:
tag_id = tag.get(tags_table.columns.id.name, '')
old_child_links = tag.get(tags_table.columns.sub_links.key, [])
new_child_links = [tags_id_map[link] for link in old_child_links if link in tags_id_map]
formatted_child_links = list(set(new_child_links) - set(exist_tags_ids))
if formatted_child_links:
child_links_map[tag_id] = formatted_child_links
return child_links_map
def _get_existing_tags(self, metadata_server_api, tag_names, tags_table):
tag_names_str = ', '.join([f'"{tag_name}"' for tag_name in tag_names])
sql = f'SELECT * FROM {tags_table.name} WHERE `{tags_table.columns.name.name}` in ({tag_names_str})'
exist_rows = metadata_server_api.query_rows(sql)
existing_tags = exist_rows.get('results', [])
for item in existing_tags:
tag_sub_links = item.get('_tag_sub_links', [])
if tag_sub_links:
sub_links = []
for link in tag_sub_links:
sub_links.append(link['row_id'])
item['_tag_sub_links'] = sub_links
return existing_tags
def _classify_tags(self, file_content, existing_tags, tags_table):
new_tags = []
imported_existing_tags = []
existing_id_map = {}
if existing_tags:
existing_tag_names = [tag.get(tags_table.columns.name.name, '') for tag in existing_tags]
processed_names = set()
for tag_data in file_content:
tag_name = tag_data.get(tags_table.columns.name.name, '')
if tag_name in existing_tag_names and tag_name not in processed_names:
idx = existing_tag_names.index(tag_name)
imported_existing_tags.append(tag_data)
existing_id_map[tag_data.get(tags_table.columns.id.name, '')] = (
existing_tags[idx].get(tags_table.columns.id.name, '')
)
elif tag_name not in processed_names:
new_tags.append(tag_data)
processed_names.add(tag_name)
else:
new_tags = file_content
return new_tags, imported_existing_tags, existing_id_map
def post(self, request, repo_id):
file = request.FILES.get('file', None)
if not file:
return api_error(status.HTTP_400_BAD_REQUEST, 'file invalid')
metadata = RepoMetadata.objects.filter(repo_id=repo_id).first()
if not metadata or not metadata.enabled or not metadata.tags_enabled:
error_msg = f'Tags are disabled for repo {repo_id}.'
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
repo = seafile_api.get_repo(repo_id)
if not repo:
error_msg = f'Library {repo_id} not found.'
return api_error(status.HTTP_404_NOT_FOUND, error_msg)
if not can_read_metadata(request, repo_id):
error_msg = 'Permission denied.'
return api_error(status.HTTP_403_FORBIDDEN, error_msg)
metadata_server_api = MetadataServerAPI(repo_id, request.user.username)
from seafevents.repo_metadata.constants import TAGS_TABLE
try:
tags_table = get_table_by_name(metadata_server_api, TAGS_TABLE.name)
if not tags_table:
return api_error(status.HTTP_404_NOT_FOUND, 'tags table not found')
tags_table_id = tags_table['id']
file_content = json.loads(file.read().decode('utf-8'))
tag_names = [tag.get(TAGS_TABLE.columns.name.name, '') for tag in file_content]
if not tag_names:
return Response({'success': True})
existing_tags = self._get_existing_tags(metadata_server_api, tag_names, TAGS_TABLE)
new_tags, imported_existing_tags, existing_id_map = self._classify_tags(
file_content, existing_tags, TAGS_TABLE
)
if new_tags:
create_tags_data = [
{
TAGS_TABLE.columns.name.name: tag.get(TAGS_TABLE.columns.name.name, ''),
TAGS_TABLE.columns.color.name: tag.get(TAGS_TABLE.columns.color.name, '')
}
for tag in new_tags
]
resp = metadata_server_api.insert_rows(tags_table_id, create_tags_data)
else:
return Response({'success': True})
# child_links_map structure: {tag_id: [child_tag_id_1, child_tag_id_2, ...]}
child_links_map = self._handle_tag_links(
new_tags, existing_tags, existing_id_map,
imported_existing_tags, resp, TAGS_TABLE
)
if child_links_map:
metadata_server_api.insert_link(TAGS_TABLE.self_link_id, tags_table_id, child_links_map, True)
except Exception as e:
logger.exception(e)
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')
return Response({'success': True})
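
For reference, the file produced by MetadataExportTags (and accepted by MetadataImportTags) is a flat JSON array whose keys match the export_data fields above; the values in this sample are invented for illustration:

// Illustrative sample of an exported tags file; ids, name and color are made up.
const exampleExportedTags = [
  {
    '_id': 'aBcDeFgHiJkLmNoP',
    '_tag_name': 'project-x',
    '_tag_color': '#FF8000',
    '_tag_parent_links': [],
    '_tag_sub_links': ['qRsTuVwXyZ123456'],
  },
];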

View File

@@ -3,7 +3,7 @@ from .apis import MetadataRecognizeFaces, MetadataRecords, MetadataManage, Metad
MetadataFolders, MetadataViews, MetadataViewsMoveView, MetadataViewsDetailView, MetadataViewsDuplicateView, FacesRecords, \
FaceRecognitionManage, FacesRecord, MetadataExtractFileDetails, PeoplePhotos, MetadataTagsStatusManage, MetadataTags, \
MetadataTagsLinks, MetadataFileTags, MetadataTagFiles, MetadataMergeTags, MetadataTagsFiles, MetadataDetailsSettingsView, \
MetadataOCRManageView, PeopleCoverPhoto, MetadataMigrateTags
MetadataOCRManageView, PeopleCoverPhoto, MetadataMigrateTags, MetadataExportTags, MetadataImportTags
urlpatterns = [
re_path(r'^$', MetadataManage.as_view(), name='api-v2.1-metadata'),
@@ -44,4 +44,6 @@ urlpatterns = [
re_path(r'^merge-tags/$', MetadataMergeTags.as_view(), name='api-v2.1-metadata-merge-tags'),
re_path(r'^tags-files/$', MetadataTagsFiles.as_view(), name='api-v2.1-metadata-tags-files'),
re_path(r'^migrate-tags/$', MetadataMigrateTags.as_view(), name='api-v2.1-metadata-migrate-tags'),
re_path(r'^export-tags/$', MetadataExportTags.as_view(), name='api-v2.1-metadata-export-tags'),
re_path(r'^import-tags/$', MetadataImportTags.as_view(), name='api-v2.1-metadata-import-tags'),
]