1
0
mirror of https://github.com/haiwen/seahub.git synced 2025-08-22 08:47:22 +00:00

add obj_id column (#6540)

* add obj_id column

* add size, suffix, and file detail columns

* remove obj_id for dir records
This commit is contained in:
JoinTyang 2024-08-15 11:43:35 +08:00 committed by GitHub
parent ad48b1cecf
commit ab0fe3041e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 126 additions and 83 deletions

View File

@ -264,6 +264,7 @@ class Store {
let valid_id_original_row_updates = {};
let valid_id_old_row_data = {};
let valid_id_original_old_row_data = {};
let id_obj_id = {};
originalRows.forEach(row => {
if (!row || !this.context.canModifyRow(row)) {
return;
@ -271,6 +272,7 @@ class Store {
const rowId = row._id;
valid_row_ids.push(rowId);
valid_id_row_updates[rowId] = id_row_updates[rowId];
id_obj_id[rowId] = row._obj_id;
valid_id_original_row_updates[rowId] = id_original_row_updates[rowId];
valid_id_old_row_data[rowId] = id_old_row_data[rowId];
valid_id_original_old_row_data[rowId] = id_original_old_row_data[rowId];
@ -286,6 +288,7 @@ class Store {
id_old_row_data: valid_id_old_row_data,
id_original_old_row_data: valid_id_original_old_row_data,
is_copy_paste,
id_obj_id: id_obj_id
});
this.applyOperation(operation);
}

View File

@ -19,7 +19,7 @@ export const OPERATION_TYPE = {
export const OPERATION_ATTRIBUTES = {
[OPERATION_TYPE.MODIFY_RECORD]: ['repo_id', 'row_id', 'updates', 'old_row_data', 'original_updates', 'original_old_row_data'],
[OPERATION_TYPE.MODIFY_RECORDS]: ['repo_id', 'row_ids', 'id_row_updates', 'id_original_row_updates', 'id_old_row_data', 'id_original_old_row_data', 'is_copy_paste'],
[OPERATION_TYPE.MODIFY_RECORDS]: ['repo_id', 'row_ids', 'id_row_updates', 'id_original_row_updates', 'id_old_row_data', 'id_original_old_row_data', 'is_copy_paste', 'id_obj_id'],
[OPERATION_TYPE.RESTORE_RECORDS]: ['repo_id', 'rows_data', 'original_rows', 'link_infos', 'upper_row_ids'],
[OPERATION_TYPE.RELOAD_RECORDS]: ['repo_id', 'row_ids'],
[OPERATION_TYPE.MODIFY_FILTERS]: ['repo_id', 'view_id', 'filter_conjunction', 'filters'],

View File

@ -20,9 +20,9 @@ class ServerOperator {
break;
}
case OPERATION_TYPE.MODIFY_RECORDS: {
const { repo_id, row_ids, id_row_updates, is_copy_paste } = operation;
const { repo_id, row_ids, id_row_updates, is_copy_paste, id_obj_id } = operation;
const rowsData = row_ids.map(rowId => {
return { record_id: rowId, record: id_row_updates[rowId] };
return { record_id: rowId, record: id_row_updates[rowId], obj_id: id_obj_id[rowId] };
});
window.sfMetadataContext.modifyRecords(repo_id, rowsData, is_copy_paste).then(res => {
callback({ operation });

View File

@ -244,7 +244,6 @@ class MetadataRecords(APIView):
metadata_server_api = MetadataServerAPI(repo_id, request.user.username)
from seafevents.repo_metadata.utils import METADATA_TABLE
columns = []
try:
columns_data = metadata_server_api.list_columns(METADATA_TABLE.id)
columns = columns_data.get('columns', [])
@ -255,33 +254,70 @@ class MetadataRecords(APIView):
sys_column_names = [column.get('name') for column in get_sys_columns()]
rows = []
record_id_to_record = {}
obj_id_to_record = {}
sql = f'SELECT `_id`, `_obj_id`, `_file_modifier` FROM `{METADATA_TABLE.name}` WHERE '
parameters = []
for record_data in records_data:
record_id = record_data.get('record_id', '')
record = record_data.get('record', {})
if record_id:
flag = False
obj_id = record_data.get('obj_id', '')
record_id = record_data.get('record_id', '')
if not record_id:
error_msg = 'record_id invalid.'
return api_error(status.HTTP_400_BAD_REQUEST, error_msg)
sql += f' `{METADATA_TABLE.columns.id.name}` = ? OR '
parameters.append(record_id)
record_id_to_record[record_id] = record
if obj_id and obj_id != '0000000000000000000000000000000000000000':
sql += f' `{METADATA_TABLE.columns.obj_id.name}` = ? OR '
parameters.append(obj_id)
obj_id_to_record[obj_id] = record
sql = sql.rstrip('OR ')
sql += ';'
try:
query_result = metadata_server_api.query_rows(sql, parameters)
except Exception as e:
logger.exception(e)
error_msg = 'Internal Server Error'
return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, error_msg)
results = query_result.get('results')
if not results:
# file or folder has been deleted
return Response({'success': True})
rows = []
for record in results:
obj_id = record.get('_obj_id')
record_id = record.get('_id')
to_updated_record = record_id_to_record.get(record_id)
if not to_updated_record:
to_updated_record = obj_id_to_record.get(obj_id)
update = {
METADATA_TABLE.columns.id.name: record_id,
}
for column_name, value in record.items():
for column_name, value in to_updated_record.items():
if column_name not in sys_column_names:
try:
column = next(column for column in columns if column['name'] == column_name)
flag = True
if value and column['type'] == 'date':
column_data = column.get('data', {})
format = column_data.get('format', 'YYYY-MM-DD')
datetime_obj = datetime.strptime(value, '%Y-%m-%d %H:%M' if 'HH:mm' in format else '%Y-%m-%d')
datetime_obj = datetime.strptime(value,
'%Y-%m-%d %H:%M' if 'HH:mm' in format else '%Y-%m-%d')
update[column_name] = datetime_to_isoformat_timestr(datetime_obj)
elif column['type'] == 'single-select' and not value:
update[column_name] = None
else:
update[column_name] = value
rows.append(update)
except Exception as e:
pass
if flag:
rows.append(update)
if rows:
try:
metadata_server_api.update_rows(METADATA_TABLE.id, rows)

View File

@ -43,7 +43,11 @@ def get_sys_columns():
METADATA_TABLE.columns.file_name.to_dict(),
METADATA_TABLE.columns.is_dir.to_dict(),
METADATA_TABLE.columns.file_type.to_dict(),
METADATA_TABLE.columns.location.to_dict()
METADATA_TABLE.columns.location.to_dict(),
METADATA_TABLE.columns.obj_id.to_dict(),
METADATA_TABLE.columns.size.to_dict(),
METADATA_TABLE.columns.suffix.to_dict(),
METADATA_TABLE.columns.file_details.to_dict(),
]
return columns