mirror of https://github.com/haiwen/seahub.git synced 2025-05-10 00:47:19 +00:00

Merge pull request from haiwen/face_recognition_menu

face recognition menu
Authored by JoinTyang on 2025-04-14 11:45:58 +08:00; committed by GitHub.
commit 59c719e64f (signature not verified: no known key found in database; GPG key ID B5690EEEBB952194)
10 changed files with 122 additions and 5 deletions


@@ -92,6 +92,20 @@ export const MetadataAIOperationsProvider = ({
    });
  }, [extractFilesDetails]);

  const faceRecognition = useCallback((objIds, { success_callback, fail_callback } = {}) => {
    const inProgressToaster = toaster.notifyInProgress(gettext('Detecting faces by AI...'), { duration: null });
    metadataAPI.recognizeFaces(repoID, objIds).then(res => {
      inProgressToaster.close();
      toaster.success(gettext('Faces detected'));
      success_callback && success_callback();
    }).catch(error => {
      inProgressToaster.close();
      const errorMessage = gettext('Failed to detect faces');
      toaster.danger(errorMessage);
      fail_callback && fail_callback();
    });
  }, [repoID]);

  return (
    <MetadataAIOperationsContext.Provider value={{
      enableMetadata,
@@ -104,6 +118,7 @@
      generateDescription,
      extractFilesDetails,
      extractFileDetails,
      faceRecognition,
    }}>
      {children}
    </MetadataAIOperationsContext.Provider>


@@ -334,6 +334,14 @@ class MetadataManagerAPI {
    return this.req.delete(url);
  };

  recognizeFaces = (repoID, objIds) => {
    const url = this.server + '/api/v2.1/repos/' + repoID + '/metadata/recognize-faces/';
    const params = {
      obj_ids: objIds,
    };
    return this.req.post(url, params);
  };

  getFaceData = (repoID, start = 0, limit = 1000) => {
    const url = this.server + '/api/v2.1/repos/' + repoID + '/metadata/face-records/?start=' + start + '&limit=' + limit;
    return this.req.get(url);
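
For reference, recognizeFaces simply POSTs the selected object IDs as JSON to the new endpoint. A minimal sketch of the equivalent request outside the browser, assuming a Seahub API token and base URL (placeholders below); the response body is whatever the face-recognition backend returns:

import requests

SERVER = 'https://seafile.example.com'   # assumption: your Seahub base URL
TOKEN = 'your-api-token'                 # assumption: a personal API token (Authorization: Token ...)
repo_id = 'repo-uuid'
obj_ids = ['obj-id-1', 'obj-id-2']       # the backend rejects more than 50 IDs per request

# Same URL the frontend helper builds: /api/v2.1/repos/{repo_id}/metadata/recognize-faces/
url = f'{SERVER}/api/v2.1/repos/{repo_id}/metadata/recognize-faces/'
resp = requests.post(
    url,
    json={'obj_ids': obj_ids},
    headers={'Authorization': f'Token {TOKEN}'},
    timeout=30,
)
print(resp.status_code, resp.json())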


@@ -44,6 +44,7 @@ export const EVENT_BUS_TYPE = {
  MOVE_RECORD: 'move_record',
  DELETE_RECORDS: 'delete_records',
  UPDATE_RECORD_DETAILS: 'update_record_details',
  UPDATE_FACE_RECOGNITION: 'update_face_recognition',
  GENERATE_DESCRIPTION: 'generate_description',
  OCR: 'ocr',
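
UPDATE_FACE_RECOGNITION is just a new key for the metadata view's event bus: the table publishes it with the selected records, and the view provider (next file) subscribes a handler and keeps the returned unsubscribe function for cleanup. A minimal sketch of that subscribe/dispatch pattern, written in Python for brevity and not taken from the project's eventBus implementation:

from collections import defaultdict
from typing import Callable

class EventBus:
    # Tiny pub/sub: subscribe() returns an unsubscribe callable, as the provider below relies on.
    def __init__(self):
        self._handlers = defaultdict(list)

    def subscribe(self, event_type: str, handler: Callable) -> Callable[[], None]:
        self._handlers[event_type].append(handler)
        def unsubscribe():
            self._handlers[event_type].remove(handler)
        return unsubscribe

    def dispatch(self, event_type: str, *args):
        for handler in list(self._handlers[event_type]):
            handler(*args)

bus = EventBus()
unsubscribe = bus.subscribe('update_face_recognition', lambda records: print(len(records), 'records selected'))
bus.dispatch('update_face_recognition', [{'_obj_id': 'abc'}])  # prints: 1 records selected
unsubscribe()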


@@ -38,7 +38,7 @@ export const MetadataViewProvider = ({
  const { collaborators } = useCollaborators();
  const { isBeingBuilt, setIsBeingBuilt } = useMetadata();
  const { onOCR, generateDescription, extractFilesDetails } = useMetadataAIOperations();
  const { onOCR, generateDescription, extractFilesDetails, faceRecognition } = useMetadataAIOperations();

  const tableChanged = useCallback(() => {
    setMetadata(storeRef.current.data);
@@ -348,6 +348,15 @@
    });
  }, [metadata, extractFilesDetails, modifyRecords]);

  const updateFaceRecognition = useCallback((records) => {
    const recordObjIds = records.map(record => getFileObjIdFromRecord(record));
    if (recordObjIds.length > 50) {
      toaster.danger(gettext('Select up to 50 files'));
      return;
    }
    faceRecognition(recordObjIds);
  }, [faceRecognition]);

  const updateRecordDescription = useCallback((record) => {
    const parentDir = getParentDirFromRecord(record);
    const fileName = getFileNameFromRecord(record);
@@ -429,6 +438,7 @@
    const unsubscribeMoveRecord = eventBus.subscribe(EVENT_BUS_TYPE.MOVE_RECORD, moveRecord);
    const unsubscribeDeleteRecords = eventBus.subscribe(EVENT_BUS_TYPE.DELETE_RECORDS, deleteRecords);
    const unsubscribeUpdateDetails = eventBus.subscribe(EVENT_BUS_TYPE.UPDATE_RECORD_DETAILS, updateRecordDetails);
    const unsubscribeUpdateFaceRecognition = eventBus.subscribe(EVENT_BUS_TYPE.UPDATE_FACE_RECOGNITION, updateFaceRecognition);
    const unsubscribeUpdateDescription = eventBus.subscribe(EVENT_BUS_TYPE.GENERATE_DESCRIPTION, updateRecordDescription);
    const unsubscribeOCR = eventBus.subscribe(EVENT_BUS_TYPE.OCR, ocr);
@@ -454,6 +464,7 @@
      unsubscribeMoveRecord();
      unsubscribeDeleteRecords();
      unsubscribeUpdateDetails();
      unsubscribeUpdateFaceRecognition();
      unsubscribeUpdateDescription();
      unsubscribeOCR();
      delayReloadDataTimer.current && clearTimeout(delayReloadDataTimer.current);
@@ -493,6 +504,7 @@
        updateCurrentPath: params.updateCurrentPath,
        updateSelectedRecordIds,
        updateRecordDetails,
        updateFaceRecognition,
        updateRecordDescription,
        ocr,
      }}
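
Note that the provider above refuses selections of more than 50 files, and the backend view below enforces the same limit with a 400 response. A caller that needs to process a larger set would have to batch its requests; a small illustrative sketch of that chunking (hypothetical helper, not part of this PR):

from typing import Iterable, Iterator, List

MAX_OBJ_IDS_PER_REQUEST = 50  # limit enforced both in updateFaceRecognition and in MetadataRecognizeFaces

def chunk_obj_ids(obj_ids: Iterable[str], size: int = MAX_OBJ_IDS_PER_REQUEST) -> Iterator[List[str]]:
    # Yield successive batches of at most `size` object IDs.
    batch: List[str] = []
    for obj_id in obj_ids:
        batch.append(obj_id)
        if len(batch) == size:
            yield batch
            batch = []
    if batch:
        yield batch

batches = list(chunk_obj_ids([f'obj-{i}' for i in range(120)]))
print([len(b) for b in batches])  # [50, 50, 20]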


@@ -30,13 +30,14 @@ const OPERATION = {
  RENAME_FILE: 'rename-file',
  FILE_DETAIL: 'file-detail',
  FILE_DETAILS: 'file-details',
  DETECT_FACES: 'detect-faces',
  MOVE: 'move',
};

const ContextMenu = ({
  isGroupView, selectedRange, selectedPosition, recordMetrics, recordGetterByIndex, onClearSelected, onCopySelected,
  getTableContentRect, getTableCanvasContainerRect, deleteRecords, selectNone, updateFileTags, moveRecord, addFolder, updateRecordDetails,
  updateRecordDescription, ocr,
  updateFaceRecognition, updateRecordDescription, ocr,
}) => {
  const currentRecord = useRef(null);
@@ -119,6 +120,13 @@ const ContextMenu = ({
    if (imageOrVideoRecords.length > 0) {
      list.push({ value: OPERATION.FILE_DETAILS, label: gettext('Extract file details'), records: imageOrVideoRecords });
    }

    const imageRecords = records.filter(record => {
      const fileName = getFileNameFromRecord(record);
      return Utils.imageCheck(fileName);
    });
    if (imageRecords.length > 0) {
      list.push({ value: OPERATION.DETECT_FACES, label: gettext('Detect faces'), records: imageRecords });
    }
    return list;
  }
@@ -148,6 +156,17 @@ const ContextMenu = ({
    if (imageOrVideoRecords.length > 0) {
      list.push({ value: OPERATION.FILE_DETAILS, label: gettext('Extract file details'), records: imageOrVideoRecords });
    }

    const imageRecords = records.filter(record => {
      const isFolder = checkIsDir(record);
      if (isFolder) return false;
      const canModifyRow = checkCanModifyRow(record);
      if (!canModifyRow) return false;
      const fileName = getFileNameFromRecord(record);
      return Utils.imageCheck(fileName);
    });
    if (imageRecords.length > 0) {
      list.push({ value: OPERATION.DETECT_FACES, label: gettext('Detect faces'), records: imageRecords });
    }
    return list;
  }
@@ -193,6 +212,9 @@ const ContextMenu = ({
    if (isImage || isVideo) {
      aiOptions.push({ value: OPERATION.FILE_DETAIL, label: gettext('Extract file detail'), record: record });
    }
    if (isImage) {
      aiOptions.push({ value: OPERATION.DETECT_FACES, label: gettext('Detect faces'), records: [record] });
    }
    if (descriptionColumn && isDescribableFile) {
      aiOptions.push({
@@ -305,6 +327,11 @@ const ContextMenu = ({
        updateRecordDetails([record]);
        break;
      }
      case OPERATION.DETECT_FACES: {
        const { records } = option;
        updateFaceRecognition(records);
        break;
      }
      case OPERATION.MOVE: {
        const { record } = option;
        if (!record) break;
@@ -315,7 +342,7 @@
        break;
      }
    }
  }, [repoID, onCopySelected, onClearSelected, updateRecordDescription, ocr, deleteRecords, toggleDeleteFolderDialog, selectNone, updateRecordDetails, toggleFileTagsRecord, toggleMoveDialog]);
  }, [repoID, onCopySelected, onClearSelected, updateRecordDescription, toggleFileTagsRecord, ocr, deleteRecords, toggleDeleteFolderDialog, selectNone, updateRecordDetails, updateFaceRecognition, toggleMoveDialog]);

  useEffect(() => {
    const unsubscribeToggleMoveDialog = window.sfMetadataContext.eventBus.subscribe(EVENT_BUS_TYPE.TOGGLE_MOVE_DIALOG, toggleMoveDialog);
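
In all three menu paths, "Detect faces" is only offered for image records: folders and rows the user cannot modify are skipped, and Utils.imageCheck presumably keys off the file extension. A rough Python equivalent of that filter, where the record keys and the extension list are illustrative assumptions rather than Seahub's exact definitions:

import os

IMAGE_EXTENSIONS = {'.jpg', '.jpeg', '.png', '.gif', '.bmp', '.webp'}  # assumption, not Seahub's exact list

def is_image(file_name: str) -> bool:
    return os.path.splitext(file_name)[1].lower() in IMAGE_EXTENSIONS

def detectable_records(records):
    # Keep non-folder, modifiable records whose file name looks like an image.
    return [
        r for r in records
        if not r.get('is_dir', False)     # mirrors checkIsDir
        and r.get('can_modify', True)     # mirrors checkCanModifyRow
        and is_image(r.get('file_name', ''))
    ]

print(detectable_records([
    {'file_name': 'portrait.jpg'},
    {'file_name': 'notes.md'},
    {'file_name': 'photos', 'is_dir': True},
]))  # only portrait.jpg survives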


@@ -30,6 +30,7 @@ const Table = () => {
    addFolder,
    updateSelectedRecordIds,
    updateRecordDetails,
    updateFaceRecognition,
    updateRecordDescription,
    ocr,
  } = useMetadataView();
@@ -182,6 +183,7 @@ const Table = () => {
      addFolder={addFolder}
      updateSelectedRecordIds={updateSelectedRecordIds}
      updateRecordDetails={updateRecordDetails}
      updateFaceRecognition={updateFaceRecognition}
      updateRecordDescription={updateRecordDescription}
      ocr={ocr}
    />


@@ -646,6 +646,7 @@ class Records extends Component {
        addFolder={this.props.addFolder}
        selectNone={this.selectNone}
        updateRecordDetails={this.props.updateRecordDetails}
        updateFaceRecognition={this.props.updateFaceRecognition}
        updateRecordDescription={this.props.updateRecordDescription}
        ocr={this.props.ocr}
      />


@@ -13,7 +13,7 @@ from seahub.api2.throttling import UserRateThrottle
from seahub.api2.authentication import TokenAuthentication
from seahub.repo_metadata.models import RepoMetadata, RepoMetadataViews
from seahub.views import check_folder_permission
from seahub.repo_metadata.utils import add_init_metadata_task, gen_unique_id, init_metadata, \
from seahub.repo_metadata.utils import add_init_metadata_task, recognize_faces, gen_unique_id, init_metadata, \
    get_unmodifiable_columns, can_read_metadata, init_faces, \
    extract_file_details, get_table_by_name, remove_faces_table, FACES_SAVE_PATH, \
    init_tags, init_tag_self_link_columns, remove_tags_table, add_init_face_recognition_task, init_ocr, \
@@ -1799,6 +1799,46 @@ class MetadataExtractFileDetails(APIView):
        return Response({'details': resp})


class MetadataRecognizeFaces(APIView):
    authentication_classes = (TokenAuthentication, SessionAuthentication)
    permission_classes = (IsAuthenticated,)
    throttle_classes = (UserRateThrottle,)

    def post(self, request, repo_id):
        obj_ids = request.data.get('obj_ids')
        if not obj_ids or not isinstance(obj_ids, list) or len(obj_ids) > 50:
            error_msg = 'obj_ids is invalid.'
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        record = RepoMetadata.objects.filter(repo_id=repo_id).first()
        if not record or not record.enabled:
            error_msg = f'The metadata module is disabled for repo {repo_id}.'
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        repo = seafile_api.get_repo(repo_id)
        if not repo:
            error_msg = 'Library %s not found.' % repo_id
            return api_error(status.HTTP_404_NOT_FOUND, error_msg)

        permission = check_folder_permission(request, repo_id, '/')
        if permission != 'rw':
            error_msg = 'Permission denied.'
            return api_error(status.HTTP_403_FORBIDDEN, error_msg)

        params = {
            'obj_ids': obj_ids,
            'repo_id': repo_id
        }

        try:
            resp = recognize_faces(params=params)
            resp_json = resp.json()
        except Exception as e:
            logger.exception(e)
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')

        return Response(resp_json, resp.status_code)


# tags
class MetadataTagsStatusManage(APIView):
    authentication_classes = (TokenAuthentication, SessionAuthentication)
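
Before proxying to the face-recognition backend, the view checks that obj_ids is a non-empty list of at most 50 IDs (400 otherwise), that the repo exists and has the metadata module enabled (404), and that the caller has 'rw' permission on the library root (403); any failure while calling the backend yields a 500, and otherwise the backend's status code and JSON body are passed through unchanged. A condensed sketch of just the payload rule, mirroring the check above (hypothetical helper, handy when building clients):

from typing import Optional

def validate_obj_ids(obj_ids) -> Optional[str]:
    # Return an error message, or None when the payload is acceptable.
    if not obj_ids or not isinstance(obj_ids, list) or len(obj_ids) > 50:
        return 'obj_ids is invalid.'
    return None

assert validate_obj_ids(['a', 'b']) is None
assert validate_obj_ids([]) == 'obj_ids is invalid.'
assert validate_obj_ids(['x'] * 51) == 'obj_ids is invalid.'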


@@ -1,5 +1,5 @@
from django.urls import re_path
from .apis import MetadataRecords, MetadataManage, MetadataColumns, MetadataRecord, \
from .apis import MetadataRecognizeFaces, MetadataRecords, MetadataManage, MetadataColumns, MetadataRecord, \
    MetadataFolders, MetadataViews, MetadataViewsMoveView, MetadataViewsDetailView, MetadataViewsDuplicateView, FacesRecords, \
    FaceRecognitionManage, FacesRecord, MetadataExtractFileDetails, PeoplePhotos, MetadataTagsStatusManage, MetadataTags, \
    MetadataTagsLinks, MetadataFileTags, MetadataTagFiles, MetadataMergeTags, MetadataTagsFiles, MetadataDetailsSettingsView, \
@@ -24,6 +24,7 @@ urlpatterns = [
    re_path(r'^people-photos/(?P<people_id>.+)/$', PeoplePhotos.as_view(), name='api-v2.1-metadata-people-photos-get-delete'),
    re_path(r'^people-photos/$', PeoplePhotos.as_view(), name='api-v2.1-metadata-people-photos-post'),
    re_path(r'^face-recognition/$', FaceRecognitionManage.as_view(), name='api-v2.1-metadata-face-recognition'),
    re_path(r'^recognize-faces/$', MetadataRecognizeFaces.as_view(), name='api-v2.1-metadata-recognize-faces'),
    re_path(r'^people-cover-photo/(?P<people_id>.+)/$', PeopleCoverPhoto.as_view(), name='api-v2.1-metadata-people-cover-photo'),
    re_path(r'^extract-file-details/$', MetadataExtractFileDetails.as_view(), name='api-v2.1-metadata-extract-file-details'),


@@ -41,6 +41,16 @@ def extract_file_details(params):
    resp = requests.post(url, json=params, headers=headers, timeout=30)
    return json.loads(resp.content)['details']


def recognize_faces(params):
    payload = {'exp': int(time.time()) + 300, }
    token = jwt.encode(payload, SECRET_KEY, algorithm='HS256')
    headers = {"Authorization": "Token %s" % token}
    url = urljoin(SEAFEVENTS_SERVER_URL, '/recognize-faces')
    resp = requests.post(url, json=params, headers=headers, timeout=30)
    return resp


def update_people_cover_photo(params):
    payload = {'exp': int(time.time()) + 300, }
    token = jwt.encode(payload, SECRET_KEY, algorithm='HS256')
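
Like extract_file_details and update_people_cover_photo, recognize_faces authorizes its internal call to the seafevents server with a short-lived HS256 JWT signed by the shared SECRET_KEY and sent in an "Authorization: Token ..." header. A minimal PyJWT sketch of that pattern; the verification half shows how the receiving service would presumably check the token and is not code from this PR:

import time
import jwt  # PyJWT

SECRET_KEY = 'shared-secret'  # assumption: placeholder for the secret shared with seafevents

def make_internal_token(ttl_seconds: int = 300) -> str:
    # Sign a token that expires after ttl_seconds, as the helpers above do.
    payload = {'exp': int(time.time()) + ttl_seconds}
    return jwt.encode(payload, SECRET_KEY, algorithm='HS256')

def verify_internal_token(token: str) -> bool:
    # Presumed check on the seafevents side: valid signature and not yet expired.
    try:
        jwt.decode(token, SECRET_KEY, algorithms=['HS256'])
        return True
    except jwt.InvalidTokenError:
        return False

token = make_internal_token()
print(verify_internal_token(token))               # True
print(verify_internal_token(token + 'tampered'))  # False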