Mirror of https://github.com/haiwen/seahub.git, synced 2025-09-04 16:31:13 +00:00
feat(wiki): add wiki es search sup (#7087)
* feat(wiki): add wiki es search sup
* chore: optimize wiki check
* chore: fix index_local path
* fix: fix unit test
@@ -657,7 +657,7 @@ def get_seafes_env():
 def update_search_index():
     argv = [
         Utils.get_python_executable(),
-        '-m', 'seafes.index_local',
+        '-m', 'seafes.indexes.repo_file.index_local',
         '--loglevel', 'debug',
         'update',
     ]
@@ -677,7 +677,7 @@ def delete_search_index():
 
     argv = [
         Utils.get_python_executable(),
-        '-m', 'seafes.index_local',
+        '-m', 'seafes.indexes.repo_file.index_local',
        '--loglevel', 'debug',
         'clear',
     ]
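Both hunks fix the module path after seafes reorganized its indexers: seafes.index_local moved to seafes.indexes.repo_file.index_local. For illustration, a minimal sketch of how such an argv is executed; the subprocess wrapper below is an assumption, not the script's actual helper:

    import subprocess
    import sys

    def run_index_command(action):
        # action is 'update' or 'clear', mirroring the two hunks above.
        argv = [
            sys.executable,  # stands in for Utils.get_python_executable()
            '-m', 'seafes.indexes.repo_file.index_local',
            '--loglevel', 'debug',
            action,
        ]
        # seafes reads its configuration from the environment prepared by
        # get_seafes_env() in the surrounding script.
        return subprocess.call(argv)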
|
@@ -5,6 +5,7 @@ import time
 import logging
 import requests
 import json
+import os
 import datetime
 import urllib.request
 import urllib.parse
@@ -22,6 +23,7 @@ from seahub.utils import get_log_events_by_time, is_pro_version, is_org_context
 from seahub.settings import SEADOC_PRIVATE_KEY, FILE_CONVERTER_SERVER_URL, SECRET_KEY, \
     SEAFEVENTS_SERVER_URL
 
+
 try:
     from seahub.settings import MULTI_TENANCY
 except ImportError:
@@ -319,12 +321,3 @@ def event_export_status(task_id):
     resp = requests.get(url, params=params, headers=headers)
 
     return resp
-
-
-def wiki_search(params):
-    payload = {'exp': int(time.time()) + 300, }
-    token = jwt.encode(payload, SECRET_KEY, algorithm='HS256')
-    headers = {"Authorization": "Token %s" % token}
-    url = urljoin(SEAFEVENTS_SERVER_URL, '/wiki-search')
-    resp = requests.post(url, json=params, headers=headers)
-    return resp
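The helper deleted here is not lost: the same body reappears later in this diff as ai_search_wikis among the search utilities, so the JWT-signed call to the seafevents /wiki-search endpoint now lives alongside the other search helpers.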
|
@@ -22,7 +22,9 @@ from django.utils.translation import gettext as _
 from seahub.api2.authentication import TokenAuthentication
 from seahub.api2.throttling import UserRateThrottle
 from seahub.api2.utils import api_error, is_wiki_repo
-from seahub.api2.endpoints.utils import wiki_search
+from seahub.utils import HAS_FILE_SEARCH, HAS_FILE_SEASEARCH
+if HAS_FILE_SEARCH or HAS_FILE_SEASEARCH:
+    from seahub.search.utils import search_wikis, ai_search_wikis
 from seahub.utils.db_api import SeafileDB
 from seahub.wiki2.models import Wiki2 as Wiki
 from seahub.wiki.models import Wiki as OldWiki
@@ -33,7 +35,7 @@ from seahub.wiki2.utils import is_valid_wiki_name, get_wiki_config, WIKI_PAGES_D
     delete_page, move_nav, revert_nav, get_sub_ids_by_page_id, get_parent_id_stack, add_convert_wiki_task
 
 from seahub.utils import is_org_context, get_user_repos, is_pro_version, is_valid_dirent_name, \
-    get_no_duplicate_obj_name
+    get_no_duplicate_obj_name, HAS_FILE_SEARCH, HAS_FILE_SEASEARCH
 
 from seahub.views import check_folder_permission
 from seahub.base.templatetags.seahub_tags import email2nickname
@@ -1290,6 +1292,10 @@ class WikiSearch(APIView):
     throttle_classes = (UserRateThrottle, )
 
     def post(self, request):
+        if not HAS_FILE_SEARCH and not HAS_FILE_SEASEARCH:
+            error_msg = 'Search not supported.'
+            return api_error(status.HTTP_404_NOT_FOUND, error_msg)
+
         query = request.data.get('query')
         search_wiki = request.data.get('search_wiki')
 
@@ -1310,18 +1316,25 @@ class WikiSearch(APIView):
             'wiki': search_wiki,
             'count': count,
         }
-
-        try:
-            resp = wiki_search(params)
-            if resp.status_code == 500:
-                logger.error('search in wiki error status: %s body: %s', resp.status_code, resp.text)
-                return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')
-            resp_json = resp.json()
-        except Exception as e:
-            logger.error(e)
-            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')
-
-        return Response(resp_json, resp.status_code)
+        if HAS_FILE_SEARCH:
+            try:
+                results = search_wikis(search_wiki, query, count)
+            except Exception as e:
+                logger.error(e)
+                results = []
+            finally:
+                return Response({"results": results})
+        elif HAS_FILE_SEASEARCH:
+            try:
+                resp = ai_search_wikis(params)
+                if resp.status_code == 500:
+                    logger.error('search in wiki error status: %s body: %s', resp.status_code, resp.text)
+                    return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')
+                resp_json = resp.json()
+            except Exception as e:
+                logger.error(e)
+                return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')
+            return Response(resp_json, resp.status_code)
 
 
 class WikiConvertView(APIView):
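After this change, WikiSearch.post answers from Elasticsearch (search_wikis) when HAS_FILE_SEARCH is enabled, proxies SeaSearch (ai_search_wikis) when only HAS_FILE_SEASEARCH is, and otherwise returns 404 via the guard added above. A hypothetical client call; the URL path and token value are illustrative assumptions, not taken from this diff:

    import requests

    SERVER = 'https://seafile.example.com'   # assumed server address
    TOKEN = 'user-api-token'                 # assumed seahub API token

    resp = requests.post(
        SERVER + '/api/v2.1/wiki2/search/',  # assumed route for WikiSearch
        headers={'Authorization': 'Token %s' % TOKEN},
        json={'query': 'deploy', 'search_wiki': '<wiki-id>', 'count': 10},
    )
    print(resp.status_code, resp.json())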
|
@@ -19,7 +19,7 @@ import seaserv
 from seaserv import seafile_api
 
 os.environ['EVENTS_CONFIG_FILE'] = EVENTS_CONFIG_FILE
-from seafes import es_search
+from seafes import es_search, es_wiki_search
 
 # Get an instance of a logger
 logger = logging.getLogger(__name__)
@@ -203,6 +203,10 @@ def search_files(repos_map, search_path, keyword, obj_desc, start, size, org_id=
     return result, total
 
 
+def search_wikis(wiki_id, keyword, count):
+    return es_wiki_search(wiki_id, keyword, count)
+
+
 def ai_search_files(keyword, searched_repos, count, suffixes, search_path=None, obj_type=None):
     params = {
         'query': keyword,
@@ -226,6 +230,15 @@ def ai_search_files(keyword, searched_repos, count, suffixes, search_path=None,
     return files_found, total
 
 
+def ai_search_wikis(params):
+    payload = {'exp': int(time.time()) + 300, }
+    token = jwt.encode(payload, SECRET_KEY, algorithm='HS256')
+    headers = {"Authorization": "Token %s" % token}
+    url = urljoin(SEAFEVENTS_SERVER_URL, '/wiki-search')
+    resp = requests.post(url, json=params, headers=headers)
+    return resp
+
+
 def is_valid_date_type(data):
     try:
         data = int(data)
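ai_search_wikis signs each internal request with a short-lived (300-second) HS256 JWT carried as "Token <jwt>" in the Authorization header. For illustration, a sketch of how the receiving seafevents endpoint could validate it with PyJWT; the function and parameter names on the receiving side are assumptions:

    import jwt  # PyJWT, the same library used by ai_search_wikis

    def is_valid_internal_request(auth_header, secret_key):
        # Expects "Token <jwt>" exactly as ai_search_wikis sends it.
        if not auth_header or not auth_header.startswith('Token '):
            return False
        token = auth_header[len('Token '):]
        try:
            # PyJWT verifies the 'exp' claim automatically, so the
            # 300-second lifetime set by the sender is enforced here.
            jwt.decode(token, secret_key, algorithms=['HS256'])
            return True
        except jwt.InvalidTokenError:
            return False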
|