def wiki_search(params):
    """Forward a wiki search request to the seafevents service.

    Builds a short-lived (5 minute) JWT signed with SECRET_KEY, then POSTs
    *params* as JSON to the seafevents ``/wiki-search`` endpoint.

    :param params: dict with the search payload (query, wikis, count).
    :return: the raw ``requests.Response`` from seafevents; the caller is
        responsible for checking the status code and decoding the body.
    """
    # Token expires 300 s from now — mirrors the other seafevents helpers
    # in this module (e.g. event_export_status).
    claims = {'exp': int(time.time()) + 300}
    token = jwt.encode(claims, SECRET_KEY, algorithm='HS256')
    auth_headers = {'Authorization': 'Token %s' % token}
    endpoint = urljoin(SEAFEVENTS_SERVER_URL, '/wiki-search')
    return requests.post(endpoint, json=params, headers=auth_headers)
class WikiSearch(APIView):
    """Search pages across the caller's wikis via the seafevents service.

    POST /api/v2.1/wiki2/search/
    body: query (required), search_wiki ('all' or a repo id, default 'all'),
          count (page size, default 20).
    """
    authentication_classes = (TokenAuthentication, SessionAuthentication)
    permission_classes = (IsAuthenticated, )
    throttle_classes = (UserRateThrottle, )

    def post(self, request):
        query = request.data.get('query')
        search_wiki = request.data.get('search_wiki', 'all')

        try:
            count = int(request.data.get('count'))
        except (TypeError, ValueError):
            # 'count' missing (int(None) -> TypeError) or non-numeric
            # (-> ValueError): fall back to the default page size. Narrowed
            # from a bare `except:` so unrelated errors are not swallowed.
            count = 20

        if not query:
            return api_error(status.HTTP_400_BAD_REQUEST, 'wiki search query invalid')

        if not is_valid_repo_id_format(search_wiki) and search_wiki != 'all':
            error_msg = 'search_wiki invalid.'
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        if search_wiki == 'all':
            org_id = request.user.org.org_id if is_org_context(request) else None

            username = request.user.username
            key = normalize_cache_key(username, RELATED_WIKIS_PREFIX)

            # NOTE(review): an empty wiki list is cached but `if not wikis`
            # treats it as a miss, so users with no wikis recompute every
            # request — consider a sentinel if that becomes hot.
            wikis = cache.get(key, [])
            if not wikis:
                wikis = get_search_wiki_ids(username, org_id)[:SEARCH_WIKIS_LIMIT]
                cache.set(key, wikis, RELATED_WIKIS_CACHE_TIMEOUT)
        else:
            # NOTE(review): this branch sends a bare repo-id string while the
            # 'all' branch sends a list — confirm seafevents accepts both.
            wikis = search_wiki
        params = {
            'query': query,
            'wikis': wikis,
            'count': count,
        }

        try:
            resp = wiki_search(params)
            if resp.status_code == 500:
                logger.error('search in wiki error status: %s body: %s',
                             resp.status_code, resp.text)
                return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')
            resp_json = resp.json()
        except Exception as e:
            logger.error(e)
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')

        # Pass the seafevents status through so non-500 errors surface as-is.
        return Response(resp_json, resp.status_code)
def get_search_wiki_ids(username, org_id):
    """Collect the de-duplicated repo ids of every wiki the user can reach.

    Gathers owned, shared and group repos (public repos are deliberately
    excluded), keeps only wiki repos, and prefers a wiki's origin repo id
    over its own id when one is set.

    :param username: the user whose wikis to enumerate.
    :param org_id: organisation id, or None outside an org context.
    :return: list of unique repo ids (order is unspecified).
    """
    owned, shared, grouped, _public = get_user_repos(username, org_id=org_id)

    seen = set()
    for repo in owned + shared + grouped:
        if not is_wiki_repo(repo):
            continue
        # A virtual/linked wiki is identified by its origin repo when present.
        seen.add(repo.origin_repo_id if repo.origin_repo_id else repo.id)

    return list(seen)
wiki_publish_view from seahub.api2.endpoints.wiki2 import Wikis2View, Wiki2View, Wiki2ConfigView, Wiki2PagesView, Wiki2PageView, \ - Wiki2DuplicatePageView, WikiPageTrashView, Wiki2PublishView, Wiki2PublishConfigView, Wiki2PublishPageView + Wiki2DuplicatePageView, WikiPageTrashView, Wiki2PublishView, Wiki2PublishConfigView, Wiki2PublishPageView, \ + WikiSearch from seahub.api2.endpoints.subscription import SubscriptionView, SubscriptionPlansView, SubscriptionLogsView from seahub.api2.endpoints.metadata_manage import MetadataRecords, MetadataManage, MetadataColumns, MetadataRecordInfo, \ MetadataViews, MetadataViewsMoveView, MetadataViewsDetailView, MetadataViewsDuplicateView @@ -553,6 +554,8 @@ re_path(r'^api/v2.1/wiki2/(?P[-0-9a-f]{36})/duplicate-page/$', Wiki2DuplicatePageView.as_view(), name='api-v2.1-wiki2-duplicate-page'), re_path(r'^api/v2.1/wiki2/(?P[-0-9a-f]{36})/trash/', WikiPageTrashView.as_view(), name='api-v2.1-wiki2-trash'), re_path(r'^api/v2.1/wiki2/(?P[-0-9a-f]{36})/publish/$', Wiki2PublishView.as_view(), name='api-v2.1-wiki2-publish'), + re_path(r'^api/v2.1/wiki2/search/$', WikiSearch.as_view(), name='api-v2.1-wiki2-search'), + ## user::drafts re_path(r'^api/v2.1/drafts/$', DraftsView.as_view(), name='api-v2.1-drafts'), re_path(r'^api/v2.1/drafts/(?P\d+)/$', DraftView.as_view(), name='api-v2.1-draft'), diff --git a/seahub/wiki2/utils.py b/seahub/wiki2/utils.py index ac3dc3ab4b1..e5b577a9b21 100644 --- a/seahub/wiki2/utils.py +++ b/seahub/wiki2/utils.py @@ -22,6 +22,10 @@ WIKI_CONFIG_PATH = '_Internal/Wiki' WIKI_CONFIG_FILE_NAME = 'index.json' +SEARCH_WIKIS_LIMIT = 200 +RELATED_WIKIS_PREFIX = 'RELATED_WIKIS_' +RELATED_WIKIS_CACHE_TIMEOUT = 2 * 60 * 60 + def is_valid_wiki_name(name): name = name.strip()