feat/seasearch: add wiki search api
cir9no committed Aug 21, 2024
1 parent 1161d43 commit 0dab3d8
Showing 5 changed files with 92 additions and 5 deletions.
9 changes: 9 additions & 0 deletions seahub/api2/endpoints/utils.py
@@ -316,3 +316,12 @@ def event_export_status(task_id):
    resp = requests.get(url, params=params, headers=headers)

    return resp


def wiki_search(params):
    payload = {'exp': int(time.time()) + 300, }
    token = jwt.encode(payload, SECRET_KEY, algorithm='HS256')
    headers = {"Authorization": "Token %s" % token}
    url = urljoin(SEAFEVENTS_SERVER_URL, '/wiki-search')
    resp = requests.post(url, json=params, headers=headers)
    return resp
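
The helper signs a short-lived JWT (five-minute expiry) with the shared SECRET_KEY and posts the search parameters as JSON to the seafevents /wiki-search endpoint; the WikiSearch view added below calls it with a dict of 'query', 'wikis', and 'count' and relays the JSON response. As a minimal sketch of the receiving side of that handshake (not part of this commit; the function name and error handling are assumptions, only the "Token <jwt>" header format, the HS256 algorithm, and the shared secret follow from the code above), a service holding the same secret could validate the header like this:

import jwt   # PyJWT, the same library wiki_search() uses to encode the token

SECRET_KEY = 'shared-secret'   # illustrative; in practice this is the secret shared with seafevents


def is_valid_wiki_search_token(auth_header):
    """Check an 'Authorization: Token <jwt>' header produced by wiki_search()."""
    if not auth_header or not auth_header.startswith('Token '):
        return False
    token = auth_header[len('Token '):]
    try:
        # decode() verifies the HS256 signature and rejects tokens whose 'exp' claim has passed.
        jwt.decode(token, SECRET_KEY, algorithms=['HS256'])
    except jwt.PyJWTError:
        return False
    return True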
63 changes: 59 additions & 4 deletions seahub/api2/endpoints/wiki2.py
@@ -20,26 +20,30 @@
from seaserv import seafile_api, edit_repo
from pysearpc import SearpcError
from django.utils.translation import gettext as _
from django.core.cache import cache

from seahub.api2.authentication import TokenAuthentication
from seahub.api2.throttling import UserRateThrottle
from seahub.api2.utils import api_error, to_python_boolean, is_wiki_repo
from seahub.api2.utils import api_error, to_python_boolean, is_wiki_repo, get_search_wiki_ids
from seahub.api2.endpoints.utils import wiki_search
from seahub.utils.db_api import SeafileDB
from seahub.wiki2.models import Wiki2 as Wiki
from seahub.wiki2.models import WikiPageTrash
from seahub.wiki2.utils import is_valid_wiki_name, can_edit_wiki, get_wiki_dirs_by_path, \
    get_wiki_config, WIKI_PAGES_DIR, WIKI_CONFIG_PATH, WIKI_CONFIG_FILE_NAME, is_group_wiki, \
    check_wiki_admin_permission, check_wiki_permission, get_all_wiki_ids, get_and_gen_page_nav_by_id, \
    get_current_level_page_ids, save_wiki_config, gen_unique_id, gen_new_page_nav_by_id, pop_nav, \
    delete_page, move_nav, revert_nav, get_sub_ids_by_page_id, get_parent_id_stack
    delete_page, move_nav, revert_nav, get_sub_ids_by_page_id, get_parent_id_stack, SEARCH_WIKIS_LIMIT, \
    RELATED_WIKIS_PREFIX, RELATED_WIKIS_CACHE_TIMEOUT

from seahub.utils import is_org_context, get_user_repos, gen_inner_file_get_url, gen_file_upload_url, \
    normalize_dir_path, is_pro_version, check_filename_with_rename, is_valid_dirent_name, get_no_duplicate_obj_name
    normalize_dir_path, is_pro_version, check_filename_with_rename, is_valid_dirent_name, get_no_duplicate_obj_name, \
    normalize_cache_key
from seahub.views import check_folder_permission
from seahub.views.file import send_file_access_msg
from seahub.base.templatetags.seahub_tags import email2nickname
from seahub.utils.file_op import check_file_lock, ONLINE_OFFICE_LOCK_OWNER, if_locked_by_online_office
from seahub.utils.repo import parse_repo_perm, get_repo_owner
from seahub.utils.repo import parse_repo_perm, get_repo_owner, is_valid_repo_id_format
from seahub.seadoc.utils import get_seadoc_file_uuid, gen_seadoc_access_token, copy_sdoc_images_with_sdoc_uuid
from seahub.settings import SEADOC_SERVER_URL, ENABLE_STORAGE_CLASSES, STORAGE_CLASS_MAPPING_POLICY, \
ENCRYPTED_LIBRARY_VERSION
@@ -1074,3 +1078,54 @@ def delete(self, request, wiki_id):
        return Response({'success': True})


class WikiSearch(APIView):
    authentication_classes = (TokenAuthentication, SessionAuthentication)
    permission_classes = (IsAuthenticated, )
    throttle_classes = (UserRateThrottle, )

    def post(self, request):
        query = request.data.get('query')
        search_wiki = request.data.get('search_wiki', 'all')

        try:
            count = int(request.data.get('count'))
        except (TypeError, ValueError):
            # Fall back to the default page size when 'count' is missing or not an integer.
            count = 20

        if not query:
            return api_error(status.HTTP_400_BAD_REQUEST, 'wiki search query invalid')

        if not is_valid_repo_id_format(search_wiki) and search_wiki != 'all':
            error_msg = 'search_wiki invalid.'
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        if search_wiki == 'all':
            org_id = request.user.org.org_id if is_org_context(request) else None

            username = request.user.username
            key = normalize_cache_key(username, RELATED_WIKIS_PREFIX)

            # Cache the user's searchable wiki ids so repeated searches skip the repo listing.
            wikis = cache.get(key, [])
            if not wikis:
                wikis = get_search_wiki_ids(username, org_id)[:SEARCH_WIKIS_LIMIT]
                cache.set(key, wikis, RELATED_WIKIS_CACHE_TIMEOUT)
        else:
            wikis = search_wiki
        params = {
            'query': query,
            'wikis': wikis,
            'count': count,
        }

        try:
            resp = wiki_search(params)
            if resp.status_code == 500:
                logger.error('search in wiki error status: %s body: %s', resp.status_code, resp.text)
                return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')
            resp_json = resp.json()
        except Exception as e:
            logger.error(e)
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')

        return Response(resp_json, resp.status_code)
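
Once the route is registered in urls.py below, the endpoint can be exercised with Seahub's regular token authentication. The following usage sketch is not part of this commit: the host and API token are placeholders, while the field names query, search_wiki, and count come straight from the view above.

import requests

SEAHUB_HOST = 'https://seahub.example.com'   # placeholder server
API_TOKEN = 'your-api-token'                 # placeholder Seahub API token

resp = requests.post(
    SEAHUB_HOST + '/api/v2.1/wiki2/search/',
    headers={'Authorization': 'Token %s' % API_TOKEN},
    json={
        'query': 'deployment guide',   # required; an empty query is rejected with 400
        'search_wiki': 'all',          # 'all' or a single wiki (repo) UUID
        'count': 20,                   # optional; falls back to 20 when missing or invalid
    },
)
print(resp.status_code)
print(resp.json())

The response body is passed through unchanged from the seafevents wiki-search service, so its exact shape depends on that service.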
18 changes: 18 additions & 0 deletions seahub/api2/utils.py
@@ -308,6 +308,24 @@ def get_search_repos(username, org_id):

    return repos

def get_search_wiki_ids(username, org_id):
    owned_wikis, shared_wikis, group_wikis, public_wikis = get_user_repos(username, org_id=org_id)
    wiki_list = owned_wikis + shared_wikis + group_wikis

    wiki_id_set = set()
    for wiki in wiki_list:
        if not is_wiki_repo(wiki):
            continue
        wiki_id = wiki.id
        if wiki.origin_repo_id:
            wiki_id = wiki.origin_repo_id

        if wiki_id in wiki_id_set:
            continue
        wiki_id_set.add(wiki_id)

    return list(wiki_id_set)

def send_share_link_emails(emails, fs, shared_from):
    subject = _("A share link for you")
    for email in emails:
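
For illustration only (not part of the commit), the de-duplication rule in get_search_wiki_ids can be traced with two stand-in repo objects: a wiki that lives in another library reports that library through origin_repo_id, so both entries collapse to one searchable id. The FakeRepo type is hypothetical, and the is_wiki_repo() filter is skipped for brevity.

from collections import namedtuple

# Stand-in for the repo objects returned by get_user_repos(); only the two
# attributes used by get_search_wiki_ids() are modelled here.
FakeRepo = namedtuple('FakeRepo', ['id', 'origin_repo_id'])

wikis = [
    FakeRepo('repo-a', None),        # a wiki stored in its own library
    FakeRepo('repo-b', 'repo-a'),    # e.g. a shared entry pointing back to the same library
]

wiki_id_set = set()
for wiki in wikis:
    wiki_id = wiki.origin_repo_id or wiki.id
    wiki_id_set.add(wiki_id)

print(list(wiki_id_set))   # ['repo-a']; duplicates collapse to one searchable wiki id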
3 changes: 2 additions & 1 deletion seahub/urls.py
@@ -206,7 +206,7 @@
from seahub.ai.apis import Search
from seahub.wiki2.views import wiki_view
from seahub.api2.endpoints.wiki2 import Wikis2View, Wiki2View, Wiki2ConfigView, Wiki2PagesView, Wiki2PageView, \
    Wiki2DuplicatePageView, WikiPageTrashView
    Wiki2DuplicatePageView, WikiPageTrashView, WikiSearch
from seahub.api2.endpoints.subscription import SubscriptionView, SubscriptionPlansView, SubscriptionLogsView
from seahub.api2.endpoints.metadata_manage import MetadataRecords, MetadataManage, MetadataColumns, MetadataRecordInfo, \
MetadataViews, MetadataViewsMoveView, MetadataViewsDetailView
@@ -543,6 +543,7 @@
    re_path(r'^api/v2.1/wiki2/(?P<wiki_id>[-0-9a-f]{36})/page/(?P<page_id>[-0-9a-zA-Z]{4})/$', Wiki2PageView.as_view(), name='api-v2.1-wiki2-page'),
    re_path(r'^api/v2.1/wiki2/(?P<wiki_id>[-0-9a-f]{36})/duplicate-page/$', Wiki2DuplicatePageView.as_view(), name='api-v2.1-wiki2-duplicate-page'),
    re_path(r'^api/v2.1/wiki2/(?P<wiki_id>[-0-9a-f]{36})/trash/', WikiPageTrashView.as_view(), name='api-v2.1-wiki2-trash'),
    re_path(r'^api/v2.1/wiki2/search/$', WikiSearch.as_view(), name='api-v2.1-wiki2-search'),

    ## user::drafts
    re_path(r'^api/v2.1/drafts/$', DraftsView.as_view(), name='api-v2.1-drafts'),
4 changes: 4 additions & 0 deletions seahub/wiki2/utils.py
@@ -22,6 +22,10 @@
WIKI_CONFIG_PATH = '_Internal/Wiki'
WIKI_CONFIG_FILE_NAME = 'index.json'

SEARCH_WIKIS_LIMIT = 200
RELATED_WIKIS_PREFIX = 'RELATED_WIKIS_'
RELATED_WIKIS_CACHE_TIMEOUT = 2 * 60 * 60


def is_valid_wiki_name(name):
    name = name.strip()
