Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat/seasearch: add wiki search api #6606

Open
wants to merge 2 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions seahub/api2/endpoints/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -319,3 +319,12 @@ def event_export_status(task_id):
resp = requests.get(url, params=params, headers=headers)

return resp


def wiki_search(params):
    """POST *params* to the seafevents ``/wiki-search`` endpoint.

    Authenticates with a short-lived JWT (5 minute expiry) signed with
    SECRET_KEY and returns the raw ``requests.Response`` object; callers
    inspect ``status_code`` / ``json()`` themselves.
    """
    expire_at = int(time.time()) + 300
    token = jwt.encode({'exp': expire_at, }, SECRET_KEY, algorithm='HS256')
    auth_headers = {"Authorization": "Token %s" % token}
    search_url = urljoin(SEAFEVENTS_SERVER_URL, '/wiki-search')
    return requests.post(search_url, json=params, headers=auth_headers)
64 changes: 60 additions & 4 deletions seahub/api2/endpoints/wiki2.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,25 +21,29 @@
from seaserv import seafile_api, edit_repo
from pysearpc import SearpcError
from django.utils.translation import gettext as _
from django.core.cache import cache

from seahub.api2.authentication import TokenAuthentication
from seahub.api2.throttling import UserRateThrottle
from seahub.api2.utils import api_error, to_python_boolean, is_wiki_repo
from seahub.api2.utils import api_error, to_python_boolean, is_wiki_repo, get_search_wiki_ids
from seahub.api2.endpoints.utils import wiki_search
from seahub.utils.db_api import SeafileDB
from seahub.wiki2.models import Wiki2 as Wiki
from seahub.wiki2.models import WikiPageTrash, Wiki2Publish
from seahub.wiki2.utils import is_valid_wiki_name, can_edit_wiki, get_wiki_dirs_by_path, \
get_wiki_config, WIKI_PAGES_DIR, WIKI_CONFIG_PATH, WIKI_CONFIG_FILE_NAME, is_group_wiki, \
check_wiki_admin_permission, check_wiki_permission, get_all_wiki_ids, get_and_gen_page_nav_by_id, \
get_current_level_page_ids, save_wiki_config, gen_unique_id, gen_new_page_nav_by_id, pop_nav, \
delete_page, move_nav, revert_nav, get_sub_ids_by_page_id, get_parent_id_stack
delete_page, move_nav, revert_nav, get_sub_ids_by_page_id, get_parent_id_stack, SEARCH_WIKIS_LIMIT, \
RELATED_WIKIS_PREFIX, RELATED_WIKIS_CACHE_TIMEOUT

from seahub.utils import is_org_context, get_user_repos, gen_inner_file_get_url, gen_file_upload_url, \
normalize_dir_path, is_pro_version, check_filename_with_rename, is_valid_dirent_name, get_no_duplicate_obj_name
normalize_dir_path, is_pro_version, check_filename_with_rename, is_valid_dirent_name, get_no_duplicate_obj_name, \
normalize_cache_key
from seahub.views import check_folder_permission
from seahub.base.templatetags.seahub_tags import email2nickname
from seahub.utils.file_op import check_file_lock, ONLINE_OFFICE_LOCK_OWNER, if_locked_by_online_office
from seahub.utils.repo import parse_repo_perm, get_repo_owner
from seahub.utils.repo import parse_repo_perm, get_repo_owner, is_valid_repo_id_format
from seahub.seadoc.utils import get_seadoc_file_uuid, gen_seadoc_access_token, copy_sdoc_images_with_sdoc_uuid
from seahub.settings import SEADOC_SERVER_URL, ENABLE_STORAGE_CLASSES, STORAGE_CLASS_MAPPING_POLICY, \
ENCRYPTED_LIBRARY_VERSION
Expand Down Expand Up @@ -1267,3 +1271,55 @@ def delete(self, request, wiki_id):
if publish_config:
publish_config.delete()
return Response({'success': True})


class WikiSearch(APIView):
    """Search wiki pages through the seafevents wiki-search service.

    POST body:
        query (str, required): the search keywords.
        search_wiki (str): a repo id to restrict the search to, or 'all'
            (default) to search every wiki related to the current user.
        count (int): maximum number of results; defaults to 20.
    """
    authentication_classes = (TokenAuthentication, SessionAuthentication)
    permission_classes = (IsAuthenticated, )
    throttle_classes = (UserRateThrottle, )

    def post(self, request):
        query = request.data.get('query')
        search_wiki = request.data.get('search_wiki', 'all')

        # Fall back to the default page size when 'count' is missing or not
        # an integer. A bare `except:` here would also swallow unrelated
        # errors (even KeyboardInterrupt), so catch only the conversion ones.
        try:
            count = int(request.data.get('count'))
        except (TypeError, ValueError):
            count = 20

        if not query:
            return api_error(status.HTTP_400_BAD_REQUEST, 'wiki search query invalid')

        if not is_valid_repo_id_format(search_wiki) and search_wiki != 'all':
            error_msg = 'search_wiki invalid.'
            return api_error(status.HTTP_400_BAD_REQUEST, error_msg)

        if search_wiki == 'all':
            org_id = request.user.org.org_id if is_org_context(request) else None

            username = request.user.username
            key = normalize_cache_key(username, RELATED_WIKIS_PREFIX)

            # Cache the per-user wiki id list to avoid recomputing it on
            # every search request.
            wikis = cache.get(key, [])
            if not wikis:
                wikis = get_search_wiki_ids(username, org_id)[:SEARCH_WIKIS_LIMIT]
                cache.set(key, wikis, RELATED_WIKIS_CACHE_TIMEOUT)
        else:
            wikis = search_wiki
        params = {
            'query': query,
            'wikis': wikis,
            'count': count,
        }

        try:
            resp = wiki_search(params)
            if resp.status_code == 500:
                logger.error('search in wiki error status: %s body: %s', resp.status_code, resp.text)
                return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')
            resp_json = resp.json()
        except Exception as e:
            logger.error(e)
            return api_error(status.HTTP_500_INTERNAL_SERVER_ERROR, 'Internal Server Error')

        return Response(resp_json, resp.status_code)
18 changes: 18 additions & 0 deletions seahub/api2/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -313,6 +313,24 @@ def get_search_repos(username, org_id):

return repos

def get_search_wiki_ids(username, org_id):
    """Return a de-duplicated list of searchable wiki repo ids for *username*.

    Only owned, shared and group repos that are wiki repos are considered
    (public repos are ignored). For a wiki backed by another library
    (``origin_repo_id`` set), the origin repo id is used instead.
    """
    # NOTE(review): get_user_repos returns repo objects here, not wiki
    # objects, hence the repo-oriented local names.
    owned_repos, shared_repos, group_repos, _public_repos = get_user_repos(username, org_id=org_id)

    repo_ids = set()
    for repo in owned_repos + shared_repos + group_repos:
        if not is_wiki_repo(repo):
            continue
        # The set takes care of de-duplication.
        repo_ids.add(repo.origin_repo_id or repo.id)

    return list(repo_ids)

def send_share_link_emails(emails, fs, shared_from):
subject = _("A share link for you")
for email in emails:
Expand Down
5 changes: 4 additions & 1 deletion seahub/urls.py
Original file line number Diff line number Diff line change
Expand Up @@ -208,7 +208,8 @@
from seahub.ocm.settings import OCM_ENDPOINT
from seahub.wiki2.views import wiki_view, wiki_publish_view
from seahub.api2.endpoints.wiki2 import Wikis2View, Wiki2View, Wiki2ConfigView, Wiki2PagesView, Wiki2PageView, \
Wiki2DuplicatePageView, WikiPageTrashView, Wiki2PublishView, Wiki2PublishConfigView, Wiki2PublishPageView
Wiki2DuplicatePageView, WikiPageTrashView, Wiki2PublishView, Wiki2PublishConfigView, Wiki2PublishPageView, \
WikiSearch
from seahub.api2.endpoints.subscription import SubscriptionView, SubscriptionPlansView, SubscriptionLogsView
from seahub.api2.endpoints.metadata_manage import MetadataRecords, MetadataManage, MetadataColumns, MetadataRecordInfo, \
MetadataViews, MetadataViewsMoveView, MetadataViewsDetailView, MetadataViewsDuplicateView
Expand Down Expand Up @@ -553,6 +554,8 @@
re_path(r'^api/v2.1/wiki2/(?P<wiki_id>[-0-9a-f]{36})/duplicate-page/$', Wiki2DuplicatePageView.as_view(), name='api-v2.1-wiki2-duplicate-page'),
re_path(r'^api/v2.1/wiki2/(?P<wiki_id>[-0-9a-f]{36})/trash/', WikiPageTrashView.as_view(), name='api-v2.1-wiki2-trash'),
re_path(r'^api/v2.1/wiki2/(?P<wiki_id>[-0-9a-f]{36})/publish/$', Wiki2PublishView.as_view(), name='api-v2.1-wiki2-publish'),
re_path(r'^api/v2.1/wiki2/search/$', WikiSearch.as_view(), name='api-v2.1-wiki2-search'),

## user::drafts
re_path(r'^api/v2.1/drafts/$', DraftsView.as_view(), name='api-v2.1-drafts'),
re_path(r'^api/v2.1/drafts/(?P<pk>\d+)/$', DraftView.as_view(), name='api-v2.1-draft'),
Expand Down
4 changes: 4 additions & 0 deletions seahub/wiki2/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,10 @@
WIKI_CONFIG_PATH = '_Internal/Wiki'
WIKI_CONFIG_FILE_NAME = 'index.json'

SEARCH_WIKIS_LIMIT = 200
RELATED_WIKIS_PREFIX = 'RELATED_WIKIS_'
RELATED_WIKIS_CACHE_TIMEOUT = 2 * 60 * 60


def is_valid_wiki_name(name):
name = name.strip()
Expand Down
Loading