From 1a8985ba780c71d9f57128fcddf9a364eb77c71b Mon Sep 17 00:00:00 2001 From: Ivar Flakstad <69173633+ivarflakstad@users.noreply.github.com> Date: Tue, 25 Jun 2024 20:53:19 +0200 Subject: [PATCH] Begin work of fixing tests, formatting, linting, etc --- server/config/default.toml | 5 ++ server/src/auth/mod.rs | 2 +- server/src/auth/models.rs | 3 +- server/src/auth/services.rs | 15 +++- server/src/llms/toxicity.rs | 21 +++-- server/src/rag/brave_search.rs | 94 ++++++++++++----------- server/src/rag/search.rs | 6 +- server/src/search/services.rs | 2 +- server/src/startup.rs | 2 +- server/src/users/models.rs | 39 +++++----- server/src/users/routes.rs | 30 ++++---- server/src/users/services.rs | 29 ++++--- server/tests/health_check.rs | 5 +- server/tests/search.rs | 135 +++++++++++---------------------- server/tests/users.rs | 7 +- 15 files changed, 190 insertions(+), 205 deletions(-) diff --git a/server/config/default.toml b/server/config/default.toml index d70c002b..4e7a9acc 100644 --- a/server/config/default.toml +++ b/server/config/default.toml @@ -16,18 +16,23 @@ top_p = 0.7 [openai] api_url = "https://api.openai.com/v1/chat/completions" model = "gpt-4o" +api_key = "" [query_rephraser] model = "mistralai/Mistral-7B-Instruct-v0.2" max_tokens = 100 +api_key = "" [llm] +toxicity_auth_token = "" toxicity_threshold = 0.75 [pubmed] url_prefix = "https://pubmed.ncbi.nlm.nih.gov" [brave] +subscription_key = "" +goggles_id = "" url = "https://api.search.brave.com/res/v1/web/search" count = 10 result_filter = "query,web" diff --git a/server/src/auth/mod.rs b/server/src/auth/mod.rs index bb88c625..aa297c89 100644 --- a/server/src/auth/mod.rs +++ b/server/src/auth/mod.rs @@ -5,5 +5,5 @@ pub mod models; pub mod oauth2; pub mod routes; pub mod services; -pub mod utils; pub(crate) mod sessions; +pub mod utils; diff --git a/server/src/auth/models.rs b/server/src/auth/models.rs index f1857a8d..3915102d 100644 --- a/server/src/auth/models.rs +++ b/server/src/auth/models.rs @@ -115,7 +115,8 @@ async fn oauth_authenticate( .map_err(BackendError::Reqwest)?; // Persist user in our database, so we can use `get_user`. 
- let user = sqlx::query_as!(User, + let user = sqlx::query_as!( + User, " insert into users (username, access_token) values ($1, $2) diff --git a/server/src/auth/services.rs b/server/src/auth/services.rs index 13a2a042..7fbe4e14 100644 --- a/server/src/auth/services.rs +++ b/server/src/auth/services.rs @@ -6,7 +6,10 @@ use color_eyre::eyre::eyre; use sqlx::PgPool; #[tracing::instrument(level = "debug", ret, err)] -pub async fn register(pool: PgPool, request: models::RegisterUserRequest) -> crate::Result { +pub async fn register( + pool: PgPool, + request: models::RegisterUserRequest, +) -> crate::Result { if let Some(password) = request.password { let password_hash = utils::hash_password(password).await?; let user = sqlx::query_as!( @@ -49,12 +52,16 @@ pub async fn register(pool: PgPool, request: models::RegisterUserRequest) -> cra } pub async fn is_email_whitelisted(pool: &PgPool, email: &String) -> crate::Result { - let whitelisted_email = sqlx::query_as!(models::WhitelistedEmail, "SELECT * FROM whitelisted_emails WHERE email = $1", email) + let whitelisted_email = sqlx::query_as!( + models::WhitelistedEmail, + "SELECT * FROM whitelisted_emails WHERE email = $1", + email + ) .fetch_one(pool) .await; match whitelisted_email { Ok(whitelisted_email) => Ok(whitelisted_email.approved), - _ => Ok(false) + _ => Ok(false), } - } +} diff --git a/server/src/llms/toxicity.rs b/server/src/llms/toxicity.rs index 23a4e53a..f4fd6ce2 100644 --- a/server/src/llms/toxicity.rs +++ b/server/src/llms/toxicity.rs @@ -1,8 +1,8 @@ use crate::llms::LLMSettings; use color_eyre::eyre::eyre; +use reqwest::header::{HeaderMap, HeaderName, HeaderValue}; use reqwest::Client; use serde::{Deserialize, Serialize}; -use reqwest::header::{HeaderMap, HeaderName, HeaderValue}; #[derive(Debug, Serialize, Deserialize)] pub struct ToxicityInput { @@ -10,7 +10,7 @@ pub struct ToxicityInput { } #[derive(Debug, Serialize, Deserialize)] -struct ToxicityAPIResponse (pub Vec); +struct ToxicityAPIResponse(pub Vec); #[derive(Debug, Serialize, Deserialize)] struct ToxicityScore { @@ -25,8 +25,10 @@ pub async fn predict_toxicity( ) -> crate::Result { let mut headers = HeaderMap::new(); headers.insert( - HeaderName::from_bytes(b"Authorization").map_err(|e| eyre!("Failed to create header: {e}"))?, - HeaderValue::from_str(&llm_settings.toxicity_auth_token.expose()).map_err(|e| eyre!("Failed to create header: {e}"))?, + HeaderName::from_bytes(b"Authorization") + .map_err(|e| eyre!("Failed to create header: {e}"))?, + HeaderValue::from_str(&llm_settings.toxicity_auth_token.expose()) + .map_err(|e| eyre!("Failed to create header: {e}"))?, ); let client = Client::new(); @@ -43,9 +45,12 @@ pub async fn predict_toxicity( .await .map_err(|e| eyre!("Failed to parse toxicity response: {e}"))?; - let toxicity_score = toxicity_api_response.into_iter().find(|x| x.label == String::from("toxic")).unwrap_or(ToxicityScore { - score: 0.0, - label: String::from(""), - }); + let toxicity_score = toxicity_api_response + .into_iter() + .find(|x| x.label == String::from("toxic")) + .unwrap_or(ToxicityScore { + score: 0.0, + label: String::from(""), + }); Ok(toxicity_score.score > llm_settings.toxicity_threshold) } diff --git a/server/src/rag/brave_search.rs b/server/src/rag/brave_search.rs index 01e10728..7b081964 100644 --- a/server/src/rag/brave_search.rs +++ b/server/src/rag/brave_search.rs @@ -25,6 +25,54 @@ pub struct BraveAPIConfig { pub headers: HeaderMap, } +impl From for BraveAPIConfig { + fn from(brave_settings: BraveSettings) -> Self { + let queries = 
vec![ + (String::from("count"), brave_settings.count.to_string()), + ( + String::from("goggles_id"), + brave_settings.goggles_id.clone(), + ), + ( + String::from("result_filter"), + brave_settings.result_filter.clone(), + ), + ( + String::from("search_lang"), + brave_settings.search_lang.clone(), + ), + ( + String::from("extra_snippets"), + brave_settings.extra_snippets.to_string(), + ), + ( + String::from("safesearch"), + brave_settings.safesearch.clone(), + ), + ]; + + let headers = HeaderMap::from_iter( + vec![ + ("Accept", "application/json"), + ("Accept-Encoding", "gzip"), + ( + "X-Subscription-Token", + brave_settings.subscription_key.expose(), + ), + ] + .into_iter() + .map(|(k, v)| { + ( + HeaderName::from_bytes(k.as_bytes()).unwrap(), + HeaderValue::from_str(v).unwrap(), + ) + }), + ); + + BraveAPIConfig { queries, headers } + } +} + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct BraveWebSearchResult { pub title: String, @@ -46,52 +94,6 @@ struct BraveAPIResponse { pub web: BraveWebAPIResponse, } -pub fn prepare_brave_api_config(brave_settings: &BraveSettings) -> BraveAPIConfig { - let queries = vec![ - (String::from("count"), brave_settings.count.to_string()), - ( - String::from("goggles_id"), - brave_settings.goggles_id.clone(), - ), - ( - String::from("result_filter"), - brave_settings.result_filter.clone(), - ), - ( - String::from("search_lang"), - brave_settings.search_lang.clone(), - ), - ( - String::from("extra_snippets"), - brave_settings.extra_snippets.to_string(), - ), - ( - String::from("safesearch"), - brave_settings.safesearch.clone(), - ), - ]; - - let headers = HeaderMap::from_iter( - vec![ - ("Accept", "application/json"), - ("Accept-Encoding", "gzip"), - ( - "X-Subscription-Token", - brave_settings.subscription_key.expose(), - ), - ] - .into_iter() - .map(|(k, v)| { - ( - HeaderName::from_bytes(k.as_bytes()).unwrap(), - HeaderValue::from_str(v).unwrap(), - ) - }), - ); - - BraveAPIConfig { queries, headers } -} - #[tracing::instrument(level = "debug", ret, err)] pub async fn web_search( brave_settings: &BraveSettings, diff --git a/server/src/rag/search.rs b/server/src/rag/search.rs index 10c0a430..cd81f185 100644 --- a/server/src/rag/search.rs +++ b/server/src/rag/search.rs @@ -14,7 +14,7 @@ pub async fn search( brave_api_config: &brave_search::BraveAPIConfig, cache: &CachePool, agency_service: &mut AgencyServiceClient, - search_query: &String, + search_query: &str, ) -> crate::Result { if let Some(response) = cache.get(&search_query).await { return Ok(response); @@ -47,7 +47,7 @@ pub async fn search( let compressed_results = prompt_compression::compress( &settings.llm, prompt_compression::PromptCompressionInput { - query: search_query.clone(), + query: search_query.to_string(), target_token: 300, context_texts_list: retrieved_results.iter().map(|r| r.text.clone()).collect(), }, @@ -67,7 +67,7 @@ pub async fn search( async fn retrieve_result_from_agency( settings: &Settings, agency_service: &mut AgencyServiceClient, - search_query: &String, + search_query: &str, ) -> crate::Result> { let agency_service = Arc::new(agency_service.clone()); let query_embeddings = diff --git a/server/src/search/services.rs b/server/src/search/services.rs index 9000e9ce..8df4379b 100644 --- a/server/src/search/services.rs +++ b/server/src/search/services.rs @@ -10,7 +10,7 @@ pub async fn insert_new_search( pool: &PgPool, user_id: &Uuid, search_query_request: &api_models::SearchQueryRequest, - rephrased_query: &String, + rephrased_query: &str, ) -> crate::Result { let thread 
= match search_query_request.thread_id { Some(thread_id) => { diff --git a/server/src/startup.rs b/server/src/startup.rs index f702bd72..7d26f450 100644 --- a/server/src/startup.rs +++ b/server/src/startup.rs @@ -90,7 +90,7 @@ impl AppState { cache: CachePool::new(&settings.cache).await?, agency_service: agency_service_connect(settings.agency_api.expose()).await?, oauth2_clients: settings.oauth2_clients.clone(), - brave_config: brave_search::prepare_brave_api_config(&settings.brave), + brave_config: settings.brave.clone().into(), settings, openai_stream_regex: Regex::new(r#"\"content\":\"(.*?)\"}"#) .map_err(|e| eyre!("Failed to compile OpenAI stream regex: {}", e))?, diff --git a/server/src/users/models.rs b/server/src/users/models.rs index 4e058d2a..45a3073f 100644 --- a/server/src/users/models.rs +++ b/server/src/users/models.rs @@ -11,19 +11,19 @@ use std::fmt::Debug; #[derive(Serialize, Deserialize, Clone, Copy, Debug)] pub enum UserGroup { - Alpha, - Beta, - Public, + Alpha, + Beta, + Public, } // Move Public to top before public release impl From for UserGroup { - fn from(value: i32) -> Self { - match value { - 0 => UserGroup::Alpha, - 1 => UserGroup::Beta, - _ => UserGroup::Public, - } - } + fn from(value: i32) -> Self { + match value { + 0 => UserGroup::Alpha, + 1 => UserGroup::Beta, + _ => UserGroup::Public, + } + } } #[derive(sqlx::FromRow, Serialize, Clone, Debug)] @@ -117,7 +117,6 @@ pub struct UpdatePasswordRequest { pub new_password: Secret, } - #[derive(Serialize, Deserialize, Debug)] pub struct UpdateProfileRequest { pub username: Option, @@ -127,14 +126,16 @@ pub struct UpdateProfileRequest { pub company: Option, } - impl UpdateProfileRequest { pub fn has_any_value(&self) -> bool { - [self.username.is_some(), - self.email.is_some(), - self.fullname.is_some(), - self.title.is_some(), - self.company.is_some() - ].iter().any(|&x| x) + [ + self.username.is_some(), + self.email.is_some(), + self.fullname.is_some(), + self.title.is_some(), + self.company.is_some(), + ] + .iter() + .any(|&x| x) } -} \ No newline at end of file +} diff --git a/server/src/users/routes.rs b/server/src/users/routes.rs index 6efa7a35..703dfb31 100644 --- a/server/src/users/routes.rs +++ b/server/src/users/routes.rs @@ -1,14 +1,14 @@ -use crate::auth::AuthSession; use crate::auth::utils::verify_user_password; +use crate::auth::AuthSession; use crate::err::AppError; use crate::startup::AppState; -use crate::users::{User, UserRecord, models, services}; -use color_eyre::eyre::eyre; +use crate::users::{models, services, User, UserRecord}; +use axum::extract::State; use axum::http::StatusCode; use axum::response::IntoResponse; use axum::routing::{get, patch}; -use axum::extract::State; use axum::{Form, Json, Router}; +use color_eyre::eyre::eyre; use sqlx::PgPool; #[tracing::instrument(level = "debug", skip_all, ret, err(Debug))] @@ -25,11 +25,11 @@ async fn get_user_handler(auth_session: AuthSession) -> crate::Result, user: User, - Json(update_profile_request): Json -) -> crate::Result> { + Json(update_profile_request): Json, +) -> crate::Result> { let user_id = user.user_id; if !update_profile_request.has_any_value() { - return Err(eyre!("At least one field has to be updated.").into()) + return Err(eyre!("At least one field has to be updated.").into()); } let updated_user = services::update_profile(&pool, &user_id, update_profile_request).await?; @@ -40,21 +40,23 @@ async fn update_profile_handler( async fn update_password_handler( State(pool): State, user: User, - Form(update_password_request): Form -) 
-> crate::Result { + Form(update_password_request): Form, +) -> crate::Result { let user_id = user.user_id; - if update_password_request.old_password.expose() == update_password_request.new_password.expose() { + if update_password_request.old_password.expose() + == update_password_request.new_password.expose() + { return Err(eyre!("Old and new password can not be the same.").into()); } match verify_user_password(Some(user), update_password_request.old_password) { Ok(Some(_user)) => { - services::update_password(&pool, &user_id, update_password_request.new_password).await?; + services::update_password(&pool, &user_id, update_password_request.new_password) + .await?; Ok((StatusCode::OK, ())) - }, - _ => Err(eyre!("Failed to authenticate old password").into()) - + } + _ => Err(eyre!("Failed to authenticate old password").into()), } } diff --git a/server/src/users/services.rs b/server/src/users/services.rs index 33f2c17d..bd75de29 100644 --- a/server/src/users/services.rs +++ b/server/src/users/services.rs @@ -1,5 +1,5 @@ -use crate::secrets::Secret; use crate::auth::utils::hash_password; +use crate::secrets::Secret; use crate::users::models; use sqlx::PgPool; use uuid::Uuid; @@ -11,8 +11,8 @@ pub async fn update_profile( update_profile_request: models::UpdateProfileRequest, ) -> crate::Result { let user = sqlx::query_as!( - models::User, - " + models::User, + " update users set username = coalesce($1::text, username), @@ -22,12 +22,12 @@ pub async fn update_profile( company = coalesce($5::text, company) where user_id = $6 returning * ", - update_profile_request.username, - update_profile_request.email, - update_profile_request.fullname, - update_profile_request.title, - update_profile_request.company, - user_id, + update_profile_request.username, + update_profile_request.email, + update_profile_request.fullname, + update_profile_request.title, + update_profile_request.company, + user_id, ) .fetch_one(pool) .await?; @@ -35,7 +35,6 @@ pub async fn update_profile( return Ok(user); } - #[tracing::instrument(level = "debug", ret, err)] pub async fn update_password( pool: &PgPool, @@ -44,13 +43,13 @@ pub async fn update_password( ) -> crate::Result<()> { let password_hash = hash_password(password).await?; sqlx::query_as!( - User, - "update users set password_hash = $1 where user_id = $2", - password_hash.expose(), - user_id + User, + "update users set password_hash = $1 where user_id = $2", + password_hash.expose(), + user_id ) .execute(pool) .await?; return Ok(()); -} \ No newline at end of file +} diff --git a/server/tests/health_check.rs b/server/tests/health_check.rs index 1fb3b8fb..4ef26fb8 100644 --- a/server/tests/health_check.rs +++ b/server/tests/health_check.rs @@ -15,12 +15,15 @@ async fn health_check_works(pool: PgPool) { let agency_service = agency_service_connect(&settings.agency_api.expose()) .await .unwrap(); + let brave_api_config = settings.brave.clone().into(); let state = AppState::new( pool, cache, agency_service, - settings.oauth2_clients.clone(), + vec![], settings, + brave_api_config, + regex::Regex::new("").unwrap(), ) .await .unwrap(); diff --git a/server/tests/search.rs b/server/tests/search.rs index bcbc139c..110b73cf 100644 --- a/server/tests/search.rs +++ b/server/tests/search.rs @@ -1,15 +1,11 @@ use server::auth::models::RegisterUserRequest; use server::auth::register; use server::cache::{CachePool, CacheSettings}; -use server::proto::{SearchResponse, Source}; +use server::rag::{search, SearchResponse, Source}; use server::search::{ - get_one_search_history, 
get_search_history, get_top_searches, insert_search_history, search, - update_search_reaction, -}; -use server::search::{ - SearchHistoryByIdRequest, SearchHistoryRequest, SearchQueryRequest, SearchReactionRequest, - TopSearchRequest, + get_one_search, insert_new_search, update_search_reaction, SearchByIdRequest, SourceType, }; +use server::search::{SearchQueryRequest, SearchReactionRequest}; use server::settings::Settings; use server::startup::agency_service_connect; use server::Result; @@ -24,35 +20,19 @@ async fn search_test() -> Result<()> { .await .unwrap(); let cache = CachePool::new(&settings.cache).await?; - - let search_query = SearchQueryRequest { - session_id: Some(Uuid::new_v4()), - query: "test".to_string(), - }; - - let search_result = search(&cache, &mut agency_service, &search_query).await; + let brave_api_config = settings.brave.clone().into(); + + let search_result = search( + &settings, + &brave_api_config, + &cache, + &mut agency_service, + "test", + ) + .await; assert!(search_result.is_ok()); - assert_eq!(search_result.unwrap().status, 200); - - Ok(()) -} - -#[tokio::test] -async fn top_searches_test() -> Result<()> { - let cache_settings = CacheSettings { - url: "redis://127.0.0.1/".to_string().into(), - enabled: true, - ttl: 3600, - max_sorted_size: 100, - }; - let cache = CachePool::new(&cache_settings).await?; - - let top_search_query = TopSearchRequest { limit: Some(1) }; - - let top_searches_result = get_top_searches(&cache, &top_search_query).await; - assert!(top_searches_result.is_ok()); - assert_eq!(top_searches_result.unwrap().len(), 1); + assert_eq!(search_result.unwrap().result, ""); Ok(()) } @@ -75,14 +55,17 @@ async fn insert_search_and_get_search_history_test(pool: PgPool) -> Result<()> { let user_id = new_user.user_id; let search_query = SearchQueryRequest { - session_id: Some(Uuid::new_v4()), - query: "test_query".to_string(), + thread_id: Some(Uuid::new_v4()), + query: "test-query".to_string(), }; - let search_response = SearchResponse { - status: 200, - result: "test_result".to_string(), + let rephrased_query = "test-rephrased-query"; + let expected_response = SearchResponse { + result: "test-result".to_string(), sources: vec![Source { - url: "test_url".to_string(), + url: "test-url".to_string(), + title: "test-title".to_string(), + description: "test-description".to_string(), + source_type: SourceType::Pdf, metadata: HashMap::from([ ("test_key1".to_string(), "test_value1".to_string()), ("test_key2".to_string(), "test_value2".to_string()), @@ -91,40 +74,20 @@ async fn insert_search_and_get_search_history_test(pool: PgPool) -> Result<()> { }; let search_insertion_result = - insert_search_history(&pool, &cache, &user_id, &search_query, &search_response).await; - - assert!(search_insertion_result.is_ok()); - - let one_search_history_request = SearchHistoryByIdRequest { - search_history_id: search_insertion_result.unwrap().search_history_id, - }; - - let one_search_history_result = - get_one_search_history(&pool, &user_id, &one_search_history_request).await; + insert_new_search(&pool, &user_id, &search_query, rephrased_query).await?; - assert!(one_search_history_result.is_ok()); - let one_search_history_result = one_search_history_result.unwrap(); - - assert_eq!(one_search_history_result.query, search_query.query); - assert_eq!(one_search_history_result.user_id, user_id); - assert_eq!(one_search_history_result.result, search_response.result); - assert_eq!(one_search_history_result.sources.0, search_response.sources); - - let search_history_request = 
SearchHistoryRequest { - limit: Some(1), - offset: Some(0), + let one_search_history_request = SearchByIdRequest { + search_id: search_insertion_result.search_id, }; - let search_history_result = get_search_history(&pool, &user_id, &search_history_request).await; - - assert!(&search_history_result.is_ok()); - let search_history_result = search_history_result.unwrap(); + let actual_response = get_one_search(&pool, &user_id, &one_search_history_request).await?; - assert_eq!(&search_history_result.len(), &1); - assert_eq!(&search_history_result[0].query, &search_query.query); - assert_eq!(search_history_result[0].user_id, user_id); - assert_eq!(search_history_result[0].result, search_response.result); - assert_eq!(search_history_result[0].sources.0, search_response.sources); + assert_eq!(actual_response.search.query, search_query.query); + assert_eq!(actual_response.search.result, expected_response.result); + assert_eq!(actual_response.sources.len(), 1); + let actual_source = actual_response.sources[0].clone(); + let expected_source = expected_response.sources[0].clone(); + assert_eq!(actual_source.title, expected_source.title); Ok(()) } @@ -147,42 +110,36 @@ async fn update_search_reaction_test(pool: PgPool) -> Result<()> { let user_id = new_user.user_id; let search_query = SearchQueryRequest { - session_id: None, - query: "test_query".to_string(), + thread_id: Some(Uuid::new_v4()), + query: "test-query".to_string(), }; - let search_response = SearchResponse { - status: 200, - result: "test_result".to_string(), + let rephrased_query = "test-rephrased-query"; + let expected_response = SearchResponse { + result: "test-result".to_string(), sources: vec![Source { - url: "test_url".to_string(), + url: "test-url".to_string(), + title: "test-title".to_string(), + description: "test-description".to_string(), + source_type: SourceType::Pdf, metadata: HashMap::from([ ("test_key1".to_string(), "test_value1".to_string()), ("test_key2".to_string(), "test_value2".to_string()), ]), }], }; - let search_insertion_result = - insert_search_history(&pool, &cache, &user_id, &search_query, &search_response).await; - - assert!(search_insertion_result.is_ok()); - let search_insertion_result = search_insertion_result.unwrap(); + insert_new_search(&pool, &user_id, &search_query, rephrased_query).await?; let search_reaction_request = SearchReactionRequest { - search_history_id: search_insertion_result.search_history_id, + search_id: search_insertion_result.search_id, reaction: true, }; let search_reaction_result = - update_search_reaction(&pool, &user_id, &search_reaction_request).await; - - assert!(&search_reaction_result.is_ok()); - let search_reaction_result = search_reaction_result.unwrap(); + update_search_reaction(&pool, &user_id, &search_reaction_request).await?; assert_eq!(&search_reaction_result.query, &search_query.query); - assert_eq!(&search_reaction_result.user_id, &user_id); - assert_eq!(&search_reaction_result.result, &search_response.result); - assert_eq!(&search_reaction_result.sources.0, &search_response.sources); + assert_eq!(&search_reaction_result.result, &expected_response.result); assert_eq!( search_reaction_result.reaction.unwrap(), search_reaction_request.reaction diff --git a/server/tests/users.rs b/server/tests/users.rs index f1eb3458..9281be25 100644 --- a/server/tests/users.rs +++ b/server/tests/users.rs @@ -53,12 +53,15 @@ async fn register_users_works(pool: PgPool) { let agency_service = agency_service_connect(&settings.agency_api.expose()) .await .unwrap(); + let brave_api_config = 
settings.brave.clone().into();
     let state = AppState::new(
-        pool.clone(),
+        pool,
         cache,
         agency_service,
-        settings.oauth2_clients.clone(),
+        vec![],
         settings,
+        brave_api_config,
+        regex::Regex::new("").unwrap(),
     )
     .await
     .unwrap();
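
For reference, the brave_search.rs hunk replaces the free `prepare_brave_api_config` helper with `impl From<BraveSettings> for BraveAPIConfig`, which is what lets startup.rs and the tests build the config via `settings.brave.clone().into()`. A minimal, self-contained sketch of that pattern follows; the types and fields are simplified stand-ins, not the repo's real definitions.

// Simplified stand-ins for the repo's BraveSettings / BraveAPIConfig; the real
// structs carry more fields (goggles_id, search_lang, headers, ...).
#[derive(Clone)]
struct BraveSettings {
    count: u32,
    result_filter: String,
}

struct BraveAPIConfig {
    queries: Vec<(String, String)>,
}

// The pattern used in the patch: a From impl instead of a free
// prepare_brave_api_config(&settings) helper.
impl From<BraveSettings> for BraveAPIConfig {
    fn from(s: BraveSettings) -> Self {
        BraveAPIConfig {
            queries: vec![
                ("count".to_string(), s.count.to_string()),
                ("result_filter".to_string(), s.result_filter),
            ],
        }
    }
}

fn main() {
    let brave = BraveSettings {
        count: 10,
        result_filter: "query,web".to_string(),
    };
    // Call sites now read like startup.rs and the tests: `settings.brave.clone().into()`.
    let config: BraveAPIConfig = brave.clone().into();
    assert_eq!(config.queries.len(), 2);
}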
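
Several signatures in rag/search.rs and search/services.rs switch parameters from &String to &str. A small sketch of why the borrowed slice is the more flexible choice; the search function below is a hypothetical stand-in, not the repo's API.

// Taking &str instead of &String: callers can pass literals, slices, or a
// borrowed String without an extra allocation, and .to_string() is only paid
// where an owned copy is actually needed (as in the prompt_compression input).
fn search(search_query: &str) -> String {
    format!("rephrased: {search_query}")
}

fn main() {
    let owned = String::from("test-query");
    println!("{}", search("test"));      // string literal
    println!("{}", search(&owned));      // &String coerces via Deref
    println!("{}", search(&owned[..4])); // slice of a String
}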
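
The reworked toxicity.rs keeps the same behaviour: take the score labelled "toxic", default to 0.0 when the API returns none, and compare against llm.toxicity_threshold (0.75 in config/default.toml). A self-contained sketch of that decision, using a simplified ToxicityScore and a hard-coded threshold in place of LLMSettings.

// Mirrors the reshaped toxicity.rs logic: pick the "toxic" score (0.0 if
// absent) and flag the input when it exceeds the configured threshold.
struct ToxicityScore {
    score: f32,
    label: String,
}

fn is_toxic(scores: Vec<ToxicityScore>, threshold: f32) -> bool {
    let toxic = scores
        .into_iter()
        .find(|s| s.label == "toxic")
        .unwrap_or(ToxicityScore {
            score: 0.0,
            label: String::new(),
        });
    toxic.score > threshold
}

fn main() {
    let scores = vec![
        ToxicityScore { score: 0.12, label: "insult".to_string() },
        ToxicityScore { score: 0.81, label: "toxic".to_string() },
    ];
    assert!(is_toxic(scores, 0.75));
}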