From df3c4ebcbc88a61697b0ffb2396007e94e32ece7 Mon Sep 17 00:00:00 2001
From: juli-p <63345753+juli-p@users.noreply.github.com>
Date: Thu, 14 Sep 2023 14:56:23 +0200
Subject: [PATCH 01/10] added uwebsockets submodule

---
 .gitmodules     | 3 +++
 ext/uWebSockets | 1 +
 2 files changed, 4 insertions(+)
 create mode 100644 .gitmodules
 create mode 160000 ext/uWebSockets

diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 000000000..85662a026
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,3 @@
+[submodule "ext/uWebSockets"]
+	path = ext/uWebSockets
+	url = https://github.com/uNetworking/uWebSockets.git
diff --git a/ext/uWebSockets b/ext/uWebSockets
new file mode 160000
index 000000000..f1b10ed22
--- /dev/null
+++ b/ext/uWebSockets
@@ -0,0 +1 @@
+Subproject commit f1b10ed221b36c0d8848998e3e939823f66496eb
From ccbc1008222407de9462df4746acc4218241e71a Mon Sep 17 00:00:00 2001
From: juli-p <63345753+juli-p@users.noreply.github.com>
Date: Thu, 14 Sep 2023 16:44:25 +0200
Subject: [PATCH 02/10] realtime_scores with uWebSockets

---
 .gitignore                         |   1 +
 CMakeLists.txt                     |   2 +-
 install.py                         |   9 +
 test/model/CMakeLists.txt          |  21 +-
 test/model/src/realtime_scores.cpp | 302 +++++++++++++++--------------
 5 files changed, 190 insertions(+), 145 deletions(-)

diff --git a/.gitignore b/.gitignore
index 27e9957a1..22b2a44de 100644
--- a/.gitignore
+++ b/.gitignore
@@ -120,6 +120,7 @@ test/model/res/*
 
 .INSTALLED
+.uWebSockets_INSTALLED
 .RES_EXTRACTED
 
 .env
diff --git a/CMakeLists.txt b/CMakeLists.txt
index c10c29b9c..61fa4c660 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -52,7 +52,7 @@ cmake_print_variables(SQLite3_INCLUDE_DIRS)
 cmake_print_variables(SQLite3_LIBRARIES)
 
 # Set up the compiler.
-set(CMAKE_CXX_FLAGS "-std=c++14 -Wall -Wextra -pedantic -Wredundant-decls -Wdisabled-optimization -Wctor-dtor-privacy -Wnon-virtual-dtor -Werror=old-style-cast -Wsign-promo -Werror=return-type -Wno-unused-parameter")
+set(CMAKE_CXX_FLAGS "-std=c++20 -Wall -Wextra -pedantic -Wredundant-decls -Wdisabled-optimization -Wctor-dtor-privacy -Wnon-virtual-dtor -Werror=old-style-cast -Wsign-promo -Werror=return-type -Wno-unused-parameter")
 set(CMAKE_CXX_FLAGS_DEBUG "-O3 -g")
 set(CMAKE_CXX_FLAGS_RELEASE "-O3")
 
diff --git a/install.py b/install.py
index 069fef656..0ca91e06d 100755
--- a/install.py
+++ b/install.py
@@ -100,6 +100,15 @@ def build_external_libraries(args, dependencies):
         f = open("ext/igraph_0.9.8/.INSTALLED", "w")
         f.close()
 
+    # uWebSockets
+    if os.path.isfile("ext/.uWebSockets_INSTALLED") and not args.clean:
+        print("-- uWebSockets library already built.")
+    else:
+        print("-- Building uWebSockets library")
+        subprocess.call('git submodule update --init --recursive; cd ext/uWebSockets/uSockets;make boringssl;cd boringssl;BORINGSSL=$PWD;cd ../lsquic;cmake -DBORINGSSL_DIR=$BORINGSSL .;make;cd ..;WITH_LTO=1 WITH_QUIC=1 WITH_BORINGSSL=1 make', shell=True)
+        f = open("ext/.uWebSockets_INSTALLED", "w")
+        f.close()
+
 
 def extract_all_zips_in_folder(folder):
     print("extracting files in " + folder)
diff --git a/test/model/CMakeLists.txt b/test/model/CMakeLists.txt
index 3ab8938d3..1a448d778 100644
--- a/test/model/CMakeLists.txt
+++ b/test/model/CMakeLists.txt
@@ -56,7 +56,26 @@ if(APPLE)
     target_link_libraries(epiJSON omp)
 endif()
 
+# uWebSockets stuff
+file(GLOB uSocketObjects
+        ${PROJECT_SOURCE_DIR}/ext/uWebSockets/uSockets/*.o
+)
+
 add_executable(realtime_scores src/realtime_scores.cpp)
-target_link_libraries(realtime_scores boost_filesystem igraph drogon trantor ssl crypto z jsoncpp uuid brotlidec brotlienc)
+target_link_libraries(realtime_scores
+        boost_filesystem
+        igraph
+        z
+        m
+        ${uSocketObjects}
+        ${PROJECT_SOURCE_DIR}/ext/uWebSockets/uSockets/lsquic/src/liblsquic/liblsquic.a
+        ${PROJECT_SOURCE_DIR}/ext/uWebSockets/uSockets/boringssl/build/ssl/libssl.a
+        ${PROJECT_SOURCE_DIR}/ext/uWebSockets/uSockets/boringssl/build/crypto/libcrypto.a
+)
+target_include_directories(realtime_scores
+        PUBLIC ${PROJECT_SOURCE_DIR}/ext/uWebSockets/src
+        PUBLIC ${PROJECT_SOURCE_DIR}/ext/uWebSockets/uSockets/src
+        PUBLIC ${PROJECT_SOURCE_DIR}/ext/uWebSockets/uSockets/boringssl/include
+)
 
 add_subdirectory(src)
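The new link line pulls the uSockets object files and the static BoringSSL and lsquic archives produced by install.py directly into the target, and the include directories point straight at the submodule's source tree, so no installed uWebSockets package is needed. With these paths in place, App.h alone is enough to stand up a server. A minimal illustrative sketch (the route and port below are arbitrary examples, not values from this patch):

    #include "App.h"  // resolved via the uWebSockets include directory added above

    int main() {
        uWS::App().get("/hello", [](auto *res, auto *req) {
            res->end("Hello from uWebSockets");   // sends the body and completes the response
        }).listen(3000, [](auto *listen_socket) {
            // listen_socket is a nullptr if the port could not be bound
        }).run();                                 // blocks; processes events on this thread
    }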
diff --git a/test/model/src/realtime_scores.cpp b/test/model/src/realtime_scores.cpp
index 1e1d69197..9c9a7c642 100644
--- a/test/model/src/realtime_scores.cpp
+++ b/test/model/src/realtime_scores.cpp
@@ -3,7 +3,10 @@
 //
 
 #include
-#include <drogon/drogon.h>
+#include "../../ext/uWebSockets/src/App.h"
+#include "../../ext/rapidjson/document.h"
+#include "../../ext/rapidjson/stringbuffer.h"
+#include "../../ext/rapidjson/writer.h"
 
 #define HEADER_ONLY
 #define ALLOW_MULTIPLE_SNPSTORAGES
@@ -12,6 +13,14 @@
 //
 
 using namespace epi;
 
+std::string json_to_str(const rapidjson::Value &object) {
+    rapidjson::StringBuffer buffer;
+    rapidjson::Writer<rapidjson::StringBuffer> writer(buffer);
+    object.Accept(writer);
+
+    return buffer.GetString();
+}
+
 int main(int argc, char **argv) {
     // parse args
     CLI::App cli("Realtime scores - A tool to evaluate SNP-sets as a realtime API service");
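json_to_str is the only serialization helper the service needs: rapidjson keeps the DOM (Document/Value) separate from output, and a Writer walks the DOM through Accept() to emit compact JSON. A self-contained sketch of the same pattern, with illustrative member names:

    #include "rapidjson/document.h"
    #include "rapidjson/stringbuffer.h"
    #include "rapidjson/writer.h"
    #include <iostream>

    int main() {
        rapidjson::Document doc(rapidjson::kObjectType);
        auto &alloc = doc.GetAllocator();           // AddMember always needs the allocator
        doc.AddMember("ok", true, alloc);
        doc.AddMember("reason", "example", alloc);  // string literals are stored by reference

        rapidjson::StringBuffer buffer;
        rapidjson::Writer<rapidjson::StringBuffer> writer(buffer);
        doc.Accept(writer);                         // serializes the DOM into the buffer
        std::cout << buffer.GetString() << std::endl;  // {"ok":true,"reason":"example"}
    }

Note that the allocator is taken by reference here, which is also why the handler code below uses a reference to the document's allocator rather than a copy.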
@@ -40,19 +49,19 @@ int main(int argc, char **argv) {
                    "Add a model to the list of models that are applied on the SNP sets. If none is specified, all available models will be used.");
 
-    int num_threads = 2;
+    unsigned int num_threads = 2;
     cli.add_option("--num-threads", num_threads,
                    "Number of threads to be used for score calculation. Default: 2. 0 means all available");
 
     // Parse the options.
     CLI11_PARSE(cli, argc, argv);
 
-    // process num threads
-    drogon::app().setThreadNum(num_threads);
-    num_threads = drogon::app().getThreadNum();
+    if (num_threads == 0) {
+        num_threads = std::thread::hardware_concurrency();
+    }
 
     boost::filesystem::path gwas_dir_b(input_dir);
-    if(is_directory(gwas_dir_b)) {
+    if (is_directory(gwas_dir_b)) {
         for (auto it = directory_iterator(gwas_dir_b); it != directory_iterator(); ++it) {
             path path = it->path();
             if (is_regular_file(path)) {
@@ -70,7 +79,8 @@ int main(int argc, char **argv) {
                     ncat = std::stoi(splits[3]);
                 }
 
-                LOG_WARN << "dataset: { name: \"" << name << "\", format: \"" << format << "\", phenotype: \"" << phenotype << "\", num_categories: " << ncat << " }";
+                std::cout << "dataset: { name: \"" << name << "\", format: \"" << format << "\", phenotype: \""
+                          << phenotype << "\", num_categories: " << ncat << " }" << std::endl;
 
                 gwas_input_file.push_back(path.c_str());
                 gwas_input_format.push_back(format);
@@ -86,49 +96,18 @@ int main(int argc, char **argv) {
     // process datasets
     auto num_datasets = gwas_input_file.size();
 
-    /*
-    // process num_categories
-    if (num_categories.empty()) {
-        num_categories.push_back(2);
-    }
-    if (num_categories.size() == 1) {
-        for (size_t i = 1; i < num_datasets; i++) num_categories.push_back(num_categories[0]);
-    }
-
-    // process input format
-    if (gwas_input_format.size() == 1) {
-        for (size_t i = 1; i < num_datasets; i++) gwas_input_format.push_back(gwas_input_format[0]);
-    }
-
-    // process phenotype
-    if (gwas_input_phenotype.size() == 1) {
-        for (size_t i = 1; i < num_datasets; i++) gwas_input_phenotype.push_back(gwas_input_phenotype[0]);
-    }
-
-    if (
-            input_name.size() != num_datasets ||
-            num_categories.size() != num_datasets ||
-            gwas_input_phenotype.size() != num_datasets ||
-            gwas_input_format.size() != num_datasets
-    ) {
-        throw epi::Error("You did not provide the right amount of arguments of each type.");
-    }
-
-    */
-
     // process models
     const std::vector<std::string> all_available_models = epi::options::get_all_epistasis_scores();
     if (models.empty()) {
         // set all available models
-        models = { all_available_models.begin(), all_available_models.end() };
+        models = {all_available_models.begin(), all_available_models.end()};
     }
     std::sort(models.begin(), models.end());
     models.erase(std::unique(models.begin(), models.end()), models.end());
 
     std::unordered_map<std::string, epi::options::EpistasisScore> models_map;
-    for (auto & m : models) models_map.insert({ m, epi::options::epistasis_score_from_string(m) });
+    for (auto &m: models) models_map.insert({m, epi::options::epistasis_score_from_string(m)});
 
@@ -136,113 +115,150 @@ int main(int argc, char **argv) {
     std::unordered_map<std::string, std::shared_ptr<epi::DataModel>> instances;
     for (size_t i = 0; i < num_datasets; i++) {
         instances[input_name[i]] = std::make_shared<epi::DataModel>(false);
-        auto il = epi::InstanceLoader(gwas_input_file[i], gwas_input_format[i], gwas_input_phenotype[i], num_categories[i]);
+        auto il = epi::InstanceLoader(gwas_input_file[i], gwas_input_format[i], gwas_input_phenotype[i],
+                                      num_categories[i]);
         il.run(instances[input_name[i]]);
         instances[input_name[i]]->snpStorage->init_model_containers(num_threads);
     }
 
-    drogon::app().registerHandler(
-            "/needl/score/{dataset}/{scores}/{snps}",
-            [&instances, &models_map](const drogon::HttpRequestPtr &,
-                                      std::function<void(const drogon::HttpResponsePtr &)> &&callback,
-                                      const std::string &dataset, const std::string &scores_str, const std::string &snps_str) {
-
-                std::chrono::high_resolution_clock::time_point startTime = std::chrono::high_resolution_clock::now();
-
-                // find dataset
-                auto dm_ptr = instances.find(dataset);
-                if (dm_ptr == instances.end()) {
-                    LOG_WARN << "Unknown dataset: " << dataset;
-                    Json::Value json;
-                    json["ok"] = false;
-                    json["reason"] = "unknown dataset";
-                    auto resp = drogon::HttpResponse::newHttpJsonResponse(json);
-                    resp->setStatusCode(drogon::k404NotFound);
-                    callback(resp);
-                } else {
-                    auto &dm = dm_ptr->second;
-
-                    // find the selected models
-                    auto scores_s = epi::string_split(scores_str, ';');
-                    std::sort(scores_s.begin(), scores_s.end());
-                    scores_s.erase(std::unique(scores_s.begin(), scores_s.end()), scores_s.end());
-
-                    Json::Value unknown_scores;
-                    std::vector<epi::options::EpistasisScore> selected_models;
-                    for (auto &user_score : scores_s) {
-                        auto item = models_map.find(user_score);
-                        if (item == models_map.end()) {
-                            unknown_scores.append(user_score);
-                        } else {
-                            selected_models.push_back(item->second);
-                        }
-                    }
-
-                    try {
-                        auto snps_s = epi::string_split(snps_str, ';');
-                        if (snps_s.size() > 10) {
-                            LOG_WARN << "Too many SNPs (#snps = " << snps_s.size() << ")";
-                            Json::Value json;
-                            json["ok"] = false;
-                            json["reason"] = "too many SNPs (max. 10 allowed)";
-                            if (unknown_scores.size() > 0) json["unknown_or_unavailable_scores"] = unknown_scores;
-                            auto resp = drogon::HttpResponse::newHttpJsonResponse(json);
-                            resp->setStatusCode(drogon::k400BadRequest);
-                            callback(resp);
-                        } else {
-                            std::vector<epi::SNP_t> snps_t;
-                            snps_t.reserve(snps_s.size());
-                            Json::Value snps_j;
-                            for (auto &s: snps_s) {
-                                snps_t.push_back(dm->snpStorage->by_name(s));
-                                snps_j.append(s);
-                            }
-                            epi::SNPSet set(snps_t);
-                            Json::Value json_scores;
-                            for (auto &model: selected_models) {
-                                json_scores[epi::options::epistasis_score_to_string(model)] = dm->snpStorage->calculate_score(set, model,
-                                                                                                                              drogon::app().getCurrentThreadIndex());
-                            }
-                            Json::Value json;
-                            json["ok"] = true;
-                            json["scores"] = json_scores;
-                            json["SNPs"] = snps_j;
-
-                            if (unknown_scores.size() > 0) json["unknown_or_unavailable_scores"] = unknown_scores;
-
-                            std::chrono::high_resolution_clock::time_point endTime = std::chrono::high_resolution_clock::now();
-                            auto timeMs = std::chrono::duration_cast<std::chrono::microseconds>(
-                                    endTime - startTime).count();
-                            json["time_microseconds"] = timeMs;
-                            callback(drogon::HttpResponse::newHttpJsonResponse(json));
-                        }
-                    } catch (epi::SNPNotFoundError &err) {
-                        LOG_WARN << "Unknown SNP " << err.get_name();
-                        Json::Value json;
-                        json["ok"] = false;
-                        json["reason"] = "unknown SNP " + err.get_name();
-                        json["unknown_snp"] = err.get_name();
-                        if (unknown_scores.size() > 0) json["unknown_or_unavailable_scores"] = unknown_scores;
-                        auto resp = drogon::HttpResponse::newHttpJsonResponse(json);
-                        resp->setStatusCode(drogon::k404NotFound);
-                        callback(resp);
-                    } catch (epi::Error &err) {
-                        LOG_WARN << "epi::Error " << err.what();
-                        Json::Value json;
-                        json["ok"] = false;
-                        if (unknown_scores.size() > 0) json["unknown_or_unavailable_scores"] = unknown_scores;
-                        auto resp = drogon::HttpResponse::newHttpJsonResponse(json);
-                        resp->setStatusCode(drogon::k500InternalServerError);
-                        callback(resp);
-                    }
-                }
-            },
-            {drogon::Get});
-
+    std::vector<size_t> thread_no(num_threads);
+    for (size_t i = 0; i < num_threads; ++i) thread_no[i] = i;
+
+    std::vector<std::thread*> threads(num_threads);
+    std::mutex stderr_mutex;
+
+    std::transform(thread_no.begin(), thread_no.end(), threads.begin(), [&stderr_mutex, &instances, &models_map, &bind_addr, &bind_port](size_t thread_index) {
+        return new std::thread([&stderr_mutex, &instances, &models_map, &bind_addr, &bind_port, thread_index]() {
+            uWS::App().get("/needl/score/:dataset/:scores/:snps", [&stderr_mutex, &instances, &models_map, thread_index](auto *res, auto *req) {
+                std::chrono::high_resolution_clock::time_point startTime = std::chrono::high_resolution_clock::now();
+
+                std::string dataset = std::string(req->getParameter(0));
+                std::string scores_str = std::string(req->getParameter(1));
+                std::string snps_str = std::string(req->getParameter(2));
+
+                rapidjson::Document doc;
+                auto &alloc = doc.GetAllocator();
+
+                // find dataset
+                auto dm_ptr = instances.find(dataset);
+                if (dm_ptr == instances.end()) {
+                    stderr_mutex.lock();
+                    std::cerr << "Unknown dataset: " << dataset << std::endl;
+                    stderr_mutex.unlock();
+                    rapidjson::Value json(rapidjson::kObjectType);
+                    json.AddMember("ok", false, alloc);
+                    json.AddMember("reason", "unknown dataset", alloc);
+                    res->writeStatus("404 Not Found");
+                    res->end(json_to_str(json));
+                } else {
+                    auto &dm = dm_ptr->second;
+
+                    // find the selected models
+                    auto scores_s = epi::string_split(scores_str, ';');
+                    std::sort(scores_s.begin(), scores_s.end());
+                    scores_s.erase(std::unique(scores_s.begin(), scores_s.end()), scores_s.end());
+
+                    rapidjson::Value unknown_scores(rapidjson::kArrayType);
+                    std::vector<epi::options::EpistasisScore> selected_models;
+                    for (auto &user_score: scores_s) {
+                        auto item = models_map.find(user_score);
+                        if (item == models_map.end()) {
+                            unknown_scores.PushBack(rapidjson::Value(user_score.c_str(), alloc).Move(), alloc);
+                        } else {
+                            selected_models.push_back(item->second);
+                        }
+                    }
+
+                    try {
+                        auto snps_s = epi::string_split(snps_str, ';');
+                        if (snps_s.size() > 10) {
+                            stderr_mutex.lock();
+                            std::cerr << "Too many SNPs (#snps = " << snps_s.size() << ")" << std::endl;
+                            stderr_mutex.unlock();
+                            rapidjson::Value json(rapidjson::kObjectType);
+                            json.AddMember("ok", false, alloc);
+                            json.AddMember("reason", "too many SNPs (max. 10 allowed)", alloc);
+
+                            if (unknown_scores.Capacity() > 0)
+                                json.AddMember("unknown_or_unavailable_scores", unknown_scores.Move(), alloc);
+
+                            res->writeStatus("400 Bad Request");
+                            res->end(json_to_str(json));
+                        } else {
+                            std::vector<epi::SNP_t> snps_t;
+                            snps_t.reserve(snps_s.size());
+                            rapidjson::Value snps_j(rapidjson::kArrayType);
+                            for (auto &s: snps_s) {
+                                snps_t.push_back(dm->snpStorage->by_name(s));
+                                snps_j.PushBack(rapidjson::Value(s.c_str(), alloc).Move(), alloc);
+                            }
+                            epi::SNPSet set(snps_t);
+                            rapidjson::Value json_scores(rapidjson::kObjectType);
+                            for (auto &model: selected_models) {
+                                json_scores.AddMember(
+                                        rapidjson::Value(epi::options::epistasis_score_to_string(model).c_str(),
+                                                         alloc).Move(),
+                                        dm->snpStorage->calculate_score(set, model, thread_index),
+                                        alloc
+                                );
+                            }
+                            rapidjson::Value json(rapidjson::kObjectType);
+                            json.AddMember("ok", true, alloc);
+                            json.AddMember("scores", json_scores.Move(), alloc);
+                            json.AddMember("SNPs", snps_j.Move(), alloc);
+
+                            if (unknown_scores.Capacity() > 0)
+                                json.AddMember("unknown_or_unavailable_scores", unknown_scores.Move(), alloc);
+
+                            std::chrono::high_resolution_clock::time_point endTime = std::chrono::high_resolution_clock::now();
+                            auto timeMs = std::chrono::duration_cast<std::chrono::microseconds>(
+                                    endTime - startTime).count();
+                            json.AddMember("time_microseconds", timeMs, alloc);
+                            res->end(json_to_str(json));
+                        }
+                    } catch (epi::SNPNotFoundError &err) {
+                        stderr_mutex.lock();
+                        std::cerr << "Unknown SNP " << err.get_name() << std::endl;
+                        stderr_mutex.unlock();
+                        rapidjson::Value json(rapidjson::kObjectType);
+                        json.AddMember("ok", false, alloc);
+                        json.AddMember("reason",
+                                       rapidjson::Value(("unknown SNP " + err.get_name()).c_str(), alloc).Move(),
+                                       alloc);
+                        json.AddMember("unknown_snp", rapidjson::Value(err.get_name().c_str(), alloc).Move(), alloc);
+                        if (unknown_scores.Capacity() > 0)
+                            json.AddMember("unknown_or_unavailable_scores", unknown_scores.Move(), alloc);
+                        res->writeStatus("404 Not Found");
+                        res->end(json_to_str(json));
+                    } catch (epi::Error &err) {
+                        stderr_mutex.lock();
+                        std::cerr << "epi::Error " << err.what() << std::endl;
+                        stderr_mutex.unlock();
+                        rapidjson::Value json(rapidjson::kObjectType);
+                        json.AddMember("ok", false, alloc);
+                        json.AddMember("reason", "internal error", alloc);
+                        if (unknown_scores.Capacity() > 0)
+                            json.AddMember("unknown_or_unavailable_scores", unknown_scores.Move(), alloc);
+                        res->writeStatus("500 Internal Server Error");
+                        res->end(json_to_str(json));
+                    }
+                }
+            }).listen(bind_addr, bind_port, [&stderr_mutex, &bind_addr, &bind_port](auto *listen_socket) {
+                stderr_mutex.lock();
+                if (listen_socket) {
+                    std::cout << "[Thread-" << std::this_thread::get_id() << "] Listening on " << bind_addr << ':' << bind_port << std::endl;
+                } else {
+                    std::cerr << "[Thread-" << std::this_thread::get_id() << "] Failed to listen on " << bind_addr << ':' << bind_port << std::endl;
+                }
+                stderr_mutex.unlock();
+            }).run();
+        });
+    });
+
+    std::for_each(threads.begin(), threads.end(), [](std::thread *t) {
+        t->join();
+    });
 
-    LOG_INFO << "Server running on " << bind_addr << ':' << bind_port << " with " << num_threads << " threads";
-    drogon::app().addListener(bind_addr, bind_port);
-    drogon::app().run();
     return 0;
 }
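This patch replaces drogon's managed thread pool with the usual uWebSockets scaling model: one uWS::App with its own event loop per std::thread, each calling listen() on the same address and port so that incoming connections are spread across the workers. Stripped of the NeEDL specifics, the pattern looks roughly like this (the /ping route is hypothetical, and hardware_concurrency() stands in for the --num-threads option):

    #include "App.h"
    #include <string>
    #include <thread>
    #include <vector>

    int main() {
        const unsigned num_threads = std::thread::hardware_concurrency();
        std::vector<std::thread> workers;

        for (unsigned i = 0; i < num_threads; ++i) {
            // capture i by value: the thread outlives the loop iteration
            workers.emplace_back([i]() {
                uWS::App().get("/ping", [i](auto *res, auto *req) {
                    res->end("pong from worker " + std::to_string(i));
                }).listen(3000, [](auto *listen_socket) {
                    // every worker binds the same port; uSockets shares it between threads
                }).run();  // per-thread event loop; blocks until the app shuts down
            });
        }

        for (auto &t : workers) t.join();
    }

Capturing the thread index by value matters here, which is also why the transform lambda above passes thread_index by value into the spawned thread: a reference to a parameter of the spawning call would dangle as soon as that call returns.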
From 6ca1799a446c392472ce7b5fcbce32b0f77282a0 Mon Sep 17 00:00:00 2001
From: juli-p <63345753+juli-p@users.noreply.github.com>
Date: Thu, 14 Sep 2023 16:57:28 +0200
Subject: [PATCH 03/10] added e2e test for realtime_scores

---
 docker/NeEDL/Dockerfile    | 7 +++++--
 install.py                 | 2 +-
 test/e2e/test_container.sh | 6 ++++++
 3 files changed, 12 insertions(+), 3 deletions(-)

diff --git a/docker/NeEDL/Dockerfile b/docker/NeEDL/Dockerfile
index b6baed502..dd4c49319 100644
--- a/docker/NeEDL/Dockerfile
+++ b/docker/NeEDL/Dockerfile
@@ -51,7 +51,8 @@ RUN python3.10 install.py --clean && \
     python3.10 install.py --target NeEDL && \
     python3.10 install.py --target calculate_scores && \
     python3.10 install.py --target convert_to_binary && \
-    python3.10 install.py --target epiJSON
+    python3.10 install.py --target epiJSON && \
+    python3.10 install.py --target realtime_scores
 
 # ---- end of stage 2
 
@@ -78,5 +79,7 @@ COPY --from=build /NeEDL/ext/plink/plink_linux_x86_64_20230116/ /NeEDL/ext/plink
 RUN ln -s /NeEDL/test/model/bin/NeEDL /usr/local/bin/NeEDL && \
     ln -s /NeEDL/test/model/bin/calculate_scores /usr/local/bin/calculate_scores && \
     ln -s /NeEDL/test/model/bin/convert_to_binary /usr/local/bin/convert_to_binary && \
-    ln -s /NeEDL/test/model/bin/convert_to_json /usr/local/bin/epiJSON
+    ln -s /NeEDL/test/model/bin/epiJSON /usr/local/bin/epiJSON && \
+    ln -s /NeEDL/test/model/bin/realtime_scores /usr/local/bin/realtime_scores
+
 # ---- end of stage 3
\ No newline at end of file
diff --git a/install.py b/install.py
index 0ca91e06d..c73cd9a16 100755
--- a/install.py
+++ b/install.py
@@ -134,7 +134,7 @@ def extract_all_zips_in_folder(folder):
     for entry in files:
         if entry.name.endswith(".zip"):
             print("Extracting " + entry.name)
-            subprocess.call('unzip -n -d "' + escaped_folder + '" "' + escaped_folder + entry.name.replace('"', '\\"') + '"', shell=True)
+            subprocess.call('unzip -q -d "' + escaped_folder + '" "' + escaped_folder + entry.name.replace('"', '\\"') + '"', shell=True)
 
 def extract_resources(args):
     if (os.path.isfile(".RES_EXTRACTED") and not args.clean) or args.no_data_unpacking:
diff --git a/test/e2e/test_container.sh b/test/e2e/test_container.sh
index b80e3c758..41046bfeb 100755
--- a/test/e2e/test_container.sh
+++ b/test/e2e/test_container.sh
@@ -25,6 +25,12 @@ python ./run/epiJSON.py --docker-image-name "$1" --docker-no-pulling --help
 python ./run/calculate_scores.py --docker-image-name "$1" --docker-no-pulling --help
 python ./run/convert_to_binary.py --docker-image-name "$1" --docker-no-pulling --help
 
+# realtime_scores is a special case --> no python launcher script exists for it
+# run containers as current user
+user_id=$(id -u)
+group_id=$(id -g)
+docker run --user $user_id:$group_id "$1" /NeEDL/test/model/bin/realtime_scores --help
+
 
 # select small dummy dataset
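Beyond --help, the running service can be exercised with any HTTP client: a request has the form GET /needl/score/<dataset>/<scores>/<snps>, with ';' separating multiple scores or SNPs. A bare-bones POSIX-socket client sketch; the dataset name, score name, and rsIDs below are placeholders, not values shipped with the repository:

    #include <arpa/inet.h>
    #include <netinet/in.h>
    #include <sys/socket.h>
    #include <unistd.h>
    #include <cstdio>
    #include <cstring>

    int main() {
        int fd = socket(AF_INET, SOCK_STREAM, 0);
        sockaddr_in addr{};
        addr.sin_family = AF_INET;
        addr.sin_port = htons(3000);                 // the server's bind port
        inet_pton(AF_INET, "127.0.0.1", &addr.sin_addr);
        if (connect(fd, reinterpret_cast<sockaddr *>(&addr), sizeof(addr)) != 0) return 1;

        // "dummy", "BIC" and the rsIDs are made-up example values
        const char *request = "GET /needl/score/dummy/BIC/rs1;rs2 HTTP/1.1\r\n"
                              "Host: localhost\r\nConnection: close\r\n\r\n";
        write(fd, request, strlen(request));

        char buf[4096];
        ssize_t n;
        while ((n = read(fd, buf, sizeof(buf))) > 0) fwrite(buf, 1, n, stdout);
        close(fd);
        return 0;
    }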
From 570b031995733926ce158bf35d09335ec18958e1 Mon Sep 17 00:00:00 2001
From: juli-p <63345753+juli-p@users.noreply.github.com>
Date: Thu, 14 Sep 2023 18:01:16 +0200
Subject: [PATCH 04/10] fixed docker build issues with uWebSockets

---
 .github/workflows/docker-publish.yml | 2 ++
 install.py                           | 8 ++++++--
 2 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml
index c89dbf15c..e18e4fdf0 100644
--- a/.github/workflows/docker-publish.yml
+++ b/.github/workflows/docker-publish.yml
@@ -25,6 +25,8 @@ jobs:
     steps:
       - name: Checkout repository
         uses: actions/checkout@v3
+        with:
+          submodules: 'recursive'
 
       # Workaround: https://github.com/docker/build-push-action/issues/461
       - name: Setup Docker buildx
diff --git a/install.py b/install.py
index c73cd9a16..5601221c4 100755
--- a/install.py
+++ b/install.py
@@ -105,7 +105,10 @@ def build_external_libraries(args, dependencies):
         print("-- uWebSockets library already built.")
     else:
         print("-- Building uWebSockets library")
-        subprocess.call('git submodule update --init --recursive; cd ext/uWebSockets/uSockets;make boringssl;cd boringssl;BORINGSSL=$PWD;cd ../lsquic;cmake -DBORINGSSL_DIR=$BORINGSSL .;make;cd ..;WITH_LTO=1 WITH_QUIC=1 WITH_BORINGSSL=1 make', shell=True)
+        if not args.no_submodule_extraction:
+            subprocess.call('git submodule update --init --recursive', shell=True)
+
+        subprocess.call('cd ext/uWebSockets/uSockets;make boringssl;cd boringssl;BORINGSSL=$PWD;cd ../lsquic;cmake -DBORINGSSL_DIR=$BORINGSSL .;make;cd ..;WITH_LTO=1 WITH_QUIC=1 WITH_BORINGSSL=1 make', shell=True)
         f = open("ext/.uWebSockets_INSTALLED", "w")
         f.close()
 
@@ -134,7 +137,7 @@ def extract_all_zips_in_folder(folder):
     for entry in files:
         if entry.name.endswith(".zip"):
             print("Extracting " + entry.name)
-            subprocess.call('unzip -q -d "' + escaped_folder + '" "' + escaped_folder + entry.name.replace('"', '\\"') + '"', shell=True)
+            subprocess.call('unzip -qo -d "' + escaped_folder + '" "' + escaped_folder + entry.name.replace('"', '\\"') + '"', shell=True)
 
 def extract_resources(args):
     if (os.path.isfile(".RES_EXTRACTED") and not args.clean) or args.no_data_unpacking:
@@ -209,6 +212,7 @@ def find_dependencies(args):
 parser.add_argument("--gcc", help="one can select the path to gcc manually if the automatically selected compiler is not correct.", default=None)
 parser.add_argument("--gxx", help="one can select the path to g++ manually if the automatically selected compiler is not correct.", default=None)
 parser.add_argument("--extract-datasets", help="Also extracts simulated datasets. These might be necessary for the unit tests.", action="store_true")
+parser.add_argument("--no-submodule-extraction", help="When set the script does not recursively download submodules. This is used for building the docker container.", action="store_true")
 
 args = parser.parse_args()
From fe074c8d7a6a9ae7bd1fb0daa831486aad6db025 Mon Sep 17 00:00:00 2001
From: juli-p <63345753+juli-p@users.noreply.github.com>
Date: Thu, 14 Sep 2023 18:04:20 +0200
Subject: [PATCH 05/10] another fix

---
 docker/NeEDL/Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docker/NeEDL/Dockerfile b/docker/NeEDL/Dockerfile
index dd4c49319..d78243f58 100644
--- a/docker/NeEDL/Dockerfile
+++ b/docker/NeEDL/Dockerfile
@@ -47,7 +47,7 @@ COPY data/dbSNP/inc_pseudogenes /NeEDL/data/dbSNP/inc_pseudogenes
 COPY data/BIOGRID /NeEDL/data/BIOGRID
 
 # initialize everything needed for the build and build all targets
-RUN python3.10 install.py --clean && \
+RUN python3.10 install.py --clean --no-submodule-extraction && \
     python3.10 install.py --target NeEDL && \
     python3.10 install.py --target calculate_scores && \
     python3.10 install.py --target convert_to_binary && \

From e633ddae77d3adb903e302dbe2e1597815950347 Mon Sep 17 00:00:00 2001
From: juli-p <63345753+juli-p@users.noreply.github.com>
Date: Thu, 14 Sep 2023 19:18:47 +0200
Subject: [PATCH 06/10] added go to the dependencies for the docker container

---
 docker/NeEDL/Dockerfile | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/docker/NeEDL/Dockerfile b/docker/NeEDL/Dockerfile
index d78243f58..004d768b2 100644
--- a/docker/NeEDL/Dockerfile
+++ b/docker/NeEDL/Dockerfile
@@ -32,7 +32,9 @@ WORKDIR /NeEDL
 ENV DEBIAN_FRONTEND=noninteractive
 
 # install build dependencies
-RUN apt-get update -y && apt-get install -y build-essential git cmake autoconf libtool pkg-config python3.10 python3.10-dev gcc g++ sqlite3 libsqlite3-dev python3.10-distutils unzip
+RUN apt-get update -y && \
+    apt-get install -y build-essential git cmake autoconf libtool pkg-config python3.10 python3.10-dev gcc g++ sqlite3 libsqlite3-dev python3.10-distutils unzip wget && \
+    wget -c https://dl.google.com/go/go1.21.1.linux-amd64.tar.gz -O - | sudo tar -xz -C /usr/local
 
 # copy necessary NeEDL repo files
 COPY ext ./ext
From a1552c8142add0b4cdb1f4bd601acd85b11233f1 Mon Sep 17 00:00:00 2001
From: juli-p <63345753+juli-p@users.noreply.github.com>
Date: Fri, 15 Sep 2023 12:27:19 +0200
Subject: [PATCH 07/10] fix go installation for docker container

---
 .dockerignore           | 4 ++++
 docker/NeEDL/Dockerfile | 4 ++--
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/.dockerignore b/.dockerignore
index 9b461f292..2b79beb76 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -108,6 +108,8 @@ test/model/res/*
 /cmake-build-*
 /.idea/
 
+CMakeCache.txt
+
 #load
 # /data/LOAD/
 # /data/CAD_control/*
@@ -123,6 +125,8 @@ test/model/res/*
 /ext/boost_1_71_0/tools/build/src/engine/b2
 /ext/boost_1_71_0/tools/build/src/engine/bjam
 
+/ext/uWebSockets/uSockets/*.o
+
 .Rhistory
 
diff --git a/docker/NeEDL/Dockerfile b/docker/NeEDL/Dockerfile
index 004d768b2..d2b365b36 100644
--- a/docker/NeEDL/Dockerfile
+++ b/docker/NeEDL/Dockerfile
@@ -33,8 +33,8 @@ ENV DEBIAN_FRONTEND=noninteractive
 
 # install build dependencies
 RUN apt-get update -y && \
-    apt-get install -y build-essential git cmake autoconf libtool pkg-config python3.10 python3.10-dev gcc g++ sqlite3 libsqlite3-dev python3.10-distutils unzip wget && \
-    wget -c https://dl.google.com/go/go1.21.1.linux-amd64.tar.gz -O - | sudo tar -xz -C /usr/local
+    apt-get install -y build-essential git cmake autoconf libtool pkg-config python3.10 python3.10-dev gcc g++ sqlite3 libsqlite3-dev python3.10-distutils unzip curl libevent-dev && \
+    curl https://dl.google.com/go/go1.21.1.linux-amd64.tar.gz | tar -xz -C /usr/local
 
 # copy necessary NeEDL repo files
 COPY ext ./ext
From bcc17dd6e3a0759d895cc7e3aab645259717d1f6 Mon Sep 17 00:00:00 2001
From: juli-p <63345753+juli-p@users.noreply.github.com>
Date: Fri, 22 Sep 2023 12:18:25 +0200
Subject: [PATCH 08/10] docker realtime_scores now builds

---
 .dockerignore           |  3 ++-
 .gitignore              |  6 +++---
 CMakeLists.txt          | 26 +++++++++++++++++++++-----
 docker/NeEDL/Dockerfile | 21 ++++++++++++---------
 install.py              |  9 ++++++++-
 5 files changed, 46 insertions(+), 19 deletions(-)

diff --git a/.dockerignore b/.dockerignore
index 2b79beb76..90acc119e 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -23,6 +23,7 @@ docs/
 
 # Binary directories
 **/bin
+!/ext/uWebSockets/uSockets/lsquic/bin
 
 # INI files for unit tests
 /test/unit/init/*
@@ -108,7 +109,7 @@ test/model/res/*
 /cmake-build-*
 /.idea/
 
-CMakeCache.txt
+**/CMakeCache.txt
 
 #load
 # /data/LOAD/
diff --git a/.gitignore b/.gitignore
index 22b2a44de..55873d2d7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -119,9 +119,9 @@ test/model/res/*
 
 .Rhistory
 
-.INSTALLED
-.uWebSockets_INSTALLED
-.RES_EXTRACTED
+**/.INSTALLED
+**/.uWebSockets_INSTALLED
+**/.RES_EXTRACTED
 
 .env
 docker/data
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 61fa4c660..cad859a7e 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -19,10 +19,12 @@
 #                                                                             #
 ###############################################################################
 
-cmake_minimum_required(VERSION 3.2)
+cmake_minimum_required(VERSION 3.8)
 set(CMAKE_VERBOSE_MAKEFILE ON)
 project(NeEDL)
 
+include(CMakePrintHelpers)
+
 # Determine build type.
 if(NOT CMAKE_BUILD_TYPE)
     message(STATUS "Building NeEDL with build type 'Release', as none was specified.")
@@ -31,6 +33,22 @@ else()
     message(STATUS "Building NeEDL with build type '${CMAKE_BUILD_TYPE}'.")
 endif()
 
+set(USE_INCLUDED_BOOST "1" CACHE BOOL "Use the boost version shipped with this repository")
+cmake_print_variables(USE_INCLUDED_BOOST)
+if (USE_INCLUDED_BOOST)
+    message("-- boost version: vendored")
+    set(BOOST_INCLUDE ${CMAKE_SOURCE_DIR}/ext/boost_1_71_0)
+    set(BOOST_LINK_DIR ${CMAKE_SOURCE_DIR}/ext/boost_1_71_0/stage/lib)
+else()
+    message("-- boost version: system")
+    set(Boost_USE_STATIC_LIBS OFF)
+    set(Boost_USE_MULTITHREADED ON)
+    set(Boost_USE_STATIC_RUNTIME OFF)
+    find_package(Boost 1.71.0 REQUIRED COMPONENTS filesystem graph)
+
+    set(BOOST_INCLUDE ${Boost_INCLUDE_DIRS})
+endif()
+
 # Find doxygen.
 #find_package(Doxygen)
 #if(DOXYGEN_FOUND)
@@ -43,7 +61,6 @@ find_package(Python COMPONENTS Interpreter Development)
 find_package(SQLite3 REQUIRED)
 
 # debug stuff
-include(CMakePrintHelpers)
 cmake_print_variables(CMAKE_C_COMPILER)
 cmake_print_variables(CMAKE_C_COMPILER_VERSION)
 cmake_print_variables(CMAKE_CXX_COMPILER)
@@ -67,15 +84,14 @@ endif()
 set(CMAKE_MACOSX_RPATH ON)
 
 # Add include and link directories.
-set(BOOST_ROOT ${CMAKE_SOURCE_DIR}/ext/boost_1_71_0)
 set(EIGEN_ROOT ${CMAKE_SOURCE_DIR}/ext/eigen_3_3_7)
 set(CLI11_ROOT ${CMAKE_SOURCE_DIR}/ext/cli11_1_9_0)
 set(CATCH_ROOT ${CMAKE_SOURCE_DIR}/ext/catch_2_13_9)
 set(IGRAPH_ROOT ${CMAKE_SOURCE_DIR}/ext/igraph_0.9.8)
 set(PYBIND11_HOME ${CMAKE_SOURCE_DIR}/ext/pybind11)
 
-include_directories(SYSTEM ${SQLite3_INCLUDE_DIRS} ${BOOST_ROOT} ${EIGEN_ROOT} ${CLI11_ROOT} ${CATCH_ROOT} ${IGRAPH_ROOT}/include ${IGRAPH_ROOT}/build/include ${PYBIND11_HOME}/include ${SQLite3_INCLUDE_DIRS} ${Python_INCLUDE_DIRS} /usr/lib/llvm-10/include)
-link_directories(${BOOST_ROOT}/stage/lib /usr/lib/llvm-10/lib ${IGRAPH_ROOT}/build/src ${Python_LIBRARY_DIRS} ${SQLite3_LIBRARIES})
+include_directories(SYSTEM ${SQLite3_INCLUDE_DIRS} ${BOOST_INCLUDE} ${EIGEN_ROOT} ${CLI11_ROOT} ${CATCH_ROOT} ${IGRAPH_ROOT}/include ${IGRAPH_ROOT}/build/include ${PYBIND11_HOME}/include ${SQLite3_INCLUDE_DIRS} ${Python_INCLUDE_DIRS} /usr/lib/llvm-10/include)
+link_directories(${BOOST_LINK_DIR} /usr/lib/llvm-10/lib ${IGRAPH_ROOT}/build/src ${Python_LIBRARY_DIRS} ${SQLite3_LIBRARIES})
 if(APPLE)
     include_directories(SYSTEM ${OMP_ROOT}/include)
     link_directories(${OMP_ROOT}/lib)
diff --git a/docker/NeEDL/Dockerfile b/docker/NeEDL/Dockerfile
index d2b365b36..4b9c59e28 100644
--- a/docker/NeEDL/Dockerfile
+++ b/docker/NeEDL/Dockerfile
@@ -33,9 +33,12 @@ ENV DEBIAN_FRONTEND=noninteractive
 
 # install build dependencies
 RUN apt-get update -y && \
-    apt-get install -y build-essential git cmake autoconf libtool pkg-config python3.10 python3.10-dev gcc g++ sqlite3 libsqlite3-dev python3.10-distutils unzip curl libevent-dev && \
+    apt-get install -y build-essential git cmake autoconf libtool pkg-config python3.10 python3.10-dev gcc g++ sqlite3 libsqlite3-dev python3.10-distutils unzip curl libevent-dev libboost-all-dev && \
     curl https://dl.google.com/go/go1.21.1.linux-amd64.tar.gz | tar -xz -C /usr/local
 
+ENV PATH /usr/local/go/bin:$PATH
+
+
 # copy necessary NeEDL repo files
 COPY ext ./ext
 COPY src ./src
@@ -49,12 +52,12 @@ COPY data/dbSNP/inc_pseudogenes /NeEDL/data/dbSNP/inc_pseudogenes
 COPY data/BIOGRID /NeEDL/data/BIOGRID
 
 # initialize everything needed for the build and build all targets
-RUN python3.10 install.py --clean --no-submodule-extraction && \
-    python3.10 install.py --target NeEDL && \
-    python3.10 install.py --target calculate_scores && \
-    python3.10 install.py --target convert_to_binary && \
-    python3.10 install.py --target epiJSON && \
-    python3.10 install.py --target realtime_scores
+RUN python3.10 install.py --clean --no-submodule-extraction --system-boost && \
+    python3.10 install.py --target NeEDL --system-boost && \
+    python3.10 install.py --target calculate_scores --system-boost && \
+    python3.10 install.py --target convert_to_binary --system-boost && \
+    python3.10 install.py --target epiJSON --system-boost && \
+    python3.10 install.py --target realtime_scores --system-boost
 
 # ---- end of stage 2
 
@@ -68,10 +71,10 @@ LABEL org.opencontainers.image.source=https://github.com/biomedbigdata/NeEDL
 ENV DEBIAN_FRONTEND=noninteractive
 
 # install production dependencies
-RUN apt-get update -y && apt-get install -y python3.10 python3.10-dev sqlite3 g++ gcc
+RUN apt-get update -y && apt-get install -y python3.10 python3.10-dev sqlite3 g++ gcc libboost-all-dev
 
 # copy everything important over to the new container
-COPY --from=build /NeEDL/ext/boost_1_71_0/stage /NeEDL/ext/boost_1_71_0/stage
+# COPY --from=build /NeEDL/ext/boost_1_71_0/stage /NeEDL/ext/boost_1_71_0/stage
 COPY --from=build /NeEDL/test/model/bin/* /NeEDL/test/model/bin/
 COPY --from=build /NeEDL/data/dbSNP/inc_pseudogenes/snps_restruc_full_inc_pseudo.csv /NeEDL/data/dbSNP/inc_pseudogenes/snps_restruc_full_inc_pseudo.csv
 COPY --from=build /NeEDL/data/BIOGRID/BIOGRID-ORGANISM-Homo_sapiens-3.5.182.tab2.txt /NeEDL/data/BIOGRID/BIOGRID-ORGANISM-Homo_sapiens-current.tab2.txt
diff --git a/install.py b/install.py
index 5601221c4..3c12684f0 100755
--- a/install.py
+++ b/install.py
@@ -53,6 +53,10 @@ def build_targets(args, all_targets, dependencies):
     if (not os.path.isfile("build/Makefile")):
         print("-- Running CMake.")
         commands = "cd build; rm -rf *; cmake .. -DCMAKE_BUILD_TYPE=" + ("Debug" if args.debug else "Release")
+
+        if args.system_boost:
+            commands += f' -DUSE_INCLUDED_BOOST=0'
+
         commands += f' -DPython_EXECUTABLE="{dependencies["python3"]}"'
         commands += f' -DCMAKE_C_COMPILER="{dependencies["gcc"]}" -DCMAKE_CXX_COMPILER="{dependencies["g++"]}"'
         if platform.system() == "Darwin":
@@ -75,7 +79,9 @@ def build_targets(args, all_targets, dependencies):
 
 def build_external_libraries(args, dependencies):
     # boost
-    if os.path.isfile("ext/boost_1_71_0/.INSTALLED") and not args.clean:
+    if args.system_boost:
+        print("-- Boost libraries: system installation is used --> build of boost is skipped")
+    elif os.path.isfile("ext/boost_1_71_0/.INSTALLED") and not args.clean:
         print("-- Boost libraries already built.")
     else:
         print("-- Building Boost libraries.")
@@ -219,6 +225,7 @@ def find_dependencies(args):
 parser.add_argument("--extract-datasets", help="Also extracts simulated datasets. These might be necessary for the unit tests.", action="store_true")
 parser.add_argument("--no-submodule-extraction", help="When set the script does not recursively download submodules. This is used for building the docker container.", action="store_true")
+parser.add_argument("--system-boost", help="Use the system boost installation instead of the one in this repository", action="store_true")
 
 args = parser.parse_args()
From de502c3fb0d69ca87b244eb88b06e6aabce31485 Mon Sep 17 00:00:00 2001
From: juli-p <63345753+juli-p@users.noreply.github.com>
Date: Fri, 22 Sep 2023 12:59:21 +0200
Subject: [PATCH 09/10] updated docker-compose, improved NeEDL compile time

---
 docker/realtime_scores/Dockerfile         | 37 -----------------------
 docker/realtime_scores/docker-compose.yml |  7 ++---
 quepistasis/header/cpu_sa.h               |  4 +++
 quepistasis/header/cpu_sa_wrapper.h       |  5 +++
 quepistasis/header/python_wrapper.h       |  5 +++
 quepistasis/header/snps_optimization.h    |  5 +++
 src/jobs/ShinyAppLauncher.hpp             |  4 +++
 test/model/CMakeLists.txt                 |  8 ++++-
 test/model/src/NeEDL.cpp                  |  7 ++---
 9 files changed, 35 insertions(+), 47 deletions(-)
 delete mode 100644 docker/realtime_scores/Dockerfile

diff --git a/docker/realtime_scores/Dockerfile b/docker/realtime_scores/Dockerfile
deleted file mode 100644
index 516fdf324..000000000
--- a/docker/realtime_scores/Dockerfile
+++ /dev/null
@@ -1,37 +0,0 @@
-FROM ubuntu:latest as base
-
-WORKDIR /NeEDL
-
-ENV DEBIAN_FRONTEND=noninteractive
-
-# copy necessary NeEDL repo files
-COPY ext ./ext
-COPY src ./src
-COPY test ./test
-COPY CMakeLists.txt ./
-COPY install.py ./
-
-RUN mkdir -p ./uploads/tmp
-
-# install dependencies
-RUN apt-get update && \
-    apt-get install -y build-essential git cmake autoconf libtool pkg-config python3 python3-dev libjsoncpp-dev uuid-dev openssl libssl-dev zlib1g-dev libbrotli-dev gcc g++ sqlite3 libsqlite3-dev
-
-
-# add symlink to jsoncpp include files
-RUN ln -s /usr/include/jsoncpp/json /usr/include/json
-
-
-# build drogon web framework
-RUN git clone https://github.com/drogonframework/drogon drogon
-WORKDIR /NeEDL/drogon
-RUN git submodule update --init
-RUN mkdir build
-WORKDIR /NeEDL/drogon/build
-RUN cmake -DCMAKE_BUILD_TYPE=Release -DBUILD_MYSQL=OFF -DBUILD_POSTGRESQL=OFF -DBUILD_REDIS=OFF -DBUILD_SQLITE=OFF -DBUILD_EXAMPLES=OFF -DBUILD_CTL=OFF ..
-RUN make
-RUN make install
-WORKDIR /NeEDL
-
-# build the realtime_score program
-RUN ./install.py --target realtime_scores --no-data-unpacking --clean
diff --git a/docker/realtime_scores/docker-compose.yml b/docker/realtime_scores/docker-compose.yml
index cb0c9b491..96db62560 100644
--- a/docker/realtime_scores/docker-compose.yml
+++ b/docker/realtime_scores/docker-compose.yml
@@ -2,12 +2,9 @@ version: '3'
 
 services:
   realtime_scores:
+    image: bigdatainbiomedicine/needl
     restart: always
-    command: ./test/model/bin/realtime_scores --input-dir /datasets --num-threads $NUM_THREADS --bind-addr 0.0.0.0 --bind-port 3000
-    build:
-      context: ../../
-      dockerfile: docker/realtime_scores/Dockerfile
-      # target: base
+    command: /NeEDL/test/model/bin/realtime_scores --input-dir /datasets --num-threads $NUM_THREADS --bind-addr 0.0.0.0 --bind-port 3000
     volumes:
       - ${DATASET_DIR}:/datasets:Z
     user: ${REALTIME_SCORES_USER}
diff --git a/quepistasis/header/cpu_sa.h b/quepistasis/header/cpu_sa.h
index 936ac8907..04806980e 100644
--- a/quepistasis/header/cpu_sa.h
+++ b/quepistasis/header/cpu_sa.h
@@ -64,4 +64,8 @@ int general_simulated_annealing(
     void * const interrupt_function
 );
 
+#ifdef HEADER_ONLY
+#include "../src/cpu_sa.cpp"
+#endif
+
 #endif
\ No newline at end of file
diff --git a/quepistasis/header/cpu_sa_wrapper.h b/quepistasis/header/cpu_sa_wrapper.h
index 1d618f0e3..8c3d6ee85 100644
--- a/quepistasis/header/cpu_sa_wrapper.h
+++ b/quepistasis/header/cpu_sa_wrapper.h
@@ -39,4 +39,9 @@ void simulated_annealing_ising(
     uint64_t seed,
     sa_return* ret);
 
+
+#ifdef HEADER_ONLY
+#include "../src/cpu_sa_wrapper.cpp"
+#endif
+
 #endif
\ No newline at end of file
diff --git a/quepistasis/header/python_wrapper.h b/quepistasis/header/python_wrapper.h
index e3c367033..5fcf53113 100644
--- a/quepistasis/header/python_wrapper.h
+++ b/quepistasis/header/python_wrapper.h
@@ -209,4 +209,9 @@ class PythonWrapper {
         int clique_size, int shots, const char* save_path);
 };
 
+
+#ifdef HEADER_ONLY
+#include "../src/python_wrapper.cpp"
+#endif
+
 #endif
\ No newline at end of file
diff --git a/quepistasis/header/snps_optimization.h b/quepistasis/header/snps_optimization.h
index dcdd5610f..bcfe4df6c 100644
--- a/quepistasis/header/snps_optimization.h
+++ b/quepistasis/header/snps_optimization.h
@@ -244,4 +244,9 @@ class snps_qubo_matrix : public matrix {
 
 };
 
+
+#ifdef HEADER_ONLY
+#include "../src/snps_optimization.cpp"
+#endif
+
 #endif
\ No newline at end of file
diff --git a/src/jobs/ShinyAppLauncher.hpp b/src/jobs/ShinyAppLauncher.hpp
index a33a19f5d..ce171ad52 100644
--- a/src/jobs/ShinyAppLauncher.hpp
+++ b/src/jobs/ShinyAppLauncher.hpp
@@ -21,4 +21,8 @@ namespace epi {
 
 } // epi
 
+#ifdef HEADER_ONLY
+#include "ShinyAppLauncher.cpp"
+#endif
+
 #endif //GENEPISEEKER_SHINYAPPLAUNCHER_HPP
diff --git a/test/model/CMakeLists.txt b/test/model/CMakeLists.txt
index 1a448d778..ed346386e 100644
--- a/test/model/CMakeLists.txt
+++ b/test/model/CMakeLists.txt
@@ -32,7 +32,13 @@ if(APPLE)
     target_link_libraries(compare_models omp)
 endif()
 
-add_executable(NeEDL src/NeEDL.cpp src/util.cpp ${MAIN_SOURCES_NEW} ${QUANTUM_COMPUTING})
+if (CMAKE_BUILD_TYPE STREQUAL "Release")
+    add_compile_definitions(CMAKE_RELEASE)
+    add_executable(NeEDL src/NeEDL.cpp src/util.cpp)
+else()
+    add_executable(NeEDL src/NeEDL.cpp src/util.cpp ${MAIN_SOURCES_NEW} ${QUANTUM_COMPUTING})
+endif()
+
 target_link_libraries(NeEDL sqlite3 boost_filesystem igraph ${Python_LIBRARIES})
 if(APPLE)
     target_link_libraries(NeEDL omp)
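The HEADER_ONLY guards added above are what make the Release shortcut in test/model/CMakeLists.txt work: when NeEDL.cpp defines HEADER_ONLY before its includes, every header pulls in its own implementation file, the whole program collapses into a single translation unit, and the ${MAIN_SOURCES_NEW} and ${QUANTUM_COMPUTING} source lists can be dropped from the target. A generic sketch of the idiom with made-up file names:

    // widget.h (illustrative, not from this repository)
    #ifndef WIDGET_H
    #define WIDGET_H

    int widget_answer();        // declaration visible to all includers

    #ifdef HEADER_ONLY
    #include "widget.cpp"       // single-TU build: inline the implementation
    #endif

    #endif // WIDGET_H

    // widget.cpp
    int widget_answer() { return 42; }

    // main.cpp
    #define HEADER_ONLY         // as NeEDL.cpp does when CMAKE_RELEASE is set
    #include "widget.h"

    int main() { return widget_answer() == 42 ? 0 : 1; }

In a debug build, main.cpp simply omits the define and widget.cpp is compiled as its own object file, keeping incremental rebuilds fast.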
diff --git a/test/model/src/NeEDL.cpp b/test/model/src/NeEDL.cpp
index 8bfac945f..0760a9763 100644
--- a/test/model/src/NeEDL.cpp
+++ b/test/model/src/NeEDL.cpp
@@ -2,15 +2,14 @@
 // Created by juli on 25.05.22.
 //
 
+#ifdef CMAKE_RELEASE
+    #define HEADER_ONLY
+#endif
+
 #include
 #include
 
 #include "../../src/pipelines/NeEDLPipeline.hpp"
-#include "../../src/util/helper_functions.hpp"
-#include "../../../src/jobs/SeedingRandomConnected.hpp"
-#include "../../../src/jobs/SeedingCommunityWise.hpp"
-#include "../../../src/jobs/SeedingQuantumComputing.hpp"

From 5b2b32e7b48588c44ebc6107ad23a8768cc5dc33 Mon Sep 17 00:00:00 2001
From: juli-p <63345753+juli-p@users.noreply.github.com>
Date: Fri, 22 Sep 2023 13:06:36 +0200
Subject: [PATCH 10/10] set docker compose name

---
 docker/realtime_scores/docker-compose.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/docker/realtime_scores/docker-compose.yml b/docker/realtime_scores/docker-compose.yml
index 96db62560..a9509ec26 100644
--- a/docker/realtime_scores/docker-compose.yml
+++ b/docker/realtime_scores/docker-compose.yml
@@ -1,4 +1,5 @@
 version: '3'
+name: needl_realtime_scores_api
 
 services:
   realtime_scores: