diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 97db4c40e5..72ff158c8e 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -FROM sls-opensource-registry.cn-shanghai.cr.aliyuncs.com/ilogtail-community-edition/ilogtail-build-linux:gcc_9.3.1-2 +FROM sls-opensource-registry.cn-shanghai.cr.aliyuncs.com/ilogtail-community-edition/ilogtail-build-linux:gcc_9.3.1-3 ARG USERNAME=admin USER root diff --git a/.github/workflows/build-core-ut.yaml b/.github/workflows/build-core-ut.yaml new file mode 100644 index 0000000000..3ce9da3075 --- /dev/null +++ b/.github/workflows/build-core-ut.yaml @@ -0,0 +1,89 @@ +# Copyright 2021 iLogtail Authors +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name: Build Core Unit Test + +on: + pull_request: + paths-ignore: + - 'docs/**' + - 'example_config/**' + - 'docker/**' + - 'k8s_template/**' + - 'changes/**' + - 'licenses/**' + - 'CHANGELOG.md' + push: + branches: + - main + - 1.* + +jobs: + CI: + runs-on: ${{ matrix.runner }} + timeout-minutes: 60 + strategy: + matrix: + go-version: [1.19] + # https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions#jobsjob_idruns-on + runner: [ubuntu-latest] + fail-fast: true + steps: + # Clean up space to prevent action from running out of disk space. 
+ - name: Free disk space + if: matrix.runner == 'ubuntu-latest' + run: | + sudo rm -rf /usr/share/dotnet + sudo rm -rf /opt/ghc + sudo rm -rf "/usr/local/share/boost" + sudo rm -rf "$AGENT_TOOLSDIRECTORY" + sudo -E apt-get -qq autoremove --purge + sudo -E apt-get -qq clean + + - name: Check disk space + run: | + df -hT $PWD + + - name: Set up Go ${{ matrix.go-version }} + uses: actions/setup-go@v2 + with: + go-version: ${{ matrix.go-version }} + + - name: Check out code + uses: actions/checkout@v2 + with: + submodules: true + + - name: Build Unit Test + if: matrix.runner == 'ubuntu-latest' + env: + BUILD_LOGTAIL: OFF + BUILD_LOGTAIL_UT: ON + ENABLE_COMPATIBLE_MODE: ON + ENABLE_STATIC_LINK_CRT: ON + WITHOUTGDB: ON + # BUILD_TYPE: Debug # TODO: Uncomment when memory management is refined + run: make core + + - name: Unit Test + if: matrix.runner == 'ubuntu-latest' + run: make unittest_core + + result: + runs-on: ubuntu-latest + timeout-minutes: 30 + needs: [CI] + steps: + - name: Build Result + run: echo "Just to make the GitHub merge button green" diff --git a/.github/workflows/build-core.yaml b/.github/workflows/build-core.yaml index 261f3ac54c..ae94e02a18 100644 --- a/.github/workflows/build-core.yaml +++ b/.github/workflows/build-core.yaml @@ -37,15 +37,11 @@ jobs: matrix: go-version: [1.19] # https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions#jobsjob_idruns-on - runner: [ubuntu-latest, macos-latest, windows-2019] + runner: [ubuntu-latest] fail-fast: true steps: - # prepare ubuntu environment - - name: prepare ubuntu environment - if: matrix.runner == 'ubuntu-latest' - run: sudo apt-get clean && sudo apt-get update && sudo apt-get install -y libsystemd-dev # Clean up space to prevent action from running out of disk space. 
- - name: clean + - name: Free disk space if: matrix.runner == 'ubuntu-latest' run: | sudo rm -rf /usr/share/dotnet @@ -54,23 +50,17 @@ jobs: sudo rm -rf "$AGENT_TOOLSDIRECTORY" sudo -E apt-get -qq autoremove --purge sudo -E apt-get -qq clean + - name: Check disk space run: | df -hT $PWD - # prepare windows environment - # https://github.com/actions/virtual-environments/issues/2549 - - name: add mingw32 to path - if: matrix.runner == 'windows-2019' - shell: bash - run: | - echo "C:\msys64\mingw32\bin" >> $GITHUB_PATH - name: Set up Go ${{ matrix.go-version }} uses: actions/setup-go@v2 with: go-version: ${{ matrix.go-version }} - - - name: Check out code into the Go module directory + + - name: Check out code uses: actions/checkout@v2 with: submodules: true @@ -78,15 +68,20 @@ jobs: - name: Build Binary if: matrix.runner == 'ubuntu-latest' env: - BUILD_LOGTAIL_UT: ON + BUILD_LOGTAIL_UT: OFF ENABLE_COMPATIBLE_MODE: ON ENABLE_STATIC_LINK_CRT: ON WITHOUTGDB: ON run: make core - - name: Unit Test + - name: Check compatibility if: matrix.runner == 'ubuntu-latest' - run: make unittest_core + env: + BUILD_LOGTAIL_UT: OFF + ENABLE_COMPATIBLE_MODE: ON + ENABLE_STATIC_LINK_CRT: ON + WITHOUTGDB: ON + run: make dist && scripts/check_glibc.sh result: runs-on: ubuntu-latest diff --git a/.github/workflows/build-pure-plugin.yaml b/.github/workflows/build-pure-plugin.yaml index ccd23008b6..2705c2d0dd 100644 --- a/.github/workflows/build-pure-plugin.yaml +++ b/.github/workflows/build-pure-plugin.yaml @@ -44,8 +44,9 @@ jobs: - name: prepare ubuntu environment if: matrix.runner == 'ubuntu-latest' run: sudo apt-get clean && sudo apt-get update && sudo apt-get install -y libsystemd-dev + # Clean up space to prevent action from running out of disk space. 
- - name: clean + - name: Free disk space if: matrix.runner == 'ubuntu-latest' run: | sudo rm -rf /usr/share/dotnet @@ -54,9 +55,11 @@ jobs: sudo rm -rf "$AGENT_TOOLSDIRECTORY" sudo -E apt-get -qq autoremove --purge sudo -E apt-get -qq clean + - name: Check disk space run: | df -hT $PWD + # prepare windows environment # https://github.com/actions/virtual-environments/issues/2549 - name: add mingw32 to path @@ -70,7 +73,7 @@ jobs: with: go-version: ${{ matrix.go-version }} - - name: Check out code into the Go module directory + - name: Check out code uses: actions/checkout@v2 with: submodules: true diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index 4e217dc1ee..68309cbea1 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -36,15 +36,11 @@ jobs: matrix: go-version: [1.19] # https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions#jobsjob_idruns-on - runner: [ubuntu-latest, macos-latest, windows-2019] + runner: [ubuntu-latest] fail-fast: true steps: - # prepare ubuntu environment - - name: prepare ubuntu environment - if: matrix.runner == 'ubuntu-latest' - run: sudo apt-get clean && sudo apt-get update && sudo apt-get install -y libsystemd-dev # Clean up space to prevent action from running out of disk space. 
- - name: clean + - name: Free disk space if: matrix.runner == 'ubuntu-latest' run: | sudo rm -rf /usr/share/dotnet @@ -53,23 +49,17 @@ jobs: sudo rm -rf "$AGENT_TOOLSDIRECTORY" sudo -E apt-get -qq autoremove --purge sudo -E apt-get -qq clean + - name: Check disk space run: | df -hT $PWD - # prepare windows environment - # https://github.com/actions/virtual-environments/issues/2549 - - name: add mingw32 to path - if: matrix.runner == 'windows-2019' - shell: bash - run: | - echo "C:\msys64\mingw32\bin" >> $GITHUB_PATH - name: Set up Go ${{ matrix.go-version }} uses: actions/setup-go@v2 with: go-version: ${{ matrix.go-version }} - - - name: Check out code into the Go module directory + + - name: Check out code uses: actions/checkout@v2 with: submodules: true @@ -81,7 +71,7 @@ jobs: ENABLE_COMPATIBLE_MODE: ON ENABLE_STATIC_LINK_CRT: ON WITHOUTGDB: ON - run: make dist && scripts/check_glibc.sh + run: make dist - name: Build Docker if: matrix.runner == 'ubuntu-latest' diff --git a/.github/workflows/e2e-core.yaml b/.github/workflows/e2e-framework.yaml similarity index 89% rename from .github/workflows/e2e-core.yaml rename to .github/workflows/e2e-framework.yaml index 94a50d294a..61e949a63b 100644 --- a/.github/workflows/e2e-core.yaml +++ b/.github/workflows/e2e-framework.yaml @@ -39,7 +39,7 @@ jobs: fail-fast: true steps: # Clean up space to prevent action from running out of disk space. 
- - name: clean + - name: Free disk space if: matrix.runner == 'ubuntu-latest' run: | sudo rm -rf /usr/share/dotnet @@ -48,9 +48,11 @@ jobs: sudo rm -rf "$AGENT_TOOLSDIRECTORY" sudo -E apt-get -qq autoremove --purge sudo -E apt-get -qq clean + - name: Check disk space run: | df -hT $PWD + - name: Set up Go ${{ matrix.go-version }} uses: actions/setup-go@v2 with: @@ -66,12 +68,18 @@ jobs: sudo curl -SL https://github.com/docker/compose/releases/download/v2.7.0/docker-compose-linux-x86_64 -o /usr/local/bin/docker-compose sudo chmod +x /usr/local/bin/docker-compose - - name: E2E Core Structure Test + - name: E2E Plugin Framework Test env: BUILD_LOGTAIL_UT: OFF WITHOUTGDB: ON run: make e2e-core + # Framework Test will provide local image that Unit Test required + - name: E2E Engine Unit Test + if: matrix.runner == 'ubuntu' + run: | + make unittest_e2e_engine + result: runs-on: ubuntu-latest timeout-minutes: 60 diff --git a/.github/workflows/e2e.yaml b/.github/workflows/e2e.yaml index 9c4f9893ce..f4ac940161 100644 --- a/.github/workflows/e2e.yaml +++ b/.github/workflows/e2e.yaml @@ -48,15 +48,17 @@ jobs: sudo rm -rf "$AGENT_TOOLSDIRECTORY" sudo -E apt-get -qq autoremove --purge sudo -E apt-get -qq clean + - name: Check disk space run: | df -hT $PWD + - name: Set up Go ${{ matrix.go-version }} uses: actions/setup-go@v2 with: go-version: ${{ matrix.go-version }} - - name: Check out code into the Go module directory + - name: Check out code uses: actions/checkout@v2 with: submodules: true @@ -66,23 +68,18 @@ jobs: sudo curl -SL https://github.com/docker/compose/releases/download/v2.7.0/docker-compose-linux-x86_64 -o /usr/local/bin/docker-compose sudo chmod +x /usr/local/bin/docker-compose - - name: Kernel version - run: uname -r - - name: Docker version - run: docker --version + - name: System environment + run: | + uname -r + docker --version + go version - - name: E2E Test + - name: E2E Behavior Test env: BUILD_LOGTAIL_UT: OFF WITHOUTGDB: ON run: make e2e - - name: 
UnitTest E2e Engine - if: matrix.runner == 'ubuntu' - run: | - go version - make unittest_e2e_engine - result: runs-on: ubuntu-latest timeout-minutes: 60 diff --git a/.github/workflows/static-check.yaml b/.github/workflows/static-check.yaml index 9f9595b5f4..cbeaa8c8d4 100644 --- a/.github/workflows/static-check.yaml +++ b/.github/workflows/static-check.yaml @@ -37,16 +37,16 @@ jobs: strategy: matrix: go-version: [ 1.19 ] - runner: [ ubuntu, macos ] + runner: [ ubuntu ] fail-fast: true steps: - # prepare ubuntu environment - name: prepare ubuntu environment if: matrix.runner == 'ubuntu' run: sudo apt-get clean && sudo apt-get update && sudo apt-get install -y libsystemd-dev + # Clean up space to prevent action from running out of disk space. - - name: clean + - name: Free disk space if: matrix.runner == 'ubuntu-latest' run: | sudo rm -rf /usr/share/dotnet @@ -55,15 +55,17 @@ jobs: sudo rm -rf "$AGENT_TOOLSDIRECTORY" sudo -E apt-get -qq autoremove --purge sudo -E apt-get -qq clean + - name: Check disk space run: | df -hT $PWD + - name: Set up Go ${{ matrix.go-version }} uses: actions/setup-go@v2 with: go-version: ${{ matrix.go-version }} - - name: Check out code into the Go module directory + - name: Check out code uses: actions/checkout@v2 with: submodules: true diff --git a/config_server/service/Dockerfile b/config_server/service/Dockerfile index aba0392dde..c5d00cdaa7 100644 --- a/config_server/service/Dockerfile +++ b/config_server/service/Dockerfile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-FROM sls-opensource-registry.cn-shanghai.cr.aliyuncs.com/ilogtail-community-edition/ilogtail-build-linux:gcc_9.3.1-1 as build +FROM sls-opensource-registry.cn-shanghai.cr.aliyuncs.com/ilogtail-community-edition/ilogtail-build-linux:gcc_9.3.1-3 as build USER root WORKDIR /src diff --git a/core/CMakeLists.txt b/core/CMakeLists.txt index 0a1e0306eb..318b2bd8c2 100644 --- a/core/CMakeLists.txt +++ b/core/CMakeLists.txt @@ -16,6 +16,7 @@ cmake_minimum_required(VERSION 3.10) project(logtail) # Options. +option(BUILD_LOGTAIL "Build Logtail executable and tools" ON) option(ENABLE_ENTERPRISE "enable enterprise feature") option(ENABLE_COMPATIBLE_MODE "Build Logtail in compatible mode (for low version Linux)") option(ENABLE_STATIC_LINK_CRT "Build Logtail by linking CRT statically") @@ -80,34 +81,34 @@ foreach (DIR_NAME ${SUB_DIRECTORIES_LIST}) endforeach (DIR_NAME) # Logtail executable. -if (ENABLE_ENTERPRISE) - if (UNIX) - add_executable(${LOGTAIL_TARGET} enterprise_logtail.cpp) - elseif (MSVC) - add_executable(${LOGTAIL_TARGET} enterprise_logtail_windows.cpp) - endif () -else () - if (UNIX) - add_executable(${LOGTAIL_TARGET} logtail.cpp) - elseif (MSVC) - add_executable(${LOGTAIL_TARGET} logtail_windows.cpp) - endif () -endif() -target_link_libraries(${LOGTAIL_TARGET} application common logger) -if (UNIX) - target_link_libraries(${LOGTAIL_TARGET} pthread dl uuid) - if (ENABLE_STATIC_LINK_CRT) - target_link_libraries(${LOGTAIL_TARGET} -static-libstdc++ -static-libgcc) - endif () - if (ENABLE_COMPATIBLE_MODE) - target_link_libraries(${LOGTAIL_TARGET} rt) - target_link_libraries(${LOGTAIL_TARGET} pthread dl uuid -static-libstdc++ -static-libgcc) +if (BUILD_LOGTAIL) + if (ENABLE_ENTERPRISE) + if (UNIX) + add_executable(${LOGTAIL_TARGET} enterprise_logtail.cpp) + elseif (MSVC) + add_executable(${LOGTAIL_TARGET} enterprise_logtail_windows.cpp) + endif () else () + if (UNIX) + add_executable(${LOGTAIL_TARGET} logtail.cpp) + elseif (MSVC) + add_executable(${LOGTAIL_TARGET} 
logtail_windows.cpp) + endif () + endif() + link_tcmalloc(${LOGTAIL_TARGET}) # should be the first lib to link + target_link_libraries(${LOGTAIL_TARGET} application common logger) + if (UNIX) target_link_libraries(${LOGTAIL_TARGET} pthread dl uuid) + if (ENABLE_COMPATIBLE_MODE) + target_link_libraries(${LOGTAIL_TARGET} rt) + target_link_libraries(${LOGTAIL_TARGET} -static-libstdc++ -static-libgcc) + elseif (ENABLE_STATIC_LINK_CRT) + target_link_libraries(${LOGTAIL_TARGET} -static-libstdc++ -static-libgcc) + endif () + link_ssl(${LOGTAIL_TARGET}) + link_crypto(${LOGTAIL_TARGET}) endif () - link_ssl(${LOGTAIL_TARGET}) - link_crypto(${LOGTAIL_TARGET}) -endif () +endif() # Logtail UT. if (BUILD_LOGTAIL_UT) diff --git a/core/common/CMakeLists.txt b/core/common/CMakeLists.txt index 4933b62d7e..40c8733f2c 100644 --- a/core/common/CMakeLists.txt +++ b/core/common/CMakeLists.txt @@ -47,6 +47,7 @@ link_lz4(${PROJECT_NAME}) link_zlib(${PROJECT_NAME}) link_zstd(${PROJECT_NAME}) link_unwind(${PROJECT_NAME}) +link_asan(${PROJECT_NAME}) if (UNIX) target_link_libraries(${PROJECT_NAME} pthread uuid) elseif (MSVC) diff --git a/core/common/CrashBackTraceUtil.cpp b/core/common/CrashBackTraceUtil.cpp index af09000c34..ee56a905c5 100644 --- a/core/common/CrashBackTraceUtil.cpp +++ b/core/common/CrashBackTraceUtil.cpp @@ -81,6 +81,7 @@ void CrashBackTrace(int signum) { // std::free(demangled); } } + fflush(g_crashBackTraceFilePtr); fclose(g_crashBackTraceFilePtr); _exit(10); } diff --git a/core/common/Thread.h b/core/common/Thread.h index aa92e58a49..6d4c24ff70 100644 --- a/core/common/Thread.h +++ b/core/common/Thread.h @@ -15,11 +15,12 @@ */ #pragma once +#include -#include +#include #include +#include #include -#include #include "boost/thread.hpp" diff --git a/core/controller/CMakeLists.txt b/core/controller/CMakeLists.txt index 932b64f515..a38ff07ead 100644 --- a/core/controller/CMakeLists.txt +++ b/core/controller/CMakeLists.txt @@ -37,4 +37,3 @@ 
target_link_libraries(${PROJECT_NAME} go_pipeline) target_link_libraries(${PROJECT_NAME} config_manager) target_link_libraries(${PROJECT_NAME} input) target_link_libraries(${PROJECT_NAME} application) -link_tcmalloc(${PROJECT_NAME}) diff --git a/core/controller/EventDispatcher.cpp b/core/controller/EventDispatcher.cpp index 5a5f08e3cb..63186dbdb2 100644 --- a/core/controller/EventDispatcher.cpp +++ b/core/controller/EventDispatcher.cpp @@ -27,7 +27,7 @@ #include #include -#if !defined(LOGTAIL_NO_TC_MALLOC) +#ifndef LOGTAIL_NO_TC_MALLOC #include #include #endif diff --git a/core/dependencies.cmake b/core/dependencies.cmake index fad59d0c47..ca5526a552 100644 --- a/core/dependencies.cmake +++ b/core/dependencies.cmake @@ -46,7 +46,6 @@ set(DEP_NAME_LIST gtest protobuf re2 - tcmalloc # (gperftools) cityhash gflags jsoncpp @@ -63,6 +62,10 @@ set(DEP_NAME_LIST leveldb ) +if (NOT CMAKE_BUILD_TYPE MATCHES Debug) + list(APPEND DEP_NAME_LIST "tcmalloc") # (gperftools) +endif() + if (MSVC) if (NOT DEFINED unwind_${INCLUDE_DIR_SUFFIX}) set(unwind_${INCLUDE_DIR_SUFFIX} ${DEPS_INCLUDE_ROOT}/breakpad) @@ -138,15 +141,17 @@ endmacro() # tcmalloc (gperftools) macro(link_tcmalloc target_name) - if (tcmalloc_${LINK_OPTION_SUFFIX}) - target_link_libraries(${target_name} "${tcmalloc_${LINK_OPTION_SUFFIX}}") - elseif (UNIX) - target_link_libraries(${target_name} "${tcmalloc_${LIBRARY_DIR_SUFFIX}}/libtcmalloc.a") - elseif (MSVC) - add_definitions(-DPERFTOOLS_DLL_DECL=) - target_link_libraries(${target_name} - debug "libtcmalloc_minimald" - optimized "libtcmalloc_minimal") + if(NOT CMAKE_BUILD_TYPE MATCHES Debug) + if (tcmalloc_${LINK_OPTION_SUFFIX}) + target_link_libraries(${target_name} "${tcmalloc_${LINK_OPTION_SUFFIX}}") + elseif (UNIX) + target_link_libraries(${target_name} "${tcmalloc_${LIBRARY_DIR_SUFFIX}}/libtcmalloc.a") + elseif (MSVC) + add_definitions(-DPERFTOOLS_DLL_DECL=) + target_link_libraries(${target_name} + debug "libtcmalloc_minimald" + optimized "libtcmalloc_minimal") 
+ endif () endif () endmacro() @@ -348,3 +353,10 @@ macro(link_leveldb target_name) endif () endmacro() +# asan for debug +macro(link_asan target_name) + if(CMAKE_BUILD_TYPE MATCHES Debug) + target_compile_options(${target_name} PUBLIC -fsanitize=address) + target_link_options(${target_name} PUBLIC -fsanitize=address -static-libasan) + endif() +endmacro() diff --git a/core/monitor/LogtailAlarm.cpp b/core/monitor/LogtailAlarm.cpp index 5358aa2173..8f70e8b04e 100644 --- a/core/monitor/LogtailAlarm.cpp +++ b/core/monitor/LogtailAlarm.cpp @@ -100,58 +100,71 @@ LogtailAlarm::LogtailAlarm() { mMessageType[OBSERVER_RUNTIME_ALARM] = "OBSERVER_RUNTIME_ALARM"; mMessageType[OBSERVER_STOP_ALARM] = "OBSERVER_STOP_ALARM"; - new Thread([this]() { SendAlarmLoop(); }); + mThread.reset(new Thread([this]() { SendAlarmLoop(); })); } LogtailAlarm::~LogtailAlarm() { + Stop(); + mThread->Wait(1000000); // thread should stop before members destruct } bool LogtailAlarm::SendAlarmLoop() { - LogtailAlarmMessage* messagePtr = NULL; - while (true) { - int32_t currentTime = time(NULL); + { + std::unique_lock lock(mStopMutex); + while (!mStopFlag) { + SendAllRegionAlarm(); + if (mStopCV.wait_for(lock, std::chrono::seconds(3), [this]() { return mStopFlag; })) { + break; + } + } + } + SendAllRegionAlarm(); + return true; +} - size_t sendRegionIndex = 0; - size_t sendAlarmTypeIndex = 0; - do { - LogGroup logGroup; - string region; - { - PTScopedLock lock(mAlarmBufferMutex); - if (mAllAlarmMap.size() <= sendRegionIndex) { - break; - } - std::map > >::iterator allAlarmIter - = mAllAlarmMap.begin(); - size_t iterIndex = 0; - while (iterIndex != sendRegionIndex) { - ++iterIndex; - ++allAlarmIter; - } - region = allAlarmIter->first; - // LOG_DEBUG(sLogger, ("1Send Alarm", region)("region", sendRegionIndex)); - LogtailAlarmVector& alarmBufferVec = *(allAlarmIter->second.first); - std::vector& lastUpdateTimeVec = allAlarmIter->second.second; - // check this region end - if (sendAlarmTypeIndex >= 
alarmBufferVec.size()) { - // jump this region - ++sendRegionIndex; - sendAlarmTypeIndex = 0; - continue; - } - // LOG_DEBUG(sLogger, ("2Send Alarm", region)("region", sendRegionIndex)("alarm index", - // mMessageType[sendAlarmTypeIndex])); - // check valid - if (alarmBufferVec.size() != (size_t)ALL_LOGTAIL_ALARM_NUM - || lastUpdateTimeVec.size() != (size_t)ALL_LOGTAIL_ALARM_NUM) { - LOG_ERROR(sLogger, - ("invalid alarm item", - region)("alarm vec", alarmBufferVec.size())("update vec", lastUpdateTimeVec.size())); - // jump this region - ++sendRegionIndex; - sendAlarmTypeIndex = 0; - continue; - } +void LogtailAlarm::SendAllRegionAlarm() { + LogtailAlarmMessage* messagePtr = nullptr; + int32_t currentTime = time(nullptr); + size_t sendRegionIndex = 0; + size_t sendAlarmTypeIndex = 0; + do { + LogGroup logGroup; + string region; + { + PTScopedLock lock(mAlarmBufferMutex); + if (mAllAlarmMap.size() <= sendRegionIndex) { + break; + } + auto allAlarmIter = mAllAlarmMap.begin(); + size_t iterIndex = 0; + while (iterIndex != sendRegionIndex) { + ++iterIndex; + ++allAlarmIter; + } + region = allAlarmIter->first; + // LOG_DEBUG(sLogger, ("1Send Alarm", region)("region", sendRegionIndex)); + LogtailAlarmVector& alarmBufferVec = *(allAlarmIter->second.first); + std::vector& lastUpdateTimeVec = allAlarmIter->second.second; + // check this region end + if (sendAlarmTypeIndex >= alarmBufferVec.size()) { + // jump this region + ++sendRegionIndex; + sendAlarmTypeIndex = 0; + continue; + } + // LOG_DEBUG(sLogger, ("2Send Alarm", region)("region", sendRegionIndex)("alarm index", + // mMessageType[sendAlarmTypeIndex])); + // check valid + if (alarmBufferVec.size() != (size_t)ALL_LOGTAIL_ALARM_NUM + || lastUpdateTimeVec.size() != (size_t)ALL_LOGTAIL_ALARM_NUM) { + LOG_ERROR(sLogger, + ("invalid alarm item", region)("alarm vec", alarmBufferVec.size())("update vec", + lastUpdateTimeVec.size())); + // jump this region + ++sendRegionIndex; + sendAlarmTypeIndex = 0; + continue; + } // 
LOG_DEBUG(sLogger, ("3Send Alarm", region)("region", sendRegionIndex)("alarm index", // mMessageType[sendAlarmTypeIndex])); @@ -172,49 +185,51 @@ bool LogtailAlarm::SendAlarmLoop() { continue; } - // LOG_DEBUG(sLogger, ("4Send Alarm", region)("region", sendRegionIndex)("alarm index", - // mMessageType[sendAlarmTypeIndex])); - logGroup.set_source(LogFileProfiler::mIpAddr); - logGroup.set_category("logtail_alarm"); - auto now = GetCurrentLogtailTime(); - for (map::iterator mapIter = alarmMap.begin(); mapIter != alarmMap.end(); - ++mapIter) { - messagePtr = mapIter->second; + // LOG_DEBUG(sLogger, ("4Send Alarm", region)("region", sendRegionIndex)("alarm index", + // mMessageType[sendAlarmTypeIndex])); + logGroup.set_source(LogFileProfiler::mIpAddr); + logGroup.set_category("logtail_alarm"); + auto now = GetCurrentLogtailTime(); + for (map::iterator mapIter = alarmMap.begin(); mapIter != alarmMap.end(); + ++mapIter) { + messagePtr = mapIter->second; - // LOG_DEBUG(sLogger, ("5Send Alarm", region)("region", sendRegionIndex)("alarm index", - // sendAlarmTypeIndex)("msg", messagePtr->mMessage)); + // LOG_DEBUG(sLogger, ("5Send Alarm", region)("region", sendRegionIndex)("alarm index", + // sendAlarmTypeIndex)("msg", messagePtr->mMessage)); - Log* logPtr = logGroup.add_logs(); - SetLogTime(logPtr, AppConfig::GetInstance()->EnableLogTimeAutoAdjust() ? now.tv_sec + GetTimeDelta() : now.tv_sec); - Log_Content* contentPtr = logPtr->add_contents(); - contentPtr->set_key("alarm_type"); - contentPtr->set_value(messagePtr->mMessageType); + Log* logPtr = logGroup.add_logs(); + SetLogTime(logPtr, + AppConfig::GetInstance()->EnableLogTimeAutoAdjust() ? 
now.tv_sec + GetTimeDelta() + : now.tv_sec); + Log_Content* contentPtr = logPtr->add_contents(); + contentPtr->set_key("alarm_type"); + contentPtr->set_value(messagePtr->mMessageType); - contentPtr = logPtr->add_contents(); - contentPtr->set_key("alarm_message"); - contentPtr->set_value(messagePtr->mMessage); + contentPtr = logPtr->add_contents(); + contentPtr->set_key("alarm_message"); + contentPtr->set_value(messagePtr->mMessage); - contentPtr = logPtr->add_contents(); - contentPtr->set_key("alarm_count"); - contentPtr->set_value(ToString(messagePtr->mCount)); + contentPtr = logPtr->add_contents(); + contentPtr->set_key("alarm_count"); + contentPtr->set_value(ToString(messagePtr->mCount)); - contentPtr = logPtr->add_contents(); - contentPtr->set_key("ip"); - contentPtr->set_value(LogFileProfiler::mIpAddr); + contentPtr = logPtr->add_contents(); + contentPtr->set_key("ip"); + contentPtr->set_value(LogFileProfiler::mIpAddr); - contentPtr = logPtr->add_contents(); - contentPtr->set_key("os"); - contentPtr->set_value(OS_NAME); + contentPtr = logPtr->add_contents(); + contentPtr->set_key("os"); + contentPtr->set_value(OS_NAME); - contentPtr = logPtr->add_contents(); - contentPtr->set_key("ver"); - contentPtr->set_value(ILOGTAIL_VERSION); + contentPtr = logPtr->add_contents(); + contentPtr->set_key("ver"); + contentPtr->set_value(ILOGTAIL_VERSION); - if (!messagePtr->mProjectName.empty()) { - contentPtr = logPtr->add_contents(); - contentPtr->set_key("project_name"); - contentPtr->set_value(messagePtr->mProjectName); - } + if (!messagePtr->mProjectName.empty()) { + contentPtr = logPtr->add_contents(); + contentPtr->set_key("project_name"); + contentPtr->set_value(messagePtr->mProjectName); + } if (!messagePtr->mCategory.empty()) { contentPtr = logPtr->add_contents(); @@ -233,19 +248,15 @@ bool LogtailAlarm::SendAlarmLoop() { // this is an anonymous send and non lock send ProfileSender::GetInstance()->SendToProfileProject(region, logGroup); } while (true); - - sleep(3); 
} - return true; } LogtailAlarm::LogtailAlarmVector* LogtailAlarm::MakesureLogtailAlarmMapVecUnlocked(const string& region) { // @todo // string region; - std::map > >::iterator iter - = mAllAlarmMap.find(region); + auto iter = mAllAlarmMap.find(region); if (iter == mAllAlarmMap.end()) { - LogtailAlarmVector* pMapVec = new LogtailAlarmVector; + auto pMapVec = std::make_shared(); // need resize to init this obj pMapVec->resize(ALL_LOGTAIL_ALARM_NUM); @@ -255,9 +266,9 @@ LogtailAlarm::LogtailAlarmVector* LogtailAlarm::MakesureLogtailAlarmMapVecUnlock for (uint32_t i = 0; i < ALL_LOGTAIL_ALARM_NUM; ++i) lastUpdateTime[i] = now - rand() % 180; mAllAlarmMap[region] = std::make_pair(pMapVec, lastUpdateTime); - return pMapVec; + return pMapVec.get(); } - return iter->second.first; + return iter->second.first.get(); } void LogtailAlarm::SendAlarm(const LogtailAlarmType alarmType, @@ -291,6 +302,15 @@ void LogtailAlarm::ForceToSend() { INT32_FLAG(logtail_alarm_interval) = 0; } +void LogtailAlarm::Stop() { + ForceToSend(); + { + std::lock_guard lock(mStopMutex); + mStopFlag = true; + } + mStopCV.notify_one(); +} + bool LogtailAlarm::IsLowLevelAlarmValid() { int32_t curTime = time(NULL); if (curTime == mLastLowLevelTime) { diff --git a/core/monitor/LogtailAlarm.h b/core/monitor/LogtailAlarm.h index 8486034334..b656d4bd18 100644 --- a/core/monitor/LogtailAlarm.h +++ b/core/monitor/LogtailAlarm.h @@ -20,6 +20,7 @@ #include #include #include +#include "common/Thread.h" #include "common/Lock.h" #include "profile_sender/ProfileSender.h" @@ -113,19 +114,24 @@ class LogtailAlarm { private: std::vector mMessageType; typedef std::vector > LogtailAlarmVector; - std::map > > mAllAlarmMap; + std::map, std::vector > > mAllAlarmMap; PTMutex mAlarmBufferMutex; LogtailAlarm(); - ~LogtailAlarm(); bool SendAlarmLoop(); // without lock LogtailAlarmVector* MakesureLogtailAlarmMapVecUnlocked(const std::string& region); + void SendAllRegionAlarm(); std::atomic_int mLastLowLevelTime{0}; 
std::atomic_int mLastLowLevelCount{0}; + std::unique_ptr mThread; + std::mutex mStopMutex; + std::condition_variable mStopCV; + bool mStopFlag = false; public: + ~LogtailAlarm(); void SendAlarm(const LogtailAlarmType alarmType, const std::string& message, const std::string& projectName = "", @@ -133,10 +139,11 @@ class LogtailAlarm { const std::string& region = ""); // only be called when prepare to exit void ForceToSend(); + void Stop(); bool IsLowLevelAlarmValid(); static LogtailAlarm* GetInstance() { - static LogtailAlarm* ptr = new LogtailAlarm(); - return ptr; + static LogtailAlarm ptr; + return &ptr; } }; diff --git a/core/observer/metas/ServiceMetaCache.cpp b/core/observer/metas/ServiceMetaCache.cpp index a788fc0683..6dd69d3a5e 100644 --- a/core/observer/metas/ServiceMetaCache.cpp +++ b/core/observer/metas/ServiceMetaCache.cpp @@ -17,9 +17,14 @@ namespace logtail { +static const ServiceMeta& GetEmptyHost() { + static const ServiceMeta sEmptyHost; + return sEmptyHost; +} + const ServiceMeta& ServiceMetaCache::Get(const std::string& remoteIP, ProtocolType protocolType) { if (mIndexMap.find(remoteIP) == mIndexMap.end()) { - return *sEmptyHost; + return GetEmptyHost(); } mData.splice(mData.begin(), mData, mIndexMap[remoteIP]); mData.begin()->second.Category = DetectRemoteServiceCategory(protocolType); @@ -29,7 +34,7 @@ const ServiceMeta& ServiceMetaCache::Get(const std::string& remoteIP, ProtocolTy const ServiceMeta& ServiceMetaCache::Get(const std::string& remoteIP) { if (mIndexMap.find(remoteIP) == mIndexMap.end()) { - return *sEmptyHost; + return GetEmptyHost(); } mData.splice(mData.begin(), mData, mIndexMap[remoteIP]); mData.front().second.time = time(nullptr); @@ -112,7 +117,7 @@ ServiceMetaManager::doGetOrPutServiceMeta(uint32_t pid, const std::string& ip, P auto meta = mHostnameMetas.find(pid); if (meta == mHostnameMetas.end()) { if (IsRemoteInvokeProtocolType(protocolType)) { - return *sEmptyHost; + return GetEmptyHost(); } meta = 
mHostnameMetas.insert(std::make_pair(pid, new ServiceMetaCache(200))).first; meta->second->Put(ip, "", protocolType); @@ -129,7 +134,7 @@ ServiceMetaManager::doGetOrPutServiceMeta(uint32_t pid, const std::string& ip, P inline const ServiceMeta& ServiceMetaManager::doGetServiceMeta(uint32_t pid, const std::string& ip) { auto meta = mHostnameMetas.find(pid); if (meta == mHostnameMetas.end()) { - return *sEmptyHost; + return GetEmptyHost(); } return meta->second->Get(ip); } diff --git a/core/observer/metas/ServiceMetaCache.h b/core/observer/metas/ServiceMetaCache.h index 8a7468c260..00e5592062 100644 --- a/core/observer/metas/ServiceMetaCache.h +++ b/core/observer/metas/ServiceMetaCache.h @@ -52,8 +52,6 @@ struct ServiceMeta { bool Empty() const { return time == 0; } }; -static const ServiceMeta* sEmptyHost = new ServiceMeta; - template class LRUCache { diff --git a/core/options.cmake b/core/options.cmake index 6a6c0a6f43..a1f0203290 100644 --- a/core/options.cmake +++ b/core/options.cmake @@ -46,8 +46,8 @@ if (UNIX) set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -g -ggdb -fpic -fPIC -D_LARGEFILE64_SOURCE") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17 -Wall -g -ggdb -fpic -fPIC -D_LARGEFILE64_SOURCE") endif () - set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -O0") - set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O0") + set(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -O1 -fno-omit-frame-pointer -DLOGTAIL_NO_TC_MALLOC") + set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O1 -fno-omit-frame-pointer -DLOGTAIL_NO_TC_MALLOC") set(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} -O2") set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -O2") string(REPLACE "-O3" "" CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE}") diff --git a/core/parser/LogParser.cpp b/core/parser/LogParser.cpp index 2c2a7ce93e..ad56a7817f 100644 --- a/core/parser/LogParser.cpp +++ b/core/parser/LogParser.cpp @@ -553,7 +553,8 @@ bool LogParser::WholeLineModeParser( int32_t 
LogParser::GetApsaraLogMicroTime(const char* buffer) { int begIndex = 0; - char tmp[6]; + static const int MICRO_TIME_LEN = 6; + char tmp[MICRO_TIME_LEN + 1]{}; while (buffer[begIndex]) { if (buffer[begIndex] == '.') { begIndex++; @@ -562,19 +563,17 @@ int32_t LogParser::GetApsaraLogMicroTime(const char* buffer) { begIndex++; } int index = 0; - while (buffer[begIndex + index] && index < 6) { - if (buffer[begIndex + index] == ']') { + while (buffer[begIndex + index] && index < MICRO_TIME_LEN) { + if (buffer[begIndex + index] == ']' || buffer[begIndex + index] == '\0') { break; } tmp[index] = buffer[begIndex + index]; index++; } - if (index < 6) { - for (int i = index; i < 6; i++) { - tmp[i] = '0'; - } + for (; index < MICRO_TIME_LEN; ++index) { + tmp[index] = '0'; } - char* endPtr; + char* endPtr{}; return strtol(tmp, &endPtr, 10); } diff --git a/core/processor/ProcessorParseRegexNative.cpp b/core/processor/ProcessorParseRegexNative.cpp index 1beccbab7e..31afa8e849 100644 --- a/core/processor/ProcessorParseRegexNative.cpp +++ b/core/processor/ProcessorParseRegexNative.cpp @@ -48,6 +48,14 @@ bool ProcessorParseRegexNative::Init(const Json::Value& config) { if (!GetMandatoryListParam(config, "Keys", mKeys, errorMsg)) { PARAM_ERROR_RETURN(mContext->GetLogger(), errorMsg, sName, mContext->GetConfigName()); } + // Since the 'keys' field in old logtail config is an array with a single comma separated string inside (e.g., + // ["k1,k2,k3"]), which is different from openAPI, chances are the 'key' field in openAPI is unintentionally set to + // the 'keys' field in logtail config. However, such wrong format can still work in logtail due to the conversion + // done in the server, which simply concatenates all strings in the array with comma. Therefore, to be compatible + // with such wrong behavior, we must explicitly allow such format. 
+ if (mKeys.size() == 1 && mKeys[0].find(',') != std::string::npos) { + mKeys = SplitString(mKeys[0], ","); + } for (const auto& it : mKeys) { if (it == mSourceKey) { mSourceKeyOverwritten = true; diff --git a/core/reader/CMakeLists.txt b/core/reader/CMakeLists.txt index fc2a6fafc7..a04030ba06 100644 --- a/core/reader/CMakeLists.txt +++ b/core/reader/CMakeLists.txt @@ -33,5 +33,4 @@ target_link_libraries(${PROJECT_NAME} config_manager) target_link_libraries(${PROJECT_NAME} log_pb) target_link_libraries(${PROJECT_NAME} fuse) target_link_libraries(${PROJECT_NAME} input) -link_cityhash(${PROJECT_NAME}) -link_boost(${PROJECT_NAME}) \ No newline at end of file +link_cityhash(${PROJECT_NAME}) \ No newline at end of file diff --git a/core/unittest/CMakeLists.txt b/core/unittest/CMakeLists.txt index bb3e6ccefd..decc0401b8 100644 --- a/core/unittest/CMakeLists.txt +++ b/core/unittest/CMakeLists.txt @@ -35,6 +35,7 @@ link_re2(${PROJECT_NAME}) link_protobuf(${PROJECT_NAME}) link_cityhash(${PROJECT_NAME}) link_leveldb(${PROJECT_NAME}) +link_asan(${PROJECT_NAME}) if (UNIX) target_link_libraries(${PROJECT_NAME} pthread uuid dl) if (ENABLE_COMPATIBLE_MODE) diff --git a/core/unittest/flusher/FlusherSLSUnittest.cpp b/core/unittest/flusher/FlusherSLSUnittest.cpp index ee15195630..f774118c37 100644 --- a/core/unittest/flusher/FlusherSLSUnittest.cpp +++ b/core/unittest/flusher/FlusherSLSUnittest.cpp @@ -62,6 +62,7 @@ void FlusherSLSUnittest::OnSuccessfulInit() { APSARA_TEST_TRUE(ParseJsonTable(configStr, configJson, errorMsg)); flusher.reset(new FlusherSLS()); flusher->SetContext(ctx); + flusher->SetMetricsRecordRef(FlusherSLS::sName, "1"); APSARA_TEST_TRUE(flusher->Init(configJson, optionalGoPipeline)); APSARA_TEST_TRUE(optionalGoPipeline.isNull()); APSARA_TEST_EQUAL("test_project", flusher->mProject); @@ -103,6 +104,7 @@ void FlusherSLSUnittest::OnSuccessfulInit() { APSARA_TEST_TRUE(ParseJsonTable(configStr, configJson, errorMsg)); flusher.reset(new FlusherSLS()); 
flusher->SetContext(ctx); + flusher->SetMetricsRecordRef(FlusherSLS::sName, "1"); APSARA_TEST_TRUE(flusher->Init(configJson, optionalGoPipeline)); APSARA_TEST_EQUAL("cn-hangzhou", flusher->mRegion); #ifdef __ENTERPRISE__ @@ -141,6 +143,7 @@ void FlusherSLSUnittest::OnSuccessfulInit() { APSARA_TEST_TRUE(ParseJsonTable(configStr, configJson, errorMsg)); flusher.reset(new FlusherSLS()); flusher->SetContext(ctx); + flusher->SetMetricsRecordRef(FlusherSLS::sName, "1"); APSARA_TEST_TRUE(flusher->Init(configJson, optionalGoPipeline)); APSARA_TEST_EQUAL(STRING_FLAG(default_region_name), flusher->mRegion); APSARA_TEST_EQUAL("", flusher->mAliuid); @@ -184,6 +187,7 @@ void FlusherSLSUnittest::OnSuccessfulInit() { APSARA_TEST_TRUE(ParseJsonTable(configStr, configJson, errorMsg)); flusher.reset(new FlusherSLS()); flusher->SetContext(ctx); + flusher->SetMetricsRecordRef(FlusherSLS::sName, "1"); APSARA_TEST_TRUE(flusher->Init(configJson, optionalGoPipeline)); APSARA_TEST_EQUAL("cn-hangzhou.log.aliyuncs.com", flusher->mEndpoint); auto iter = Sender::Instance()->mRegionEndpointEntryMap.find("cn-hangzhou"); @@ -205,6 +209,7 @@ void FlusherSLSUnittest::OnSuccessfulInit() { APSARA_TEST_TRUE(ParseJsonTable(configStr, configJson, errorMsg)); flusher.reset(new FlusherSLS()); flusher->SetContext(ctx); + flusher->SetMetricsRecordRef(FlusherSLS::sName, "1"); APSARA_TEST_TRUE(flusher->Init(configJson, optionalGoPipeline)); APSARA_TEST_EQUAL(FlusherSLS::CompressType::NONE, flusher->mCompressType); @@ -221,6 +226,7 @@ void FlusherSLSUnittest::OnSuccessfulInit() { APSARA_TEST_TRUE(ParseJsonTable(configStr, configJson, errorMsg)); flusher.reset(new FlusherSLS()); flusher->SetContext(ctx); + flusher->SetMetricsRecordRef(FlusherSLS::sName, "1"); APSARA_TEST_TRUE(flusher->Init(configJson, optionalGoPipeline)); APSARA_TEST_EQUAL(FlusherSLS::CompressType::LZ4, flusher->mCompressType); @@ -237,6 +243,7 @@ void FlusherSLSUnittest::OnSuccessfulInit() { APSARA_TEST_TRUE(ParseJsonTable(configStr, 
configJson, errorMsg)); flusher.reset(new FlusherSLS()); flusher->SetContext(ctx); + flusher->SetMetricsRecordRef(FlusherSLS::sName, "1"); APSARA_TEST_TRUE(flusher->Init(configJson, optionalGoPipeline)); APSARA_TEST_EQUAL(FlusherSLS::CompressType::LZ4, flusher->mCompressType); @@ -254,6 +261,7 @@ void FlusherSLSUnittest::OnSuccessfulInit() { APSARA_TEST_TRUE(ParseJsonTable(configStr, configJson, errorMsg)); flusher.reset(new FlusherSLS()); flusher->SetContext(ctx); + flusher->SetMetricsRecordRef(FlusherSLS::sName, "1"); APSARA_TEST_TRUE(flusher->Init(configJson, optionalGoPipeline)); APSARA_TEST_EQUAL(FlusherSLS::TelemetryType::LOG, flusher->mTelemetryType); @@ -270,6 +278,7 @@ void FlusherSLSUnittest::OnSuccessfulInit() { APSARA_TEST_TRUE(ParseJsonTable(configStr, configJson, errorMsg)); flusher.reset(new FlusherSLS()); flusher->SetContext(ctx); + flusher->SetMetricsRecordRef(FlusherSLS::sName, "1"); APSARA_TEST_TRUE(flusher->Init(configJson, optionalGoPipeline)); APSARA_TEST_EQUAL(FlusherSLS::TelemetryType::LOG, flusher->mTelemetryType); @@ -289,6 +298,7 @@ void FlusherSLSUnittest::OnSuccessfulInit() { APSARA_TEST_TRUE(ParseJsonTable(configStr, configJson, errorMsg)); flusher.reset(new FlusherSLS()); flusher->SetContext(ctx); + flusher->SetMetricsRecordRef(FlusherSLS::sName, "1"); APSARA_TEST_TRUE(flusher->Init(configJson, optionalGoPipeline)); APSARA_TEST_EQUAL(FlusherSLS::Batch::MergeType::TOPIC, flusher->mBatch.mMergeType); @@ -307,6 +317,7 @@ void FlusherSLSUnittest::OnSuccessfulInit() { APSARA_TEST_TRUE(ParseJsonTable(configStr, configJson, errorMsg)); flusher.reset(new FlusherSLS()); flusher->SetContext(ctx); + flusher->SetMetricsRecordRef(FlusherSLS::sName, "1"); APSARA_TEST_TRUE(flusher->Init(configJson, optionalGoPipeline)); APSARA_TEST_EQUAL(FlusherSLS::Batch::MergeType::TOPIC, flusher->mBatch.mMergeType); @@ -337,6 +348,7 @@ void FlusherSLSUnittest::OnSuccessfulInit() { APSARA_TEST_TRUE(ParseJsonTable(optionalGoPipelineStr, optionalGoPipelineJson, 
errorMsg)); flusher.reset(new FlusherSLS()); flusher->SetContext(ctx); + flusher->SetMetricsRecordRef(FlusherSLS::sName, "1"); APSARA_TEST_TRUE(flusher->Init(configJson, optionalGoPipeline)); APSARA_TEST_TRUE(optionalGoPipelineJson == optionalGoPipeline); } @@ -357,6 +369,7 @@ void FlusherSLSUnittest::OnFailedInit() { APSARA_TEST_TRUE(ParseJsonTable(configStr, configJson, errorMsg)); flusher.reset(new FlusherSLS()); flusher->SetContext(ctx); + flusher->SetMetricsRecordRef(FlusherSLS::sName, "1"); APSARA_TEST_FALSE(flusher->Init(configJson, optionalGoPipeline)); configStr = R"( @@ -370,6 +383,7 @@ void FlusherSLSUnittest::OnFailedInit() { APSARA_TEST_TRUE(ParseJsonTable(configStr, configJson, errorMsg)); flusher.reset(new FlusherSLS()); flusher->SetContext(ctx); + flusher->SetMetricsRecordRef(FlusherSLS::sName, "1"); APSARA_TEST_FALSE(flusher->Init(configJson, optionalGoPipeline)); // invalid Logstore @@ -383,6 +397,7 @@ void FlusherSLSUnittest::OnFailedInit() { APSARA_TEST_TRUE(ParseJsonTable(configStr, configJson, errorMsg)); flusher.reset(new FlusherSLS()); flusher->SetContext(ctx); + flusher->SetMetricsRecordRef(FlusherSLS::sName, "1"); APSARA_TEST_FALSE(flusher->Init(configJson, optionalGoPipeline)); configStr = R"( @@ -396,6 +411,7 @@ void FlusherSLSUnittest::OnFailedInit() { APSARA_TEST_TRUE(ParseJsonTable(configStr, configJson, errorMsg)); flusher.reset(new FlusherSLS()); flusher->SetContext(ctx); + flusher->SetMetricsRecordRef(FlusherSLS::sName, "1"); APSARA_TEST_FALSE(flusher->Init(configJson, optionalGoPipeline)); // invalid Endpoint @@ -409,6 +425,7 @@ void FlusherSLSUnittest::OnFailedInit() { APSARA_TEST_TRUE(ParseJsonTable(configStr, configJson, errorMsg)); flusher.reset(new FlusherSLS()); flusher->SetContext(ctx); + flusher->SetMetricsRecordRef(FlusherSLS::sName, "1"); APSARA_TEST_FALSE(flusher->Init(configJson, optionalGoPipeline)); configStr = R"( @@ -422,6 +439,7 @@ void FlusherSLSUnittest::OnFailedInit() { 
APSARA_TEST_TRUE(ParseJsonTable(configStr, configJson, errorMsg)); flusher.reset(new FlusherSLS()); flusher->SetContext(ctx); + flusher->SetMetricsRecordRef(FlusherSLS::sName, "1"); APSARA_TEST_FALSE(flusher->Init(configJson, optionalGoPipeline)); } diff --git a/core/unittest/input/InputFileUnittest.cpp b/core/unittest/input/InputFileUnittest.cpp index 8e270bb78f..196888deae 100644 --- a/core/unittest/input/InputFileUnittest.cpp +++ b/core/unittest/input/InputFileUnittest.cpp @@ -69,7 +69,7 @@ void InputFileUnittest::OnSuccessfulInit() { configJson["FilePaths"].append(Json::Value(filePath.string())); input.reset(new InputFile()); input->SetContext(ctx); - APSARA_TEST_EQUAL(true, input->GetContext().GetLogger()->should_log(spdlog::level::err)); + input->SetMetricsRecordRef(InputFile::sName, "1"); APSARA_TEST_TRUE(input->Init(configJson, optionalGoPipeline)); APSARA_TEST_FALSE(input->mEnableContainerDiscovery); APSARA_TEST_EQUAL(0, input->mMaxCheckpointDirSearchDepth); @@ -89,6 +89,7 @@ void InputFileUnittest::OnSuccessfulInit() { configJson["FilePaths"].append(Json::Value(filePath.string())); input.reset(new InputFile()); input->SetContext(ctx); + input->SetMetricsRecordRef(InputFile::sName, "1"); APSARA_TEST_TRUE(input->Init(configJson, optionalGoPipeline)); APSARA_TEST_TRUE(input->mEnableContainerDiscovery); APSARA_TEST_EQUAL(1, input->mMaxCheckpointDirSearchDepth); @@ -108,6 +109,7 @@ void InputFileUnittest::OnSuccessfulInit() { configJson["FilePaths"].append(Json::Value(filePath.string())); input.reset(new InputFile()); input->SetContext(ctx); + input->SetMetricsRecordRef(InputFile::sName, "1"); APSARA_TEST_TRUE(input->Init(configJson, optionalGoPipeline)); APSARA_TEST_FALSE(input->mEnableContainerDiscovery); APSARA_TEST_EQUAL(0, input->mMaxCheckpointDirSearchDepth); @@ -125,6 +127,7 @@ void InputFileUnittest::OnSuccessfulInit() { configJson["FilePaths"].append(Json::Value(filePath.string())); input.reset(new InputFile()); input->SetContext(ctx); + 
input->SetMetricsRecordRef(InputFile::sName, "1"); APSARA_TEST_TRUE(input->Init(configJson, optionalGoPipeline)); APSARA_TEST_TRUE(input->mFileReader.mTailingAllMatchedFiles); APSARA_TEST_TRUE(input->mFileDiscovery.IsTailingAllMatchedFiles()); @@ -141,6 +144,7 @@ void InputFileUnittest::OnSuccessfulInit() { configJson["FilePaths"].append(Json::Value(filePath.string())); input.reset(new InputFile()); input->SetContext(ctx); + input->SetMetricsRecordRef(InputFile::sName, "1"); APSARA_TEST_TRUE(input->Init(configJson, optionalGoPipeline)); APSARA_TEST_EQUAL(0, input->mExactlyOnceConcurrency); } @@ -151,6 +155,7 @@ void InputFileUnittest::OnFailedInit() { input.reset(new InputFile()); input->SetContext(ctx); + input->SetMetricsRecordRef(InputFile::sName, "1"); APSARA_TEST_FALSE(input->Init(configJson, optionalGoPipeline)); } @@ -195,6 +200,7 @@ void InputFileUnittest::OnEnableContainerDiscovery() { optionalGoPipelineJson["inputs"][0]["detail"]["LogPath"] = Json::Value(filePath.parent_path().string()); input.reset(new InputFile()); input->SetContext(ctx); + input->SetMetricsRecordRef(InputFile::sName, "1"); APSARA_TEST_TRUE(input->Init(configJson, optionalGoPipeline)); APSARA_TEST_TRUE(input->mEnableContainerDiscovery); APSARA_TEST_TRUE(input->mFileDiscovery.IsContainerDiscoveryEnabled()); diff --git a/core/unittest/processor/ProcessorParseRegexNativeUnittest.cpp b/core/unittest/processor/ProcessorParseRegexNativeUnittest.cpp index d861ad1e8e..56e0109db5 100644 --- a/core/unittest/processor/ProcessorParseRegexNativeUnittest.cpp +++ b/core/unittest/processor/ProcessorParseRegexNativeUnittest.cpp @@ -25,9 +25,8 @@ namespace logtail { class ProcessorParseRegexNativeUnittest : public ::testing::Test { public: - void SetUp() override { mContext.SetConfigName("project##config_0"); } - void TestInit(); + void OnSuccessfulInit(); void TestProcessWholeLine(); void TestProcessRegex(); void TestAddLog(); @@ -37,26 +36,12 @@ class ProcessorParseRegexNativeUnittest : public 
::testing::Test { void TestProcessRegexRaw(); void TestProcessRegexContent(); - PipelineContext mContext; -}; - -UNIT_TEST_CASE(ProcessorParseRegexNativeUnittest, TestInit); - -UNIT_TEST_CASE(ProcessorParseRegexNativeUnittest, TestProcessWholeLine); - -UNIT_TEST_CASE(ProcessorParseRegexNativeUnittest, TestProcessRegex); - -UNIT_TEST_CASE(ProcessorParseRegexNativeUnittest, TestAddLog); +protected: + void SetUp() override { ctx.SetConfigName("test_config"); } -UNIT_TEST_CASE(ProcessorParseRegexNativeUnittest, TestProcessEventKeepUnmatch); - -UNIT_TEST_CASE(ProcessorParseRegexNativeUnittest, TestProcessEventDiscardUnmatch); - -UNIT_TEST_CASE(ProcessorParseRegexNativeUnittest, TestProcessEventKeyCountUnmatch); - -UNIT_TEST_CASE(ProcessorParseRegexNativeUnittest, TestProcessRegexRaw); - -UNIT_TEST_CASE(ProcessorParseRegexNativeUnittest, TestProcessRegexContent); +private: + PipelineContext ctx; +}; void ProcessorParseRegexNativeUnittest::TestInit() { // make config @@ -74,7 +59,33 @@ void ProcessorParseRegexNativeUnittest::TestInit() { ProcessorParseRegexNative& processor = *(new ProcessorParseRegexNative); std::string pluginId = "testID"; ProcessorInstance processorInstance(&processor, pluginId); - APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, mContext)); + APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, ctx)); +} + +void ProcessorParseRegexNativeUnittest::OnSuccessfulInit() { + // Keys + std::unique_ptr processor; + Json::Value configJson; + std::string configStr, errorMsg; + + configStr = R"""( + { + "Type": "processor_parse_regex_native", + "SourceKey": "content", + "Keys": [ + "k1,k2" + ], + "Regex": "(\\d+)\\s+(\\d+)" + } + )"""; + APSARA_TEST_TRUE(ParseJsonTable(configStr, configJson, errorMsg)); + processor.reset(new ProcessorParseRegexNative()); + processor->SetContext(ctx); + processor->SetMetricsRecordRef(ProcessorParseRegexNative::sName, "1"); + APSARA_TEST_TRUE(processor->Init(configJson)); + APSARA_TEST_EQUAL(2, processor->mKeys.size()); + 
APSARA_TEST_EQUAL("k1", processor->mKeys[0]); + APSARA_TEST_EQUAL("k2", processor->mKeys[1]); } void ProcessorParseRegexNativeUnittest::TestProcessWholeLine() { @@ -120,11 +131,11 @@ void ProcessorParseRegexNativeUnittest::TestProcessWholeLine() { ProcessorParseRegexNative& processor = *(new ProcessorParseRegexNative); std::string pluginId = "testID"; ProcessorInstance processorInstance(&processor, pluginId); - APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, mContext)); + APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, ctx)); std::vector eventGroupList; eventGroupList.emplace_back(std::move(eventGroup)); processorInstance.Process(eventGroupList); - + // judge result std::string outJson = eventGroupList[0].ToJsonString(); APSARA_TEST_STREQ_FATAL(CompactJson(inJson).c_str(), CompactJson(outJson).c_str()); @@ -182,11 +193,11 @@ void ProcessorParseRegexNativeUnittest::TestProcessRegex() { ProcessorParseRegexNative& processor = *(new ProcessorParseRegexNative); std::string pluginId = "testID"; ProcessorInstance processorInstance(&processor, pluginId); - APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, mContext)); + APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, ctx)); std::vector eventGroupList; eventGroupList.emplace_back(std::move(eventGroup)); processorInstance.Process(eventGroupList); - + // judge result std::string expectJson = R"({ "events" : @@ -261,11 +272,11 @@ void ProcessorParseRegexNativeUnittest::TestProcessRegexRaw() { ProcessorParseRegexNative& processor = *(new ProcessorParseRegexNative); std::string pluginId = "testID"; ProcessorInstance processorInstance(&processor, pluginId); - APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, mContext)); + APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, ctx)); std::vector eventGroupList; eventGroupList.emplace_back(std::move(eventGroup)); processorInstance.Process(eventGroupList); - + // judge result std::string expectJson = R"({ "events" : @@ -338,11 +349,11 @@ void 
ProcessorParseRegexNativeUnittest::TestProcessRegexContent() { ProcessorParseRegexNative& processor = *(new ProcessorParseRegexNative); std::string pluginId = "testID"; ProcessorInstance processorInstance(&processor, pluginId); - APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, mContext)); + APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, ctx)); std::vector eventGroupList; eventGroupList.emplace_back(std::move(eventGroup)); processorInstance.Process(eventGroupList); - + // judge result std::string expectJson = R"({ "events" : @@ -391,7 +402,7 @@ void ProcessorParseRegexNativeUnittest::TestAddLog() { ProcessorParseRegexNative& processor = *(new ProcessorParseRegexNative); std::string pluginId = "testID"; ProcessorInstance processorInstance(&processor, pluginId); - APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, mContext)); + APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, ctx)); auto sourceBuffer = std::make_shared(); auto logEvent = LogEvent::CreateEvent(sourceBuffer); @@ -468,11 +479,11 @@ void ProcessorParseRegexNativeUnittest::TestProcessEventKeepUnmatch() { ProcessorParseRegexNative& processor = *(new ProcessorParseRegexNative); std::string pluginId = "testID"; ProcessorInstance processorInstance(&processor, pluginId); - APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, mContext)); + APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, ctx)); std::vector eventGroupList; eventGroupList.emplace_back(std::move(eventGroup)); processorInstance.Process(eventGroupList); - + int count = 5; @@ -559,11 +570,11 @@ void ProcessorParseRegexNativeUnittest::TestProcessEventDiscardUnmatch() { ProcessorParseRegexNative& processor = *(new ProcessorParseRegexNative); std::string pluginId = "testID"; ProcessorInstance processorInstance(&processor, pluginId); - APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, mContext)); + APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, ctx)); std::vector eventGroupList; 
eventGroupList.emplace_back(std::move(eventGroup)); processorInstance.Process(eventGroupList); - + int count = 5; @@ -651,11 +662,11 @@ void ProcessorParseRegexNativeUnittest::TestProcessEventKeyCountUnmatch() { ProcessorParseRegexNative& processor = *(new ProcessorParseRegexNative); std::string pluginId = "testID"; ProcessorInstance processorInstance(&processor, pluginId); - APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, mContext)); + APSARA_TEST_TRUE_FATAL(processorInstance.Init(config, ctx)); std::vector eventGroupList; eventGroupList.emplace_back(std::move(eventGroup)); processorInstance.Process(eventGroupList); - + int count = 5; // check observablity @@ -675,6 +686,17 @@ void ProcessorParseRegexNativeUnittest::TestProcessEventKeyCountUnmatch() { APSARA_TEST_EQUAL_FATAL(count, processor.mProcKeyCountNotMatchErrorTotal->GetValue()); } +UNIT_TEST_CASE(ProcessorParseRegexNativeUnittest, TestInit) +UNIT_TEST_CASE(ProcessorParseRegexNativeUnittest, OnSuccessfulInit) +UNIT_TEST_CASE(ProcessorParseRegexNativeUnittest, TestProcessWholeLine) +UNIT_TEST_CASE(ProcessorParseRegexNativeUnittest, TestProcessRegex) +UNIT_TEST_CASE(ProcessorParseRegexNativeUnittest, TestAddLog) +UNIT_TEST_CASE(ProcessorParseRegexNativeUnittest, TestProcessEventKeepUnmatch) +UNIT_TEST_CASE(ProcessorParseRegexNativeUnittest, TestProcessEventDiscardUnmatch) +UNIT_TEST_CASE(ProcessorParseRegexNativeUnittest, TestProcessEventKeyCountUnmatch) +UNIT_TEST_CASE(ProcessorParseRegexNativeUnittest, TestProcessRegexRaw) +UNIT_TEST_CASE(ProcessorParseRegexNativeUnittest, TestProcessRegexContent) + } // namespace logtail UNIT_TEST_MAIN diff --git a/docker/Dockerfile.ilogtail-toolchain-linux b/docker/Dockerfile.ilogtail-toolchain-linux index 744a0418dd..3575c561b5 100644 --- a/docker/Dockerfile.ilogtail-toolchain-linux +++ b/docker/Dockerfile.ilogtail-toolchain-linux @@ -61,9 +61,8 @@ FROM --platform=$TARGETPLATFORM sls-opensource-registry.cn-shanghai.cr.aliyuncs. 
# install dev tool set and debug utilities RUN yum -y install centos-release-scl -RUN yum -y install devtoolset-9-gcc devtoolset-9-gcc-c++ make libuuid-devel libstdc++-static systemd-devel iproute gdb net-tools which wget vim tree man openssh-clients sudo -RUN yum remove -y centos-release-scl centos-release-scl-rh && \ - yum -y clean all && rm -fr /var/cache && rm -rf /core.* +RUN yum -y install devtoolset-9-gcc devtoolset-9-gcc-c++ devtoolset-9-libasan-devel make libuuid-devel libstdc++-static systemd-devel iproute gdb net-tools which wget vim tree man openssh-clients sudo +RUN yum -y clean all && rm -fr /var/cache && rm -rf /core.* RUN [[ ${TARGETPLATFORM##*/} == 'amd64' ]] && ARCH='x86_64' || ARCH='aarch64' && \ debuginfo-install -y glibc-2.17-326.el7_9.${ARCH} libuuid-2.23.2-65.el7_9.1.${ARCH} diff --git a/docker/Dockerfile_build b/docker/Dockerfile_build index 10aedc58dd..4b52c9c7c7 100644 --- a/docker/Dockerfile_build +++ b/docker/Dockerfile_build @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -FROM sls-opensource-registry.cn-shanghai.cr.aliyuncs.com/ilogtail-community-edition/ilogtail-build-linux:gcc_9.3.1-2 as build +FROM sls-opensource-registry.cn-shanghai.cr.aliyuncs.com/ilogtail-community-edition/ilogtail-build-linux:gcc_9.3.1-3 as build WORKDIR /src diff --git a/docker/Dockerfile_development_part b/docker/Dockerfile_development_part index 0070d61c69..74d331c6f1 100644 --- a/docker/Dockerfile_development_part +++ b/docker/Dockerfile_development_part @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-FROM sls-opensource-registry.cn-shanghai.cr.aliyuncs.com/ilogtail-community-edition/ilogtail-build-linux:gcc_9.3.1-2 +FROM sls-opensource-registry.cn-shanghai.cr.aliyuncs.com/ilogtail-community-edition/ilogtail-build-linux:gcc_9.3.1-3 ARG HOST_OS=Linux ARG VERSION=1.8.1 diff --git a/docker/Dockerfile_goc b/docker/Dockerfile_goc index 17772ec405..6a816dc5ba 100644 --- a/docker/Dockerfile_goc +++ b/docker/Dockerfile_goc @@ -14,7 +14,7 @@ # goc server is only for e2e test to analysis code coverage. -FROM sls-opensource-registry.cn-shanghai.cr.aliyuncs.com/ilogtail-community-edition/ilogtail-build-linux:gcc_9.3.1-2 as build +FROM sls-opensource-registry.cn-shanghai.cr.aliyuncs.com/ilogtail-community-edition/ilogtail-build-linux:gcc_9.3.1-3 as build USER root ENTRYPOINT ["goc","server"] diff --git a/docs/cn/developer-guide/development-environment.md b/docs/cn/developer-guide/development-environment.md index 93554b9895..5707f49d03 100644 --- a/docs/cn/developer-guide/development-environment.md +++ b/docs/cn/developer-guide/development-environment.md @@ -82,7 +82,7 @@ go install ... 
```json { - "image": "sls-opensource-registry.cn-shanghai.cr.aliyuncs.com/ilogtail-community-edition/ilogtail-build-linux:gcc_9.3.1-1", + "image": "sls-opensource-registry.cn-shanghai.cr.aliyuncs.com/ilogtail-community-edition/ilogtail-build-linux:gcc_9.3.1-3", "customizations": { "vscode": { "extensions": [ @@ -186,7 +186,7 @@ cp -a ./core/build/go_pipeline/libPluginAdapter.so ./output ```bash docker run --name ilogtail-build -d \ -v `pwd`:/src -w /src \ - sls-opensource-registry.cn-shanghai.cr.aliyuncs.com/ilogtail-community-edition/ilogtail-build-linux:gcc_9.3.1-1 \ + sls-opensource-registry.cn-shanghai.cr.aliyuncs.com/ilogtail-community-edition/ilogtail-build-linux:gcc_9.3.1-3 \ bash -c "sleep infinity" ``` diff --git a/plugin_main/plugin_export.go b/plugin_main/plugin_export.go index b98b9e4a1c..d3319bc691 100644 --- a/plugin_main/plugin_export.go +++ b/plugin_main/plugin_export.go @@ -106,6 +106,14 @@ func LoadConfig(project string, logstore string, configName string, logstoreKey logger.Error(context.Background(), "CONFIG_LOAD_ALARM", "cannot load config before hold on the running configs") return 1 } + defer func() { + if err := recover(); err != nil { + trace := make([]byte, 2048) + runtime.Stack(trace, true) + logger.Error(context.Background(), "PLUGIN_RUNTIME_ALARM", "panicked", err, "stack", string(trace)) + } + }() + err := pluginmanager.LoadLogstoreConfig(util.StringDeepCopy(project), util.StringDeepCopy(logstore), util.StringDeepCopy(configName), // Make deep copy if you want to save it in Go in the future. 
diff --git a/plugins/input/systemv2/input_system_linux.go b/plugins/input/systemv2/input_system_linux.go index 4f148b4f0f..e9b3c326d8 100644 --- a/plugins/input/systemv2/input_system_linux.go +++ b/plugins/input/systemv2/input_system_linux.go @@ -99,6 +99,7 @@ func (st tcpState) String() string { } func (r *InputSystem) Init(context pipeline.Context) (int, error) { + r.context = context // mount the host proc path fs, err := procfs.NewFS(helper.GetMountedFilePath(procfs.DefaultMountPoint)) if err != nil { diff --git a/plugins/input/systemv2/input_system_v2.go b/plugins/input/systemv2/input_system_v2.go index bbc048ef56..66c98273db 100644 --- a/plugins/input/systemv2/input_system_v2.go +++ b/plugins/input/systemv2/input_system_v2.go @@ -96,7 +96,6 @@ func (r *InputSystem) CommonInit(context pipeline.Context) (int, error) { } r.excludeDiskPathRegex = reg } - r.context = context r.commonLabels.Append("hostname", util.GetHostName()) r.commonLabels.Append("ip", util.GetIPAddress()) for key, val := range r.Labels { diff --git a/scripts/gen_build_scripts.sh b/scripts/gen_build_scripts.sh index bf3e40b893..514a9a3297 100755 --- a/scripts/gen_build_scripts.sh +++ b/scripts/gen_build_scripts.sh @@ -32,9 +32,11 @@ COPY_GIT_CONFIGS=${7:-${DOCKER_BUILD_COPY_GIT_CONFIGS:-true}} PLUGINS_CONFIG_FILE=${8:-${PLUGINS_CONFIG_FILE:-plugins.yml,external_plugins.yml}} GO_MOD_FILE=${9:-${GO_MOD_FILE:-go.mod}} +BUILD_TYPE=${BUILD_TYPE:-Release} +BUILD_LOGTAIL=${BUILD_LOGTAIL:-ON} BUILD_LOGTAIL_UT=${BUILD_LOGTAIL_UT:-OFF} -ENABLE_COMPATIBLE_MODE=${ENABLE_COMPATIBLE_MODE:-OFF} -ENABLE_STATIC_LINK_CRT=${ENABLE_STATIC_LINK_CRT:-OFF} +ENABLE_COMPATIBLE_MODE=${ENABLE_COMPATIBLE_MODE:-ON} +ENABLE_STATIC_LINK_CRT=${ENABLE_STATIC_LINK_CRT:-ON} WITHOUTGDB==${WITHOUTGDB:-OFF} BUILD_SCRIPT_FILE=$GENERATED_HOME/gen_build.sh COPY_SCRIPT_FILE=$GENERATED_HOME/gen_copy_docker.sh @@ -77,13 +79,13 @@ EOF chmod 755 $BUILD_SCRIPT_FILE if [ $CATEGORY = "plugin" ]; then - echo "mkdir -p core/build && cd 
core/build && cmake -DCMAKE_BUILD_TYPE=Release -DLOGTAIL_VERSION=${VERSION} .. && cd plugin && make -s PluginAdapter && cd ../../.. && ./scripts/upgrade_adapter_lib.sh && ./scripts/plugin_build.sh mod c-shared ${OUT_DIR} ${VERSION} ${PLUGINS_CONFIG_FILE} ${GO_MOD_FILE}" >>$BUILD_SCRIPT_FILE + echo "mkdir -p core/build && cd core/build && cmake -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DLOGTAIL_VERSION=${VERSION} .. && cd plugin && make -s PluginAdapter && cd ../../.. && ./scripts/upgrade_adapter_lib.sh && ./scripts/plugin_build.sh mod c-shared ${OUT_DIR} ${VERSION} ${PLUGINS_CONFIG_FILE} ${GO_MOD_FILE}" >>$BUILD_SCRIPT_FILE elif [ $CATEGORY = "core" ]; then - echo "mkdir -p core/build && cd core/build && cmake -DCMAKE_BUILD_TYPE=Release -DLOGTAIL_VERSION=${VERSION} -DBUILD_LOGTAIL_UT=${BUILD_LOGTAIL_UT} -DENABLE_COMPATIBLE_MODE=${ENABLE_COMPATIBLE_MODE} -DENABLE_STATIC_LINK_CRT=${ENABLE_STATIC_LINK_CRT} -DWITHOUTGDB=${WITHOUTGDB} .. && make -sj\$nproc" >>$BUILD_SCRIPT_FILE + echo "mkdir -p core/build && cd core/build && cmake -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DLOGTAIL_VERSION=${VERSION} -DBUILD_LOGTAIL=${BUILD_LOGTAIL} -DBUILD_LOGTAIL_UT=${BUILD_LOGTAIL_UT} -DENABLE_COMPATIBLE_MODE=${ENABLE_COMPATIBLE_MODE} -DENABLE_STATIC_LINK_CRT=${ENABLE_STATIC_LINK_CRT} -DWITHOUTGDB=${WITHOUTGDB} .. && make -sj\$nproc" >>$BUILD_SCRIPT_FILE elif [ $CATEGORY = "all" ]; then - echo "mkdir -p core/build && cd core/build && cmake -DCMAKE_BUILD_TYPE=Release -DLOGTAIL_VERSION=${VERSION} -DBUILD_LOGTAIL_UT=${BUILD_LOGTAIL_UT} -DENABLE_COMPATIBLE_MODE=${ENABLE_COMPATIBLE_MODE} -DENABLE_STATIC_LINK_CRT=${ENABLE_STATIC_LINK_CRT} -DWITHOUTGDB=${WITHOUTGDB} .. 
&& make -sj\$nproc && cd - && ./scripts/upgrade_adapter_lib.sh && ./scripts/plugin_build.sh mod c-shared ${OUT_DIR} ${VERSION} ${PLUGINS_CONFIG_FILE} ${GO_MOD_FILE}" >>$BUILD_SCRIPT_FILE + echo "mkdir -p core/build && cd core/build && cmake -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DLOGTAIL_VERSION=${VERSION} -DBUILD_LOGTAIL_UT=${BUILD_LOGTAIL_UT} -DENABLE_COMPATIBLE_MODE=${ENABLE_COMPATIBLE_MODE} -DENABLE_STATIC_LINK_CRT=${ENABLE_STATIC_LINK_CRT} -DWITHOUTGDB=${WITHOUTGDB} .. && make -sj\$nproc && cd - && ./scripts/upgrade_adapter_lib.sh && ./scripts/plugin_build.sh mod c-shared ${OUT_DIR} ${VERSION} ${PLUGINS_CONFIG_FILE} ${GO_MOD_FILE}" >>$BUILD_SCRIPT_FILE elif [ $CATEGORY = "e2e" ]; then - echo "mkdir -p core/build && cd core/build && cmake -DLOGTAIL_VERSION=${VERSION} -DBUILD_LOGTAIL_UT=${BUILD_LOGTAIL_UT} -DENABLE_COMPATIBLE_MODE=${ENABLE_COMPATIBLE_MODE} -DENABLE_STATIC_LINK_CRT=${ENABLE_STATIC_LINK_CRT} -DWITHOUTGDB=${WITHOUTGDB} .. && make -sj\$nproc && cd - && ./scripts/plugin_gocbuild.sh ${OUT_DIR} ${PLUGINS_CONFIG_FILE} ${GO_MOD_FILE}" >>$BUILD_SCRIPT_FILE + echo "mkdir -p core/build && cd core/build && cmake -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DLOGTAIL_VERSION=${VERSION} -DBUILD_LOGTAIL_UT=${BUILD_LOGTAIL_UT} -DENABLE_COMPATIBLE_MODE=${ENABLE_COMPATIBLE_MODE} -DENABLE_STATIC_LINK_CRT=${ENABLE_STATIC_LINK_CRT} -DWITHOUTGDB=${WITHOUTGDB} .. && make -sj\$nproc && cd - && ./scripts/plugin_gocbuild.sh ${OUT_DIR} ${PLUGINS_CONFIG_FILE} ${GO_MOD_FILE}" >>$BUILD_SCRIPT_FILE fi } @@ -92,19 +94,24 @@ function generateCopyScript() { echo 'BINDIR=$(cd $(dirname "${BASH_SOURCE[0]}")&& cd .. 
&& pwd)/'${OUT_DIR}'/' >>$COPY_SCRIPT_FILE echo 'rm -rf $BINDIR && mkdir $BINDIR' >>$COPY_SCRIPT_FILE echo "id=\$(docker create ${REPOSITORY}:${VERSION})" >>$COPY_SCRIPT_FILE - if [ $BUILD_LOGTAIL_UT = "ON" ]; then - echo 'docker cp "$id":/src/core/build core/build' >>$COPY_SCRIPT_FILE - fi if [ $CATEGORY = "plugin" ]; then echo 'docker cp "$id":/src/'${OUT_DIR}'/libPluginBase.so $BINDIR' >>$COPY_SCRIPT_FILE elif [ $CATEGORY = "core" ]; then - echo 'docker cp "$id":/src/core/build/ilogtail $BINDIR' >>$COPY_SCRIPT_FILE - echo 'docker cp "$id":/src/core/build/go_pipeline/libPluginAdapter.so $BINDIR' >>$COPY_SCRIPT_FILE + if [ $BUILD_LOGTAIL = "ON" ]; then + echo 'docker cp "$id":/src/core/build/ilogtail $BINDIR' >>$COPY_SCRIPT_FILE + echo 'docker cp "$id":/src/core/build/go_pipeline/libPluginAdapter.so $BINDIR' >>$COPY_SCRIPT_FILE + fi + if [ $BUILD_LOGTAIL_UT = "ON" ]; then + echo 'docker cp "$id":/src/core/build core/build' >>$COPY_SCRIPT_FILE + fi else echo 'docker cp "$id":/src/'${OUT_DIR}'/libPluginBase.so $BINDIR' >>$COPY_SCRIPT_FILE echo 'docker cp "$id":/src/core/build/ilogtail $BINDIR' >>$COPY_SCRIPT_FILE echo 'docker cp "$id":/src/core/build/go_pipeline/libPluginAdapter.so $BINDIR' >>$COPY_SCRIPT_FILE + if [ $BUILD_LOGTAIL_UT = "ON" ]; then + echo 'docker cp "$id":/src/core/build core/build' >>$COPY_SCRIPT_FILE + fi fi echo 'echo -e "{\n}" > $BINDIR/ilogtail_config.json' >>$COPY_SCRIPT_FILE echo 'mkdir -p $BINDIR/user_yaml_config.d' >>$COPY_SCRIPT_FILE