Merge remote-tracking branch 'origin/coverage' into go-config
Abingcbc committed Sep 25, 2024
2 parents 2bf161e + bea03ba commit 1cda480
Showing 12 changed files with 103 additions and 112 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/build-core-ut.yaml
@@ -82,15 +82,15 @@ jobs:
run: make unittest_core

- name: Unit Test Coverage
run: docker build -t unittest_coverage -f ./docker/Dockerfile_coverage . && docker run -v $(pwd):$(pwd) unittest_coverage bash -c "cd $(pwd)/core && gcovr --root . --lcov coverage.lcov --txt coverage.txt -e \".*sdk.*\" -e \".*observer.*\" -e \".*protobuf.*\" -e \".*unittest.*\" -e \".*config_server.*\" -e \".*fuse.*\" -e \".*go_pipeline.*\""
run: docker build -t unittest_coverage -f ./docker/Dockerfile_coverage . && docker run -v $(pwd):$(pwd) unittest_coverage bash -c "cd $(pwd)/core && gcovr --root . --json coverage.json --json-summary-pretty --json-summary summary.json -e \".*sdk.*\" -e \".*observer.*\" -e \".*logger.*\" -e \".*unittest.*\" -e \".*config_server.*\" -e \".*go_pipeline.*\" -e \".*application.*\" -e \".*protobuf.*\" -e \".*runner.*\""

- name: Setup Python3.10
uses: actions/setup-python@v5
with:
python-version: "3.10"

- name: Report code coverage
run: python3 tools/coverage-diff/main.py core/coverage.txt
run: python3 tools/coverage-diff/main.py --path core/coverage.json --summary core/summary.json

result:
runs-on: arc-runner-set-ilogtail
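For local debugging, the coverage step above can be reproduced outside the CI runner. The following is a minimal sketch, assuming gcovr 7.x is installed and make unittest_core has already been run so that .gcda/.gcno files exist under core/; it mirrors the gcovr flags from the workflow and then invokes the reporting script from the repository root. Note that the workflow passes --summary, which argparse resolves to the script's --summary_path option via prefix matching.

import subprocess

# Exclude patterns copied from the workflow's gcovr invocation.
EXCLUDES = [".*sdk.*", ".*observer.*", ".*logger.*", ".*unittest.*", ".*config_server.*",
            ".*go_pipeline.*", ".*application.*", ".*protobuf.*", ".*runner.*"]

gcovr_cmd = ["gcovr", "--root", ".", "--json", "coverage.json",
             "--json-summary-pretty", "--json-summary", "summary.json"]
for pattern in EXCLUDES:
    gcovr_cmd += ["-e", pattern]

# Generate coverage.json and summary.json inside core/, as the workflow does.
subprocess.run(gcovr_cmd, cwd="core", check=True)

# Report per-line diff coverage for the changed files.
subprocess.run(["python3", "tools/coverage-diff/main.py",
                "--path", "core/coverage.json", "--summary", "core/summary.json"],
               check=True)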
2 changes: 1 addition & 1 deletion core/CMakeLists.txt
@@ -125,7 +125,7 @@ set(SUB_DIRECTORIES_LIST
)
if (LINUX)
if (ENABLE_ENTERPRISE)
set(SUB_DIRECTORIES_LIST ${SUB_DIRECTORIES_LIST} shennong shennong/sdk streamlog aggregator)
set(SUB_DIRECTORIES_LIST ${SUB_DIRECTORIES_LIST} shennong shennong/sdk)
endif()
elseif(MSVC)
if (ENABLE_ENTERPRISE)
9 changes: 0 additions & 9 deletions core/app_config/AppConfig.cpp
@@ -851,15 +851,6 @@ bool AppConfig::CheckAndResetProxyAddress(const char* envKey, string& address) {
}

void AppConfig::LoadOtherConf(const Json::Value& confJson) {
// if (confJson.isMember("mapping_conf_path") && confJson["mapping_conf_path"].isString())
// mMappingConfigPath = confJson["mapping_conf_path"].asString();
// else
// mMappingConfigPath = STRING_FLAG(default_mapping_config_path);

if (confJson.isMember("streamlog_open") && confJson["streamlog_open"].isBool()) {
mOpenStreamLog = confJson["streamlog_open"].asBool();
}

{
int32_t oasConnectTimeout = 0;
if (LoadInt32Parameter(
4 changes: 2 additions & 2 deletions core/app_config/AppConfig.h
@@ -57,7 +57,7 @@ class AppConfig {
// uint32_t mStreamLogTcpPort;
// uint32_t mStreamLogPoolSizeInMb;
// uint32_t mStreamLogRcvLenPerCall;
bool mOpenStreamLog;
// bool mOpenStreamLog;

// performance
float mCpuUsageUpLimit;
@@ -285,7 +285,7 @@ class AppConfig {

// uint32_t GetStreamLogRcvLenPerCall() const { return mStreamLogRcvLenPerCall; }

bool GetOpenStreamLog() const { return mOpenStreamLog; }
// bool GetOpenStreamLog() const { return mOpenStreamLog; }

std::string GetIlogtailConfigJson() {
ScopedSpinLock lock(mAppConfigLock);
1 change: 0 additions & 1 deletion core/application/Application.cpp
@@ -57,7 +57,6 @@
#if defined(__linux__) && !defined(__ANDROID__)
#include "common/LinuxDaemonUtil.h"
#include "shennong/ShennongManager.h"
#include "streamlog/StreamLogManager.h"
#endif
#else
#include "provider/Provider.h"
40 changes: 0 additions & 40 deletions core/config/PipelineConfig.cpp
@@ -140,9 +140,6 @@ bool PipelineConfig::Parse() {
// extensions module parsing will rely on their results.
bool hasObserverInput = false;
bool hasFileInput = false;
#ifdef __ENTERPRISE__
bool hasStreamInput = false;
#endif
key = "inputs";
itr = mDetail->find(key.c_str(), key.c_str() + key.size());
if (!itr) {
@@ -244,21 +241,10 @@
hasObserverInput = true;
} else if (pluginType == "input_file" || pluginType == "input_container_stdio") {
hasFileInput = true;
#ifdef __ENTERPRISE__
} else if (pluginType == "input_stream") {
if (!AppConfig::GetInstance()->GetOpenStreamLog()) {
PARAM_ERROR_RETURN(
sLogger, alarm, "stream log is not enabled", noModule, mName, mProject, mLogstore, mRegion);
}
hasStreamInput = true;
#endif
}
}
// TODO: remove these special restrictions
bool hasSpecialInput = hasObserverInput || hasFileInput;
#ifdef __ENTERPRISE__
hasSpecialInput = hasSpecialInput || hasStreamInput;
#endif
if (hasSpecialInput && (*mDetail)["inputs"].size() > 1) {
PARAM_ERROR_RETURN(sLogger,
alarm,
@@ -283,19 +269,6 @@
mLogstore,
mRegion);
}
#ifdef __ENTERPRISE__
// TODO: remove these special restrictions
if (hasStreamInput && !itr->empty()) {
PARAM_ERROR_RETURN(sLogger,
alarm,
"processor plugins coexist with input_stream",
noModule,
mName,
mProject,
mLogstore,
mRegion);
}
#endif
bool isCurrentPluginNative = true;
for (Json::Value::ArrayIndex i = 0; i < itr->size(); ++i) {
const Json::Value& plugin = (*itr)[i];
@@ -520,19 +493,6 @@
PARAM_ERROR_RETURN(
sLogger, alarm, "unsupported flusher plugin", pluginType, mName, mProject, mLogstore, mRegion);
}
#ifdef __ENTERPRISE__
// TODO: remove these special restrictions
if (hasStreamInput && pluginType != "flusher_sls") {
PARAM_ERROR_RETURN(sLogger,
alarm,
"flusher plugins other than flusher_sls coexist with input_stream",
noModule,
mName,
mProject,
mLogstore,
mRegion);
}
#endif
mFlushers.push_back(&plugin);
}
// TODO: remove these special restrictions
1 change: 0 additions & 1 deletion core/monitor/Monitor.cpp
@@ -284,7 +284,6 @@ bool LogtailMonitor::SendStatusProfile(bool suicide) {
AddLogContent(logPtr, "projects", FlusherSLS::GetAllProjects());
AddLogContent(logPtr, "instance_id", Application::GetInstance()->GetInstanceId());
AddLogContent(logPtr, "instance_key", id);
AddLogContent(logPtr, "syslog_open", AppConfig::GetInstance()->GetOpenStreamLog());
// Host informations.
AddLogContent(logPtr, "ip", LogFileProfiler::mIpAddr);
AddLogContent(logPtr, "hostname", LogFileProfiler::mHostname);
6 changes: 0 additions & 6 deletions core/pipeline/plugin/PluginRegistry.cpp
@@ -36,9 +36,6 @@
#include "plugin/input/InputNetworkSecurity.h"
#include "plugin/input/InputProcessSecurity.h"
#include "plugin/input/InputObserverNetwork.h"
#ifdef __ENTERPRISE__
#include "plugin/input/InputStream.h"
#endif
#endif
#include "logger/Logger.h"
#include "pipeline/plugin/creator/CProcessor.h"
@@ -136,9 +133,6 @@ void PluginRegistry::LoadStaticPlugins() {
RegisterInputCreator(new StaticInputCreator<InputNetworkSecurity>());
RegisterInputCreator(new StaticInputCreator<InputProcessSecurity>());
RegisterInputCreator(new StaticInputCreator<InputObserverNetwork>());
#ifdef __ENTERPRISE__
RegisterInputCreator(new StaticInputCreator<InputStream>());
#endif
#endif

RegisterProcessorCreator(new StaticProcessorCreator<ProcessorSplitLogStringNative>());
7 changes: 0 additions & 7 deletions core/plugin/input/input.cmake
@@ -23,17 +23,10 @@ list(APPEND THIS_SOURCE_FILES_LIST ${THIS_SOURCE_FILES})
if(MSVC)
# remove observer related files in input
list(REMOVE_ITEM THIS_SOURCE_FILES_LIST ${CMAKE_SOURCE_DIR}/plugin/input/InputObserverNetwork.cpp ${CMAKE_SOURCE_DIR}/plugin/input/InputObserverNetwork.h)
if (ENABLE_ENTERPRISE)
list(REMOVE_ITEM THIS_SOURCE_FILES_LIST ${CMAKE_SOURCE_DIR}/plugin/input/InputStream.cpp ${CMAKE_SOURCE_DIR}/plugin/input/InputStream.h)
endif ()
elseif(UNIX)
if (NOT LINUX)
# remove observer related files in input
list(REMOVE_ITEM THIS_SOURCE_FILES_LIST ${CMAKE_SOURCE_DIR}/plugin/input/InputObserverNetwork.cpp ${CMAKE_SOURCE_DIR}/plugin/input/InputObserverNetwork.h)
# remove inputStream in input
if (ENABLE_ENTERPRISE)
list(REMOVE_ITEM THIS_SOURCE_FILES_LIST ${CMAKE_SOURCE_DIR}/plugin/input/InputStream.cpp ${CMAKE_SOURCE_DIR}/plugin/input/InputStream.h)
endif ()
endif()
endif()

2 changes: 1 addition & 1 deletion docker/Dockerfile_coverage
@@ -30,4 +30,4 @@ RUN python3 -m pip install --upgrade pip
RUN cp /usr/local/python3/bin/pip3 /usr/bin/pip3 && pip3 install gcovr==7.0
RUN cp /usr/local/python3/bin/gcovr /usr/bin/gcovr

CMD ["bash", "-c", "gcovr --root . --lcov coverage.lcov --txt coverage.txt -e \".*sdk.*\" -e \".*observer.*\" -e \".*lo.*\" -e \".*unittest.*\" -e \".*config_server.*\" -e \".*fuse.*\" -e \".*go_pipeline.*\""]
CMD ["bash", "-c", "gcovr --root . --json coverage.json --json-summary-pretty --json-summary summary.json -e \".*sdk.*\" -e \".*observer.*\" -e \".*logger.*\" -e \".*unittest.*\" -e \".*config_server.*\" -e \".*go_pipeline.*\" -e \".*application.*\" -e \".*protobuf.*\" -e \".*runner.*\""]
10 changes: 7 additions & 3 deletions plugins/input/canal/input_canal.go
@@ -759,9 +759,13 @@ func (sc *ServiceCanal) Start(c pipeline.Collector) error {
startPos.Pos = sc.checkpoint.Offset
}
if nil == gtid && 0 == len(startPos.Name) && !sc.StartFromBegining {
gtid, err = sc.getLatestGTID()
if err != nil {
logger.Warning(sc.context.GetRuntimeContext(), "CANAL_START_ALARM", "Call getLatestGTID failed, error", err)
if sc.isGTIDEnabled {
gtid, err = sc.getLatestGTID()
if err != nil {
logger.Warning(sc.context.GetRuntimeContext(), "CANAL_START_ALARM", "Call getLatestGTID failed, error", err)
}
}
if gtid == nil {
startPos = sc.GetBinlogLatestPos()
}
logger.Infof(sc.context.GetRuntimeContext(), "Get latest checkpoint GTID: %v Position: %v", gtid, startPos)
129 changes: 90 additions & 39 deletions tools/coverage-diff/main.py
@@ -1,53 +1,104 @@
import argparse
import subprocess
import sys
import time
import json
import re

ERROR_COLOR = '\033[31m'
RESET_COLOR = '\033[0m'

def get_changed_files():
try:
# Run the git command to get the list of changed files
result = subprocess.Popen('git diff --name-only -r HEAD^1 HEAD', shell=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Split the result by new line to get each file name
out, err = result.communicate()
changed_files = out.splitlines()
result_files = []
for file in changed_files:
fileStr = file.decode('utf-8')
if fileStr.startswith('core'):
result_files.append(fileStr[5:])
return result_files
result = subprocess.check_output(['git', 'diff', '--unified=0', 'HEAD^1' ,'HEAD'], universal_newlines=True)
return result
except subprocess.CalledProcessError as e:
print(f"An error occurred while running git command: {e}")
print(f'An error occurred while running git command: {e}')
return []

def parse_diff(diff_output):
changes = {}

current_file = None
for line in diff_output.split('\n'):
# Match the file name from the diff header
file_match = re.match(r'^diff --git a/(.*) b/(.*)$', line)
if file_match:
current_file = file_match.group(2)
changes[current_file] = []
continue

# Match the changed line ranges within the current file
hunk_match = re.match(r'^@@ -\d+(,\d+)? \+(\d+)(,(\d+))? @@', line)
if hunk_match and current_file:
start_line = int(hunk_match.group(2))
line_count = int(hunk_match.group(4) if hunk_match.group(4) else 1)
for i in range(start_line, start_line + line_count):
changes[current_file].append(i)

return changes

if __name__ == '__main__':
parser = argparse.ArgumentParser(description="A simple argparse example")
parser.add_argument("path", type=str, help="The path of coverage file")
parser = argparse.ArgumentParser(description='A simple argparse example')
parser.add_argument('--path', type=str, help='The path of coverage file')
parser.add_argument('--summary_path', type=str, help='The path of coverage summary file')
args = parser.parse_args()
changed_files = get_changed_files()
line_cache = ""
not_satified = []
changed_lines = parse_diff(changed_files)

with open(args.summary_path, 'r') as file:
summary = json.load(file)
print('='*20)
print('Total coverage rate: ', summary['line_percent'], '%')
print('='*20)

with open(args.path, 'r') as file:
for line in file:
if len(line_cache) > 0:
line = line_cache + line
line_cache = ""
if '/' in line or ('%' in line and 'TOTAL' not in line):
for changed_file in changed_files:
if line.startswith(changed_file):
units = line.split()
if len(units) < 4:
# some files with long filename will be split into 2 lines
line_cache = line
continue
coverage_rate = int(units[3][:-1])
if coverage_rate < 50:
not_satified.append(changed_file)
print(line, flush=True)
break
else:
print(line, flush=True)
if len(not_satified) > 0:
print(f"Coverage rate is less than 50% for the following files: {not_satified}", flush=True)
coverage = json.load(file)
not_satified = {}
not_satified_count = 0
satified_count = 0

for file in coverage['files']:
if 'core/' + file['file'] in changed_lines:
file_name = 'core/' + file['file']
cur_satified = []
cur_not_satified = []
i = 0
j = 0
while i < len(file['lines']) and j < len(changed_lines[file_name]):
if file['lines'][i]['line_number'] == changed_lines[file_name][j]:
if file['lines'][i]['count'] == 0:
cur_not_satified.append(file['lines'][i]['line_number'])
else:
cur_satified.append(file['lines'][i]['line_number'])
i += 1
j += 1
elif file['lines'][i]['line_number'] < changed_lines[file_name][j]:
i += 1
else:
j += 1
if len(cur_satified) > 0 or len(cur_not_satified) > 0:
print('file: ', file_name)
if len(cur_satified) > 0:
print('covered lines: ', cur_satified)
satified_count += len(cur_satified)
if len(cur_not_satified) > 0:
print(f'{ERROR_COLOR}not covered lines:{RESET_COLOR} ', cur_not_satified)
not_satified_count += len(cur_not_satified)
print('')
if len(cur_not_satified) > 0:
not_satified[file_name] = cur_not_satified

if not_satified_count + satified_count == 0:
print('No line to cover', flush=True)
sys.exit(0)

coverage_rate = ((satified_count) / (not_satified_count + satified_count) ) * 100
print('='*20)
if coverage_rate < 50:
print(f'{ERROR_COLOR}Diff coverage rate is less than 50%: {coverage_rate:.1f}%{RESET_COLOR}', flush=True)
print('='*20)
sys.exit(1)
else:
print(f'Diff coverage rate is {coverage_rate:.1f}%', flush=True)
print('='*20)
sys.exit(0)

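To make the new reporting flow concrete, below is a small self-contained sketch of what tools/coverage-diff/main.py does with its two inputs: it maps git diff --unified=0 hunks to changed line numbers per file, then looks up each changed line in gcovr's per-line JSON records to decide whether it was executed. The sample diff text and coverage records are illustrative only; the field names (files, file, lines, line_number, count) follow what the script itself reads, and the dict lookup here stands in for the script's two-pointer walk over sorted line lists.

import re

# Illustrative output of `git diff --unified=0 HEAD^1 HEAD` (hypothetical file and lines).
SAMPLE_DIFF = """diff --git a/core/foo/Bar.cpp b/core/foo/Bar.cpp
@@ -10,0 +11,2 @@ void Bar::Init() {
+    int a = 1;
+    int b = 2;
@@ -42 +44 @@ void Bar::Stop() {
+    Flush();
"""

# Illustrative gcovr-style records, limited to the fields main.py reads.
SAMPLE_COVERAGE = {
    "files": [
        {"file": "foo/Bar.cpp",
         "lines": [{"line_number": 11, "count": 3},
                   {"line_number": 12, "count": 0},
                   {"line_number": 44, "count": 1}]},
    ]
}

def parse_diff(diff_output):
    """Same regexes as main.py: file header line, then new-side hunk start/length."""
    changes, current_file = {}, None
    for line in diff_output.split("\n"):
        file_match = re.match(r"^diff --git a/(.*) b/(.*)$", line)
        if file_match:
            current_file = file_match.group(2)
            changes[current_file] = []
            continue
        hunk_match = re.match(r"^@@ -\d+(,\d+)? \+(\d+)(,(\d+))? @@", line)
        if hunk_match and current_file:
            start = int(hunk_match.group(2))
            count = int(hunk_match.group(4)) if hunk_match.group(4) else 1
            changes[current_file].extend(range(start, start + count))
    return changes

changed = parse_diff(SAMPLE_DIFF)   # {'core/foo/Bar.cpp': [11, 12, 44]}
covered, uncovered = [], []
for entry in SAMPLE_COVERAGE["files"]:
    changed_lines = changed.get("core/" + entry["file"], [])
    hits = {rec["line_number"]: rec["count"] for rec in entry["lines"]}
    for line_number in changed_lines:
        if line_number in hits:
            (covered if hits[line_number] > 0 else uncovered).append(line_number)

rate = 100.0 * len(covered) / (len(covered) + len(uncovered))
print(f"diff coverage: {rate:.1f}% (covered={covered}, uncovered={uncovered})")
# -> diff coverage: 66.7% (covered=[11, 44], uncovered=[12])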