Encapsulate saidump.cpp with a class and add unit test code.
JunhongMao committed Jun 13, 2024
1 parent 4b32eaf commit 1b9d9d5
Showing 10 changed files with 308 additions and 106 deletions.
1 change: 1 addition & 0 deletions configure.ac
100644 → 100755
@@ -297,6 +297,7 @@ AC_OUTPUT(Makefile
unittest/vslib/Makefile
unittest/syncd/Makefile
unittest/proxylib/Makefile
unittest/saidump/Makefile
pyext/Makefile
pyext/py2/Makefile
pyext/py3/Makefile)
5 changes: 4 additions & 1 deletion saidump/Makefile.am
100644 → 100755
@@ -2,8 +2,11 @@ AM_CXXFLAGS = $(SAIINC) -I$(top_srcdir)/lib

bin_PROGRAMS = saidump

saidump_SOURCES = saidump.cpp
saidump_SOURCES = main.cpp saidump.cpp
saidump_CPPFLAGS = $(CODE_COVERAGE_CPPFLAGS)
saidump_CXXFLAGS = $(DBGFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS_COMMON) $(CODE_COVERAGE_CXXFLAGS)
saidump_LDADD = -lhiredis -lswsscommon -lpthread -L$(top_srcdir)/meta/.libs -lsaimetadata -lsaimeta \
-L$(top_srcdir)/lib/.libs -lsairedis -lzmq $(CODE_COVERAGE_LIBS)

noinst_LIBRARIES = libsaidump.a
libsaidump_a_SOURCES = saidump.cpp
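
The split here is deliberate: main() moves into main.cpp so that saidump.cpp can also be built into libsaidump.a, a non-installed convenience archive (noinst_LIBRARIES). Presumably the new unittest/saidump/Makefile registered in configure.ac links the tests against that archive, so the unit tests exercise the same objects that go into the saidump binary without pulling in a second program entry point.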
19 changes: 19 additions & 0 deletions saidump/main.cpp
@@ -0,0 +1,19 @@
#include "saidump.h"

using namespace syncd;

int main(int argc, char **argv)
{
SWSS_LOG_ENTER();
swss::Logger::getInstance().setMinPrio(swss::Logger::SWSS_INFO);

SaiDump m_saiDump;

if(SAI_STATUS_SUCCESS != m_saiDump.handleCmdLine(argc, argv))
{
return EXIT_FAILURE;
}

m_saiDump.dumpFromRedisDb();
return EXIT_SUCCESS;
}
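
The new main.cpp is only a thin wrapper: it sets up logging, lets a SaiDump object parse the command line, and then asks it to dump. The class itself is declared in saidump.h, one of the ten changed files that is not rendered on this page. The sketch below is only inferred from the calls and members visible in this diff, not the actual header; the access specifiers, default values, include layout, and the name m_oidMap (standing in for the removed g_oid_map global) are assumptions.

#pragma once

#include <string>
#include <map>

extern "C" {
#include <sai.h>
}

#include "swss/table.h"
#include <nlohmann/json.hpp>

namespace syncd
{
    class SaiDump
    {
        public:

            sai_status_t handleCmdLine(int argc, char **argv);

            void dumpFromRedisDb();

        private:

            void printUsage();

            sai_status_t preProcessFile(const std::string file_name);

            sai_status_t dumpFromRedisRdbJson();

            void traverseJson(const nlohmann::json& jsn);

            void dumpGraphFun(const swss::TableDump& td);

            void print_attributes(size_t indent, const swss::TableMap& map);

            const swss::TableMap* get_table_map(sai_object_id_t object_id);

            size_t get_max_attr_len(const swss::TableMap& map);

            std::string pad_string(std::string s, size_t pad);

        private:

            bool dumpTempView = false;

            bool dumpGraph = false;

            std::string rdbJsonFile;

            uint64_t rdbJSonSizeLimit = 0; // set from RDB_JSON_MAX_SIZE or -m in handleCmdLine()

            // Replacement for the old g_oid_map global; the real member name
            // is not visible in this view.
            std::map<sai_object_id_t, const swss::TableMap*> m_oidMap;
    };
}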
152 changes: 49 additions & 103 deletions saidump/saidump.cpp
@@ -1,45 +1,7 @@
#include <inttypes.h>
#include <string>
#include <set>
#include <sstream>
#include <iostream>
#include <fstream>
#include <regex>
#include <climits>

extern "C" {
#include <sai.h>
}

#include "swss/table.h"
#include "meta/sai_serialize.h"
#include "sairediscommon.h"
#include <nlohmann/json.hpp>

#include <getopt.h>

// TODO split to multiple cpp

using namespace swss;
using json = nlohmann::json;
#include "saidump.h"
using namespace syncd;

// Default value: 100 MB
constexpr int64_t RDB_JSON_MAX_SIZE = 1024 * 1024 * 100;

struct CmdOptions
{
bool skipAttributes;
bool dumpTempView;
bool dumpGraph;
std::string rdbJsonFile;
uint64_t rdbJSonSizeLimit;
};


static CmdOptions g_cmdOptions;
static std::map<sai_object_id_t, const TableMap*> g_oid_map;

void printUsage()
void SaiDump::printUsage()
{
SWSS_LOG_ENTER();

@@ -56,15 +18,14 @@ void printUsage()
std::cout << " Print out this message" << std::endl;
}

CmdOptions handleCmdLine(int argc, char **argv)
sai_status_t SaiDump::handleCmdLine(int argc, char **argv)
{
SWSS_LOG_ENTER();

CmdOptions options;

options.dumpTempView = false;
options.dumpGraph = false;
options.rdbJSonSizeLimit = RDB_JSON_MAX_SIZE;
sai_status_t status = SAI_STATUS_SUCCESS;
dumpTempView = false;
dumpGraph = false;
rdbJSonSizeLimit = RDB_JSON_MAX_SIZE;

const char* const optstring = "gtr:m:h";
uint64_t result = 0;
@@ -94,17 +55,20 @@ CmdOptions handleCmdLine(int argc, char **argv)
{
case 'g':
SWSS_LOG_NOTICE("Dumping graph");
options.dumpGraph = true;
dumpGraph = true;
status = SAI_STATUS_SUCCESS;
break;

case 't':
SWSS_LOG_NOTICE("Dumping temp view");
options.dumpTempView = true;
dumpTempView = true;
status = SAI_STATUS_SUCCESS;
break;

case 'r':
SWSS_LOG_NOTICE("Dumping from %s", optarg);
options.rdbJsonFile = std::string(optarg);
rdbJsonFile = std::string(optarg);
status = SAI_STATUS_SUCCESS;
break;

case 'm':
@@ -124,9 +88,9 @@ CmdOptions handleCmdLine(int argc, char **argv)
exit(EXIT_SUCCESS);
}

options.rdbJSonSizeLimit = result * 1024 * 1024;
SWSS_LOG_NOTICE("Configure the RDB JSON MAX size to %llu MB", options.rdbJSonSizeLimit / 1024 / 1024);

rdbJSonSizeLimit = result * 1024 * 1024;
SWSS_LOG_NOTICE("Configure the RDB JSON MAX size to %llu MB", rdbJSonSizeLimit / 1024 / 1024);
status = SAI_STATUS_SUCCESS;
break;

case 'h':
@@ -143,11 +107,10 @@ CmdOptions handleCmdLine(int argc, char **argv)
exit(EXIT_FAILURE);
}
}

return options;
return status;
}

size_t get_max_attr_len(const TableMap& map)
size_t SaiDump::get_max_attr_len(const swss::TableMap& map)
{
SWSS_LOG_ENTER();

@@ -161,7 +124,7 @@ size_t get_max_attr_len(const TableMap& map)
return max;
}

std::string pad_string(std::string s, size_t pad)
std::string SaiDump::pad_string(std::string s, size_t pad)
{
SWSS_LOG_ENTER();

@@ -175,7 +138,7 @@ std::string pad_string(std::string s, size_t pad)
return s;
}

const TableMap* get_table_map(sai_object_id_t object_id)
const swss::TableMap* SaiDump::get_table_map(sai_object_id_t object_id)
{
SWSS_LOG_ENTER();

Expand All @@ -190,7 +153,7 @@ const TableMap* get_table_map(sai_object_id_t object_id)
return it->second;
}

void print_attributes(size_t indent, const TableMap& map)
void SaiDump::print_attributes(size_t indent, const swss::TableMap& map)
{
SWSS_LOG_ENTER();

@@ -218,7 +181,7 @@ void print_attributes(size_t indent, const TableMap& map)
#define GV_ROOT_COLOR "0.650 0.200 1.000"
#define GV_NODE_COLOR "0.650 0.500 1.000"

void dumpGraph(const TableDump& td)
void SaiDump::dumpGraphFun(const swss::TableDump& td)
{
SWSS_LOG_ENTER();

@@ -451,7 +414,7 @@ void dumpGraph(const TableDump& td)
/**
* @brief Process the input JSON file to make sure it's a valid JSON file for the JSON library.
*/
static sai_status_t preProcessFile(const std::string file_name)
sai_status_t SaiDump::preProcessFile(const std::string file_name)
{
SWSS_LOG_ENTER();

@@ -465,11 +428,11 @@ static sai_status_t preProcessFile(const std::string file_name)

input_file.seekg(0, std::ios::end); // Move to the end of the file
uint64_t file_size = input_file.tellg(); // Get the current position
SWSS_LOG_NOTICE("Get %s's size %" PRIu64 " Bytes, limit: %" PRIu64 " MB.", file_name.c_str(), file_size, g_cmdOptions.rdbJSonSizeLimit / 1024 / 1024);
SWSS_LOG_NOTICE("Get %s's size %" PRIu64 " Bytes, limit: %" PRIu64 " MB.", file_name.c_str(), file_size, rdbJSonSizeLimit / 1024 / 1024);

if (file_size >= g_cmdOptions.rdbJSonSizeLimit)
if (file_size >= rdbJSonSizeLimit)
{
SWSS_LOG_ERROR_AND_STDERR("Get %s's size failure or its size %" PRIu64 " >= %" PRIu64 " MB.", file_name.c_str(), file_size, g_cmdOptions.rdbJSonSizeLimit / 1024 / 1024);
SWSS_LOG_ERROR_AND_STDERR("Get %s's size failure or its size %" PRIu64 " >= %" PRIu64 " MB.", file_name.c_str(), file_size, rdbJSonSizeLimit / 1024 / 1024);
return SAI_STATUS_FAILURE;
}

@@ -502,7 +465,7 @@ static sai_status_t preProcessFile(const std::string file_name)
return SAI_STATUS_SUCCESS;
}

static void traverseJson(const json & jsn)
void SaiDump::traverseJson(const nlohmann::json & jsn)
{
SWSS_LOG_ENTER();
if (jsn.is_object())
@@ -533,14 +496,14 @@ static void traverseJson(const json & jsn)
}

std::cout << item_name << " " << std::endl;
json jsn_sub = it.value();
nlohmann::json jsn_sub = it.value();

if (!it->is_object())
{
continue;
}

TableMap map;
swss::TableMap map;

for (auto it_sub = jsn_sub.begin(); it_sub != jsn_sub.end(); ++it_sub)
{
@@ -574,71 +537,61 @@ static void traverseJson(const json & jsn)
}
}

static sai_status_t dumpFromRedisRdbJson(const std::string file_name)
sai_status_t SaiDump::dumpFromRedisRdbJson()
{
SWSS_LOG_ENTER();

std::ifstream input_file(file_name);
if (SAI_STATUS_FAILURE == preProcessFile(rdbJsonFile))
{
return SAI_STATUS_FAILURE;
}

std::ifstream input_file(rdbJsonFile);

if (!input_file.is_open())
{
SWSS_LOG_ERROR_AND_STDERR("The file %s does not exist for dumping from Redis RDB JSON file.", file_name.c_str());
SWSS_LOG_ERROR_AND_STDERR("The file %s does not exist for dumping from Redis RDB JSON file.", rdbJsonFile.c_str());
return SAI_STATUS_FAILURE;
}

try
{
// Parse the JSON data from the file (validation)
json jsonData;
nlohmann::json jsonData;
input_file >> jsonData;
traverseJson(jsonData);
return SAI_STATUS_SUCCESS;
}
catch (std::exception &ex)
{
SWSS_LOG_ERROR_AND_STDERR("JSON file %s is invalid.", file_name.c_str());
SWSS_LOG_ERROR_AND_STDERR("JSON file %s is invalid.", rdbJsonFile.c_str());
SWSS_LOG_ERROR_AND_STDERR("JSON parsing error: %s.", ex.what());
}

return SAI_STATUS_FAILURE;
}

int main(int argc, char **argv)
void SaiDump::dumpFromRedisDb()
{
swss::Logger::getInstance().setMinPrio(swss::Logger::SWSS_DEBUG);

SWSS_LOG_ENTER();

swss::Logger::getInstance().setMinPrio(swss::Logger::SWSS_NOTICE);

swss::Logger::getInstance().setMinPrio(swss::Logger::SWSS_INFO);

g_cmdOptions = handleCmdLine(argc, argv);


if (g_cmdOptions.rdbJsonFile.size() > 0)
if (rdbJsonFile.size() > 0)
{
if (SAI_STATUS_FAILURE == preProcessFile(g_cmdOptions.rdbJsonFile))
{
return EXIT_FAILURE;
}

return dumpFromRedisRdbJson(g_cmdOptions.rdbJsonFile);
dumpFromRedisRdbJson();
return;
}

swss::DBConnector db("ASIC_DB", 0);

std::string table = ASIC_STATE_TABLE;

if (g_cmdOptions.dumpTempView)
if (dumpTempView)
{
table = TEMP_PREFIX + table;
}

swss::Table t(&db, table);

TableDump dump;

swss::TableDump dump;
t.dump(dump);

for (const auto&key: dump)
@@ -656,32 +609,25 @@ int main(int argc, char **argv)
{
sai_object_id_t object_id;
sai_deserialize_object_id(str_object_id, object_id);

g_oid_map[object_id] = &key.second;
}
}

if (g_cmdOptions.dumpGraph)
if (dumpGraph)
{
dumpGraph(dump);

return EXIT_SUCCESS;
dumpGraphFun(dump);
return;
}

for (const auto&key: dump)
{
auto start = key.first.find_first_of(":");
auto str_object_type = key.first.substr(0, start);
auto str_object_id = key.first.substr(start + 1);

std::cout << str_object_type << " " << str_object_id << " " << std::endl;

size_t indent = 4;

print_attributes(indent, key.second);

std::cout << std::endl;
}

return EXIT_SUCCESS;
}
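
The remaining changed files, including the new unittest/saidump suite registered in configure.ac, are not rendered on this page. As a rough illustration only (assuming GoogleTest, which the repository's other unittest directories use, and not the commit's actual test file), a test against the class could look like this:

#include <getopt.h>

#include <gtest/gtest.h>

#include "saidump.h"

using namespace syncd;

TEST(SaiDump, handleCmdLineAcceptsTempViewFlag)
{
    SaiDump saiDump;

    char arg0[] = "saidump";
    char arg1[] = "-t";
    char* args[] = { arg0, arg1 };

    // getopt() keeps global state, so reset it before each parse.
    optind = 1;

    EXPECT_EQ(SAI_STATUS_SUCCESS, saiDump.handleCmdLine(2, args));
}

TEST(SaiDump, dumpFromRedisDbHandlesMissingRdbJsonFile)
{
    SaiDump saiDump;

    char arg0[] = "saidump";
    char arg1[] = "-r";
    char arg2[] = "/nonexistent/dump.json";
    char* args[] = { arg0, arg1, arg2 };

    optind = 1;

    EXPECT_EQ(SAI_STATUS_SUCCESS, saiDump.handleCmdLine(3, args));

    // preProcessFile() should reject the missing file and dumpFromRedisDb()
    // should return without touching Redis.
    saiDump.dumpFromRedisDb();
}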