Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

minor opt: minor optimization to the orca parser #36492

Open
wants to merge 4 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 15 additions & 13 deletions source/common/orca/orca_parser.cc
Original file line number Diff line number Diff line change
Expand Up @@ -153,25 +153,27 @@ absl::StatusOr<OrcaLoadReport> parseOrcaLoadReportHeaders(const HeaderMap& heade
const auto header_value = header_bin[0]->value().getStringView();
RETURN_IF_NOT_OK(tryParseSerializedBinary(header_value, load_report));
} else if (const auto header = headers.get(endpointLoadMetricsHeader()); !header.empty()) {
std::pair<absl::string_view, absl::string_view> split_header =
absl::StrSplit(header[0]->value().getStringView(), absl::MaxSplits(' ', 1));

if (split_header.first == kHeaderFormatPrefixBin) { // Binary protobuf format.
RETURN_IF_NOT_OK(tryParseSerializedBinary(split_header.second, load_report));
} else if (split_header.first == kHeaderFormatPrefixText) { // Native HTTP format.
RETURN_IF_NOT_OK(tryParseNativeHttpEncoded(split_header.second, load_report));
} else if (split_header.first == kHeaderFormatPrefixJson) { // JSON format.
absl::string_view header_value = header[0]->value().getStringView();

if (absl::StartsWith(header_value, kHeaderFormatPrefixBin)) {
// Binary protobuf format.
RETURN_IF_NOT_OK(tryParseSerializedBinary(header_value.substr(kHeaderFormatPrefixBin.size()),
load_report));
} else if (absl::StartsWith(header_value, kHeaderFormatPrefixText)) {
// Native HTTP format.
RETURN_IF_NOT_OK(tryParseNativeHttpEncoded(
header_value.substr(kHeaderFormatPrefixText.size()), load_report));
} else if (absl::StartsWith(header_value, kHeaderFormatPrefixJson)) {
// JSON format.
#if defined(ENVOY_ENABLE_FULL_PROTOS) && defined(ENVOY_ENABLE_YAML)
const std::string json_string = std::string(split_header.second);
bool has_unknown_field = false;
RETURN_IF_ERROR(
Envoy::MessageUtil::loadFromJsonNoThrow(json_string, load_report, has_unknown_field));
RETURN_IF_ERROR(Envoy::MessageUtil::loadFromJsonNoThrow(
header_value.substr(kHeaderFormatPrefixJson.size()), load_report, has_unknown_field));
#else
IS_ENVOY_BUG("JSON formatted ORCA header support not implemented for this build");
#endif // !ENVOY_ENABLE_FULL_PROTOS || !ENVOY_ENABLE_YAML
} else {
return absl::InvalidArgumentError(
fmt::format("unsupported ORCA header format: {}", split_header.first));
return absl::InvalidArgumentError(fmt::format("unsupported ORCA header format"));
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'd consider keeping some string as part of the error.
I think it makes sense to keep the shorter of the first 5 characters and the prefix up to the first occurrence of a " " (the delimiter).
It makes it easier to debug if/when things go wrong with an external upstream server.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

SGTM.

}
} else {
return absl::NotFoundError("no ORCA data sent from the backend");
Expand Down
6 changes: 3 additions & 3 deletions source/common/orca/orca_parser.h
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,9 @@ namespace Orca {
static constexpr absl::string_view kEndpointLoadMetricsHeader = "endpoint-load-metrics";
static constexpr absl::string_view kEndpointLoadMetricsHeaderBin = "endpoint-load-metrics-bin";
// Prefix used to determine format expected in kEndpointLoadMetricsHeader.
static constexpr absl::string_view kHeaderFormatPrefixBin = "BIN";
static constexpr absl::string_view kHeaderFormatPrefixJson = "JSON";
static constexpr absl::string_view kHeaderFormatPrefixText = "TEXT";
static constexpr absl::string_view kHeaderFormatPrefixBin = "BIN ";
static constexpr absl::string_view kHeaderFormatPrefixJson = "JSON ";
static constexpr absl::string_view kHeaderFormatPrefixText = "TEXT ";
// The following fields are the names of the metrics tracked in the ORCA load
// report proto.
static constexpr absl::string_view kApplicationUtilizationField = "application_utilization";
Expand Down
6 changes: 3 additions & 3 deletions source/common/protobuf/utility.h
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Does it make sense to split changes to the utility files/methods out into another PR as their impact isn't limited to ORCA code?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Okay to me.

Original file line number Diff line number Diff line change
Expand Up @@ -255,7 +255,7 @@ class MessageUtil {
static std::size_t hash(const Protobuf::Message& message);

#ifdef ENVOY_ENABLE_YAML
static void loadFromJson(const std::string& json, Protobuf::Message& message,
static void loadFromJson(absl::string_view json, Protobuf::Message& message,
ProtobufMessage::ValidationVisitor& validation_visitor);
/**
* Return ok only when strict conversion(don't ignore unknown field) succeeds.
Expand All @@ -264,9 +264,9 @@ class MessageUtil {
* Return error status for relaxed conversion and set has_unknown_field to false if relaxed
* conversion(ignore unknown field) fails.
*/
static absl::Status loadFromJsonNoThrow(const std::string& json, Protobuf::Message& message,
static absl::Status loadFromJsonNoThrow(absl::string_view json, Protobuf::Message& message,
bool& has_unknown_fileld);
static void loadFromJson(const std::string& json, ProtobufWkt::Struct& message);
static void loadFromJson(absl::string_view json, ProtobufWkt::Struct& message);
static void loadFromYaml(const std::string& yaml, Protobuf::Message& message,
ProtobufMessage::ValidationVisitor& validation_visitor);
#endif
Expand Down
6 changes: 3 additions & 3 deletions source/common/protobuf/yaml_utility.cc
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Does it make sense to split changes to the utility files/methods out into another PR as their impact isn't limited to ORCA code?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

See #36525

Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ void jsonConvertInternal(const Protobuf::Message& source,

} // namespace

void MessageUtil::loadFromJson(const std::string& json, Protobuf::Message& message,
void MessageUtil::loadFromJson(absl::string_view json, Protobuf::Message& message,
ProtobufMessage::ValidationVisitor& validation_visitor) {
bool has_unknown_field;
auto load_status = loadFromJsonNoThrow(json, message, has_unknown_field);
Expand All @@ -132,7 +132,7 @@ void MessageUtil::loadFromJson(const std::string& json, Protobuf::Message& messa
}
}

absl::Status MessageUtil::loadFromJsonNoThrow(const std::string& json, Protobuf::Message& message,
absl::Status MessageUtil::loadFromJsonNoThrow(absl::string_view json, Protobuf::Message& message,
bool& has_unknown_fileld) {
has_unknown_fileld = false;
Protobuf::util::JsonParseOptions options;
Expand Down Expand Up @@ -163,7 +163,7 @@ absl::Status MessageUtil::loadFromJsonNoThrow(const std::string& json, Protobuf:
return relaxed_status;
}

void MessageUtil::loadFromJson(const std::string& json, ProtobufWkt::Struct& message) {
void MessageUtil::loadFromJson(absl::string_view json, ProtobufWkt::Struct& message) {
// No need to validate if converting to a Struct, since there are no unknown
// fields possible.
loadFromJson(json, message, ProtobufMessage::getNullValidationVisitor());
Expand Down
16 changes: 5 additions & 11 deletions test/common/orca/orca_parser_test.cc
Original file line number Diff line number Diff line change
Expand Up @@ -14,17 +14,11 @@ namespace Envoy {
namespace Orca {
namespace {

const std::string formattedHeaderPrefixText() {
CONSTRUCT_ON_FIRST_USE(std::string, absl::StrCat(kHeaderFormatPrefixText, " "));
}
// Returns the prefix ("TEXT ", delimiter included) that marks natively
// HTTP-encoded ORCA header values in tests.
absl::string_view formattedHeaderPrefixText() {
  return kHeaderFormatPrefixText;
}

const std::string formattedHeaderPrefixJson() {
CONSTRUCT_ON_FIRST_USE(std::string, absl::StrCat(kHeaderFormatPrefixJson, " "));
}
// Returns the prefix ("JSON ", delimiter included) that marks JSON-encoded
// ORCA header values in tests.
absl::string_view formattedHeaderPrefixJson() {
  return kHeaderFormatPrefixJson;
}

const std::string formattedHeaderPrefixBin() {
CONSTRUCT_ON_FIRST_USE(std::string, absl::StrCat(kHeaderFormatPrefixBin, " "));
}
// Returns the prefix ("BIN ", delimiter included) that marks binary
// protobuf-encoded ORCA header values in tests.
absl::string_view formattedHeaderPrefixBin() {
  return kHeaderFormatPrefixBin;
}
wbpcode marked this conversation as resolved.
Show resolved Hide resolved

// Returns an example OrcaLoadReport proto with all fields populated.
static xds::data::orca::v3::OrcaLoadReport exampleOrcaLoadReport() {
Expand Down Expand Up @@ -62,14 +56,14 @@ TEST(OrcaParserUtilTest, InvalidOrcaHeaderPrefix) {
{std::string(kEndpointLoadMetricsHeader), "BAD random-value"}};
EXPECT_THAT(
parseOrcaLoadReportHeaders(headers),
StatusHelpers::HasStatus(absl::InvalidArgumentError("unsupported ORCA header format: BAD")));
StatusHelpers::HasStatus(absl::InvalidArgumentError("unsupported ORCA header format")));
}

TEST(OrcaParserUtilTest, EmptyOrcaHeader) {
Http::TestRequestHeaderMapImpl headers{{std::string(kEndpointLoadMetricsHeader), ""}};
EXPECT_THAT(
parseOrcaLoadReportHeaders(headers),
StatusHelpers::HasStatus(absl::InvalidArgumentError("unsupported ORCA header format: ")));
StatusHelpers::HasStatus(absl::InvalidArgumentError("unsupported ORCA header format")));
}

TEST(OrcaParserUtilTest, NativeHttpEncodedHeader) {
Expand Down
Loading