diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 939872ba..02b711f8 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -8,10 +8,17 @@ on: jobs: autoformatter: + strategy: + matrix: + source-dir: ["./src/", "./examples/src/", "./slo/src/"] + include: + - source-dir: "./src/" + solutionFile: "YdbSdk.sln" + - source-dir: "./examples/src/" + solutionFile: "YdbExamples.sln" + - source-dir: "./slo/src/" + solutionFile: "src.sln" name: autoformat check - concurrency: - group: lint-autoformat-${{ github.ref }} - cancel-in-progress: true runs-on: ubuntu-latest steps: - name: Checkout @@ -21,15 +28,18 @@ jobs: with: dotnet-version: '6.0.x' - name: Restore - run: dotnet restore ./src/YdbSdk.sln + run: dotnet restore ${{ matrix.source-dir }}${{ matrix.solutionFile }} - name: Install ReSharper - run: dotnet tool install -g JetBrains.ReSharper.GlobalTools + run: dotnet tool install -g JetBrains.ReSharper.GlobalTools --version 2023.2.1 - name: format all files with auto-formatter - run: bash ./.github/scripts/format-all-dotnet-code.sh ./src/ YdbSdk.sln "Custom Cleanup" + run: bash ./.github/scripts/format-all-dotnet-code.sh ${{ matrix.source-dir }} ${{ matrix.solutionFile }} "Custom Cleanup" - name: Check repository diff run: bash ./.github/scripts/check-work-copy-equals-to-committed.sh "auto-format broken" inspection: + strategy: + matrix: + solutionPath: ["./src/YdbSdk.sln", "./examples/src/YdbExamples.sln", "./slo/src/src.sln"] runs-on: ubuntu-latest name: Inspection steps: @@ -40,11 +50,11 @@ jobs: with: dotnet-version: '6.0.x' - name: Restore - run: dotnet restore ./src/YdbSdk.sln + run: dotnet restore ${{ matrix.solutionPath }} - name: Inspect code uses: muno92/resharper_inspectcode@v1 with: - solutionPath: ./src/YdbSdk.sln + solutionPath: ${{ matrix.solutionPath }} version: 2023.2.1 include: | **.cs diff --git a/.github/workflows/slo.yml b/.github/workflows/slo.yml new file mode 100644 index 00000000..0425dda0 --- /dev/null +++ b/.github/workflows/slo.yml @@ -0,0 +1,68 @@ +on: + push: + branches: [main] + pull_request: + branches: [main] + workflow_dispatch: + +name: SLO + +jobs: + test-slo: + concurrency: + group: slo-${{ github.ref }} + if: (!contains(github.event.pull_request.labels.*.name, 'no slo')) + + runs-on: ubuntu-latest + name: SLO test + permissions: + checks: write + pull-requests: write + contents: read + issues: write + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + if: env.DOCKER_REPO != null + env: + DOCKER_REPO: ${{ secrets.SLO_DOCKER_REPO }} + + - name: Run SLO + uses: ydb-platform/slo-tests@php-version + if: env.DOCKER_REPO != null + env: + DOCKER_REPO: ${{ secrets.SLO_DOCKER_REPO }} + with: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + KUBECONFIG_B64: ${{ secrets.SLO_KUBE_CONFIG }} + AWS_CREDENTIALS_B64: ${{ secrets.SLO_AWS_CREDENTIALS }} + AWS_CONFIG_B64: ${{ secrets.SLO_AWS_CONFIG }} + DOCKER_USERNAME: ${{ secrets.SLO_DOCKER_USERNAME }} + DOCKER_PASSWORD: ${{ secrets.SLO_DOCKER_PASSWORD }} + DOCKER_REPO: ${{ secrets.SLO_DOCKER_REPO }} + DOCKER_FOLDER: ${{ secrets.SLO_DOCKER_FOLDER }} + s3_endpoint: ${{ secrets.SLO_S3_ENDPOINT }} + s3_images_folder: ${{ vars.SLO_S3_IMAGES_FOLDER }} + grafana_domain: ${{ vars.SLO_GRAFANA_DOMAIN }} + # grafana_dashboard: ${{ vars.SLO_GRAFANA_DASHBOARD }} + grafana_dashboard: dca60386-0d3d-43f5-a2af-5f3fd3e3b295 + grafana_dashboard_width: 2000 + grafana_dashboard_height: 2300 + ydb_version: 'newest' + timeBetweenPhases: 30 + shutdownTime: 30 + + language_id0: 'dotnet' + 
workload_path0: 'slo/src' + language0: '.NET SDK' + workload_build_context0: ../.. + workload_build_options0: -f Dockerfile + + - uses: actions/upload-artifact@v3 + if: always() && env.DOCKER_REPO != null + env: + DOCKER_REPO: ${{ secrets.SLO_DOCKER_REPO }} + with: + name: slo-logs + path: logs/ diff --git a/CHANGELOG.md b/CHANGELOG.md index fad8adde..5f25bbaa 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,12 @@ +## v0.1.5 +- Fix timeout error on create session +- Fix transport error on delete session + +## v0.1.4 +- Add exception throwing when results truncated +- lint: add line feed at file end + +## v0.1.3 - Add static auth ## v0.1.1 - Add static code analysis diff --git a/README.md b/README.md index 6fb188e0..e7498983 100644 --- a/README.md +++ b/README.md @@ -92,4 +92,4 @@ foreach (var row in resultSet.Rows) ## Examples -See **[ydb-dotnet-examples](https://github.com/ydb-platform/ydb-dotnet-examples)**. +See **[examples folder](https://github.com/ydb-platform/ydb-dotnet-sdk/tree/main/examples)** diff --git a/examples/README.md b/examples/README.md new file mode 100644 index 00000000..b1ff27e6 --- /dev/null +++ b/examples/README.md @@ -0,0 +1,35 @@ +# YDB .NET SDK Examples + +## Prerequisites +.NET 6 + +## Running examples + +1. Clone repository + ```bash + git clone https://github.com/ydb-platform/ydb-dotnet-sdk.git + ``` + +2. Build solution + ```bash + cd ydb-dotnet-sdk/examples/src + dotnet build + ``` + +3. Run example + ```bash + cd + dotnet run -e -d + ``` + +## Provided examples + +### BasicExample +Demonstrates basic operations with YDB, including: +* Driver initialization +* Table client initialization +* Table creation via SchemeQuery (DDL) +* Data queries (OLTP) & transactions (read, modify) +* Interactive transactions +* ReadTable for streaming read of table contents +* ScanQuery for streaming wide queries (OLAP) diff --git a/examples/src/BasicExample/BasicExample.cs b/examples/src/BasicExample/BasicExample.cs new file mode 100644 index 00000000..4bb2607d --- /dev/null +++ b/examples/src/BasicExample/BasicExample.cs @@ -0,0 +1,81 @@ +using System; +using System.Security.Cryptography.X509Certificates; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Ydb.Sdk.Auth; +using Ydb.Sdk.Services.Table; + +namespace Ydb.Sdk.Examples; + +internal partial class BasicExample : TableExampleBase +{ + private BasicExample(TableClient client, string database, string path) + : base(client, database, path) + { + } + + public static async Task Run( + string endpoint, + string database, + ICredentialsProvider credentialsProvider, + X509Certificate? 
customServerCertificate, + string path, + ILoggerFactory loggerFactory) + { + var config = new DriverConfig( + endpoint: endpoint, + database: database, + credentials: credentialsProvider, + customServerCertificate: customServerCertificate + ); + + await using var driver = await Driver.CreateInitialized( + config: config, + loggerFactory: loggerFactory + ); + + using var tableClient = new TableClient(driver, new TableClientConfig()); + + var example = new BasicExample(tableClient, database, path); + + await example.SchemeQuery(); + await example.FillData(); + await example.SimpleSelect(1); + await example.SimpleUpsert(10, "Coming soon", DateTime.UtcNow); + await example.SimpleSelect(10); + await example.InteractiveTx(); + await example.ReadTable(); + await example.ScanQuery(DateTime.Parse("2007-01-01")); + } + + private static ExecuteDataQuerySettings DefaultDataQuerySettings => + new() + { + // Indicates that the client is no longer interested in the result of the operation after the + // specified duration, measured from the moment the operation arrives at the server. + // The server returns status code TIMEOUT if the operation result is not available within + // this time period. TIMEOUT doesn't indicate the outcome of the operation: + // it might have been completed or cancelled. + OperationTimeout = TimeSpan.FromSeconds(1), + + // Transport timeout, counted from the moment the operation was sent to the server. It is useful in case + // of network issues, so that a query doesn't hang forever. + // It is recommended to set it to a larger value than OperationTimeout to give the + // server some time to issue a response. + TransportTimeout = TimeSpan.FromSeconds(5), + + // Keep the query compilation result in the query cache or not. Should be false for ad-hoc queries, + // and true (the default) for high-RPS queries. 
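+            // In this example, SimpleSelect, SimpleUpsert and InteractiveTx reuse the same parameterized
+            // query text, so they pass DefaultCachedDataQuerySettings (defined below), while the one-off
+            // FillData statement keeps caching disabled.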
+ KeepInQueryCache = false + }; + + private ExecuteDataQuerySettings DefaultCachedDataQuerySettings + { + get + { + var settings = DefaultDataQuerySettings; + settings.KeepInQueryCache = true; + return settings; + } + } +} \ No newline at end of file diff --git a/examples/src/BasicExample/BasicExample.csproj b/examples/src/BasicExample/BasicExample.csproj new file mode 100644 index 00000000..bc6690be --- /dev/null +++ b/examples/src/BasicExample/BasicExample.csproj @@ -0,0 +1,28 @@ + + + + + Exe + net6.0 + enable + Ydb.Sdk.Examples.BasicExample + Ydb.Sdk.Examples + + + + git + https://github.com/ydb-platform/ydb-dotnet-examples + https://github.com/ydb-platform/ydb-dotnet-examples + YANDEX LLC + + + + + + + + + + + + diff --git a/examples/src/BasicExample/DataQuery.cs b/examples/src/BasicExample/DataQuery.cs new file mode 100644 index 00000000..f27406ac --- /dev/null +++ b/examples/src/BasicExample/DataQuery.cs @@ -0,0 +1,88 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Ydb.Sdk.Services.Table; +using Ydb.Sdk.Value; + +namespace Ydb.Sdk.Examples; + +internal partial class BasicExample +{ + private async Task SimpleSelect(ulong id) + { + var response = await Client.SessionExec(async session => + { + var query = @$" + PRAGMA TablePathPrefix('{BasePath}'); + + DECLARE $id AS Uint64; + + SELECT + series_id, + title, + release_date + FROM series + WHERE series_id = $id; + "; + + return await session.ExecuteDataQuery( + query: query, + txControl: TxControl.BeginSerializableRW().Commit(), + parameters: new Dictionary + { + { "$id", YdbValue.MakeUint64(id) } + }, + settings: DefaultCachedDataQuerySettings + ); + }); + + response.Status.EnsureSuccess(); + + var queryResponse = (ExecuteDataQueryResponse)response; + var resultSet = queryResponse.Result.ResultSets[0]; + + Console.WriteLine($"> SimpleSelect, " + + $"columns: {resultSet.Columns.Count}, " + + $"rows: {resultSet.Rows.Count}, " + + $"truncated: {resultSet.Truncated}"); + + foreach (var row in resultSet.Rows) + { + Console.WriteLine($"> Series, " + + $"series_id: {(ulong?)row["series_id"]}, " + + $"title: {(string?)row["title"]}, " + + $"release_date: {(DateTime?)row["release_date"]}"); + } + } + + private async Task SimpleUpsert(ulong id, string title, DateTime date) + { + var response = await Client.SessionExec(async session => + { + var query = @$" + PRAGMA TablePathPrefix('{BasePath}'); + + DECLARE $id AS Uint64; + DECLARE $title AS Utf8; + DECLARE $release_date AS Date; + + UPSERT INTO series (series_id, title, release_date) VALUES + ($id, $title, $release_date); + "; + + return await session.ExecuteDataQuery( + query: query, + txControl: TxControl.BeginSerializableRW().Commit(), + parameters: new Dictionary + { + { "$id", YdbValue.MakeUint64(id) }, + { "$title", YdbValue.MakeUtf8(title) }, + { "$release_date", YdbValue.MakeDate(date) } + }, + settings: DefaultCachedDataQuerySettings + ); + }); + + response.Status.EnsureSuccess(); + } +} \ No newline at end of file diff --git a/examples/src/BasicExample/FillData.cs b/examples/src/BasicExample/FillData.cs new file mode 100644 index 00000000..b4f2fb48 --- /dev/null +++ b/examples/src/BasicExample/FillData.cs @@ -0,0 +1,201 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading.Tasks; +using Ydb.Sdk.Services.Table; +using Ydb.Sdk.Value; + +namespace Ydb.Sdk.Examples; + +internal partial class BasicExample +{ + // Fill sample tables with initial data. 
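+    // The query below declares $seriesData, $seasonsData and $episodesData as List<Struct<...>> parameters
+    // (built in GetDataParams from MakeStruct/MakeList values) and bulk-loads all three tables in a single
+    // transaction via REPLACE INTO ... SELECT * FROM AS_TABLE($param).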
+ private async Task FillData() + { + var response = await Client.SessionExec(async session => + { + var query = @$" + PRAGMA TablePathPrefix('{BasePath}'); + + DECLARE $seriesData AS List>; + + DECLARE $seasonsData AS List>; + + DECLARE $episodesData AS List>; + + REPLACE INTO series + SELECT * FROM AS_TABLE($seriesData); + + REPLACE INTO seasons + SELECT * FROM AS_TABLE($seasonsData); + + REPLACE INTO episodes + SELECT * FROM AS_TABLE($episodesData); + "; + + return await session.ExecuteDataQuery( + query: query, + txControl: TxControl.BeginSerializableRW().Commit(), + parameters: GetDataParams(), + settings: DefaultDataQuerySettings + ); + }); + + response.Status.EnsureSuccess(); + } + + internal record Series(int SeriesId, string Title, DateTime ReleaseDate, string Info); + + internal record Season(int SeriesId, int SeasonId, string Title, DateTime FirstAired, DateTime LastAired); + + internal record Episode(int SeriesId, int SeasonId, int EpisodeId, string Title, DateTime AirDate); + + private static Dictionary GetDataParams() + { + var series = new Series[] + { + new(SeriesId: 1, Title: "IT Crowd", ReleaseDate: DateTime.Parse("2006-02-03"), + Info: "The IT Crowd is a British sitcom produced by Channel 4, written by Graham Linehan, " + + "produced by Ash Atalla and starring Chris O'Dowd, Richard Ayoade, Katherine Parkinson, " + + "and Matt Berry."), + new(SeriesId: 2, Title: "Silicon Valley", ReleaseDate: DateTime.Parse("2014-04-06"), + Info: "Silicon Valley is an American comedy television series created by Mike Judge, " + + "John Altschuler and Dave Krinsky. The series focuses on five young men who founded " + + "a startup company in Silicon Valley.") + }; + + var seasons = new Season[] + { + new(1, 1, "Season 1", DateTime.Parse("2006-02-03"), DateTime.Parse("2006-03-03")), + new(1, 2, "Season 2", DateTime.Parse("2007-08-24"), DateTime.Parse("2007-09-28")), + new(1, 3, "Season 3", DateTime.Parse("2008-11-21"), DateTime.Parse("2008-12-26")), + new(1, 4, "Season 4", DateTime.Parse("2010-06-25"), DateTime.Parse("2010-07-30")), + new(2, 1, "Season 1", DateTime.Parse("2014-04-06"), DateTime.Parse("2014-06-01")), + new(2, 2, "Season 2", DateTime.Parse("2015-04-12"), DateTime.Parse("2015-06-14")), + new(2, 3, "Season 3", DateTime.Parse("2016-04-24"), DateTime.Parse("2016-06-26")), + new(2, 4, "Season 4", DateTime.Parse("2017-04-23"), DateTime.Parse("2017-06-25")), + new(2, 5, "Season 5", DateTime.Parse("2018-03-25"), DateTime.Parse("2018-05-13")) + }; + + var episodes = new Episode[] + { + new(1, 1, 1, "Yesterday's Jam", DateTime.Parse("2006-02-03")), + new(1, 1, 2, "Calamity Jen", DateTime.Parse("2006-02-03")), + new(1, 1, 3, "Fifty-Fifty", DateTime.Parse("2006-02-10")), + new(1, 1, 4, "The Red Door", DateTime.Parse("2006-02-17")), + new(1, 1, 5, "The Haunting of Bill Crouse", DateTime.Parse("2006-02-24")), + new(1, 1, 6, "Aunt Irma Visits", DateTime.Parse("2006-03-03")), + new(1, 2, 1, "The Work Outing", DateTime.Parse("2006-08-24")), + new(1, 2, 2, "Return of the Golden Child", DateTime.Parse("2007-08-31")), + new(1, 2, 3, "Moss and the German", DateTime.Parse("2007-09-07")), + new(1, 2, 4, "The Dinner Party", DateTime.Parse("2007-09-14")), + new(1, 2, 5, "Smoke and Mirrors", DateTime.Parse("2007-09-21")), + new(1, 2, 6, "Men Without Women", DateTime.Parse("2007-09-28")), + new(1, 3, 1, "From Hell", DateTime.Parse("2008-11-21")), + new(1, 3, 2, "Are We Not Men?", DateTime.Parse("2008-11-28")), + new(1, 3, 3, "Tramps Like Us", DateTime.Parse("2008-12-05")), + new(1, 3, 4, "The Speech", 
DateTime.Parse("2008-12-12")), + new(1, 3, 5, "Friendface", DateTime.Parse("2008-12-19")), + new(1, 3, 6, "Calendar Geeks", DateTime.Parse("2008-12-26")), + new(1, 4, 1, "Jen The Fredo", DateTime.Parse("2010-06-25")), + new(1, 4, 2, "The Final Countdown", DateTime.Parse("2010-07-02")), + new(1, 4, 3, "Something Happened", DateTime.Parse("2010-07-09")), + new(1, 4, 4, "Italian For Beginners", DateTime.Parse("2010-07-16")), + new(1, 4, 5, "Bad Boys", DateTime.Parse("2010-07-23")), + new(1, 4, 6, "Reynholm vs Reynholm", DateTime.Parse("2010-07-30")), + new(2, 1, 1, "Minimum Viable Product", DateTime.Parse("2014-04-06")), + new(2, 1, 2, "The Cap Table", DateTime.Parse("2014-04-13")), + new(2, 1, 3, "Articles of Incorporation", DateTime.Parse("2014-04-20")), + new(2, 1, 4, "Fiduciary Duties", DateTime.Parse("2014-04-27")), + new(2, 1, 5, "Signaling Risk", DateTime.Parse("2014-05-04")), + new(2, 1, 6, "Third Party Insourcing", DateTime.Parse("2014-05-11")), + new(2, 1, 7, "Proof of Concept", DateTime.Parse("2014-05-18")), + new(2, 1, 8, "Optimal Tip-to-Tip Efficiency", DateTime.Parse("2014-06-01")), + new(2, 2, 1, "Sand Hill Shuffle", DateTime.Parse("2015-04-12")), + new(2, 2, 2, "Runaway Devaluation", DateTime.Parse("2015-04-19")), + new(2, 2, 3, "Bad Money", DateTime.Parse("2015-04-26")), + new(2, 2, 4, "The Lady", DateTime.Parse("2015-05-03")), + new(2, 2, 5, "Server Space", DateTime.Parse("2015-05-10")), + new(2, 2, 6, "Homicide", DateTime.Parse("2015-05-17")), + new(2, 2, 7, "Adult Content", DateTime.Parse("2015-05-24")), + new(2, 2, 8, "White Hat/Black Hat", DateTime.Parse("2015-05-31")), + new(2, 2, 9, "Binding Arbitration", DateTime.Parse("2015-06-07")), + new(2, 2, 10, "Two Days of the Condor", DateTime.Parse("2015-06-14")), + new(2, 3, 1, "Founder Friendly", DateTime.Parse("2016-04-24")), + new(2, 3, 2, "Two in the Box", DateTime.Parse("2016-05-01")), + new(2, 3, 3, "Meinertzhagen's Haversack", DateTime.Parse("2016-05-08")), + new(2, 3, 4, "Maleant Data Systems Solutions", DateTime.Parse("2016-05-15")), + new(2, 3, 5, "The Empty Chair", DateTime.Parse("2016-05-22")), + new(2, 3, 6, "Bachmanity Insanity", DateTime.Parse("2016-05-29")), + new(2, 3, 7, "To Build a Better Beta", DateTime.Parse("2016-06-05")), + new(2, 3, 8, "Bachman's Earnings Over-Ride", DateTime.Parse("2016-06-12")), + new(2, 3, 9, "Daily Active Users", DateTime.Parse("2016-06-19")), + new(2, 3, 10, "The Uptick", DateTime.Parse("2016-06-26")), + new(2, 4, 1, "Success Failure", DateTime.Parse("2017-04-23")), + new(2, 4, 2, "Terms of Service", DateTime.Parse("2017-04-30")), + new(2, 4, 3, "Intellectual Property", DateTime.Parse("2017-05-07")), + new(2, 4, 4, "Teambuilding Exercise", DateTime.Parse("2017-05-14")), + new(2, 4, 5, "The Blood Boy", DateTime.Parse("2017-05-21")), + new(2, 4, 6, "Customer Service", DateTime.Parse("2017-05-28")), + new(2, 4, 7, "The Patent Troll", DateTime.Parse("2017-06-04")), + new(2, 4, 8, "The Keenan Vortex", DateTime.Parse("2017-06-11")), + new(2, 4, 9, "Hooli-Con", DateTime.Parse("2017-06-18")), + new(2, 4, 10, "Server Error", DateTime.Parse("2017-06-25")), + new(2, 5, 1, "Grow Fast or Die Slow", DateTime.Parse("2018-03-25")), + new(2, 5, 2, "Reorientation", DateTime.Parse("2018-04-01")), + new(2, 5, 3, "Chief Operating Officer", DateTime.Parse("2018-04-08")), + new(2, 5, 4, "Tech Evangelist", DateTime.Parse("2018-04-15")), + new(2, 5, 5, "Facial Recognition", DateTime.Parse("2018-04-22")), + new(2, 5, 6, "Artificial Emotional Intelligence", DateTime.Parse("2018-04-29")), + new(2, 5, 7, 
"Initial Coin Offering", DateTime.Parse("2018-05-06")), + new(2, 5, 8, "Fifty-One Percent", DateTime.Parse("2018-05-13")) + }; + + var seriesData = series.Select(s => YdbValue.MakeStruct(new Dictionary + { + { "series_id", YdbValue.MakeUint64((ulong)s.SeriesId) }, + { "title", YdbValue.MakeUtf8(s.Title) }, + { "series_info", YdbValue.MakeUtf8(s.Info) }, + { "release_date", YdbValue.MakeDate(s.ReleaseDate) } + })).ToList(); + + var seasonsData = seasons.Select(s => YdbValue.MakeStruct(new Dictionary + { + { "series_id", YdbValue.MakeUint64((ulong)s.SeriesId) }, + { "season_id", YdbValue.MakeUint64((ulong)s.SeasonId) }, + { "title", YdbValue.MakeUtf8(s.Title) }, + { "first_aired", YdbValue.MakeDate(s.FirstAired) }, + { "last_aired", YdbValue.MakeDate(s.LastAired) } + })).ToList(); + + var episodesData = episodes.Select(e => YdbValue.MakeStruct(new Dictionary + { + { "series_id", YdbValue.MakeUint64((ulong)e.SeriesId) }, + { "season_id", YdbValue.MakeUint64((ulong)e.SeasonId) }, + { "episode_id", YdbValue.MakeUint64((ulong)e.EpisodeId) }, + { "title", YdbValue.MakeUtf8(e.Title) }, + { "air_date", YdbValue.MakeDate(e.AirDate) } + })).ToList(); + + return new Dictionary + { + { "$seriesData", YdbValue.MakeList(seriesData) }, + { "$seasonsData", YdbValue.MakeList(seasonsData) }, + { "$episodesData", YdbValue.MakeList(episodesData) } + }; + } +} \ No newline at end of file diff --git a/examples/src/BasicExample/InteractiveTx.cs b/examples/src/BasicExample/InteractiveTx.cs new file mode 100644 index 00000000..cfeca9ff --- /dev/null +++ b/examples/src/BasicExample/InteractiveTx.cs @@ -0,0 +1,75 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Ydb.Sdk.Services.Table; +using Ydb.Sdk.Value; + +namespace Ydb.Sdk.Examples; + +internal partial class BasicExample +{ + private async Task InteractiveTx() + { + var execResponse = await Client.SessionExec(async session => + { + var query1 = @$" + PRAGMA TablePathPrefix('{BasePath}'); + + DECLARE $series_id AS Uint64; + DECLARE $season_id AS Uint64; + + SELECT first_aired FROM seasons + WHERE series_id = $series_id AND season_id = $season_id; + "; + + // Execute first query (no transaction commit) + var response = await session.ExecuteDataQuery( + query: query1, + txControl: TxControl.BeginSerializableRW(), + parameters: new Dictionary + { + { "$series_id", YdbValue.MakeUint64(1) }, + { "$season_id", YdbValue.MakeUint64(3) } + }, + settings: DefaultCachedDataQuerySettings + ); + + if (!response.Status.IsSuccess || response.Tx is null) + { + return response; + } + + // Perform some client logic + var firstAired = (DateTime?)response.Result.ResultSets[0].Rows[0]["first_aired"]; + var newAired = firstAired!.Value.AddDays(2); + + var query2 = @$" + PRAGMA TablePathPrefix('{BasePath}'); + + DECLARE $series_id AS Uint64; + DECLARE $season_id AS Uint64; + DECLARE $air_date AS Date; + + UPSERT INTO seasons (series_id, season_id, first_aired) VALUES + ($series_id, $season_id, $air_date); + "; + + // Execute second query and commit transaction. 
+ response = await session.ExecuteDataQuery( + query: query2, + TxControl.Tx(response.Tx).Commit(), + parameters: new Dictionary + { + { "$series_id", YdbValue.MakeUint64(1) }, + { "$season_id", YdbValue.MakeUint64(3) }, + { "$air_date", YdbValue.MakeDate(newAired) } + }, + settings: DefaultCachedDataQuerySettings + ); + + return response; + }); + + execResponse.Status.EnsureSuccess(); + } +} \ No newline at end of file diff --git a/examples/src/BasicExample/Program.cs b/examples/src/BasicExample/Program.cs new file mode 100644 index 00000000..e3e5b38d --- /dev/null +++ b/examples/src/BasicExample/Program.cs @@ -0,0 +1,59 @@ +using System; +using System.Threading.Tasks; +using CommandLine; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; + +namespace Ydb.Sdk.Examples; + +internal class CmdOptions +{ + [Option('e', "endpoint", Required = true, HelpText = "Database endpoint")] + public string Endpoint { get; set; } = ""; + + [Option('d', "database", Required = true, HelpText = "Database name")] + public string Database { get; set; } = ""; + + [Option('p', "path", HelpText = "Base path for tables")] + public string Path { get; set; } = "ydb-dotnet-basic"; + + [Option("anonymous", Required = false, HelpText = "Fallback anonymous")] + public bool FallbackAnonymous { get; set; } = false; +} + +internal static class Program +{ + private static ServiceProvider GetServiceProvider() + { + return new ServiceCollection() + .AddLogging(configure => configure.AddConsole().SetMinimumLevel(LogLevel.Information)) + .BuildServiceProvider(); + } + + private static async Task Run(CmdOptions cmdOptions) + { + await using var serviceProvider = GetServiceProvider(); + var loggerFactory = serviceProvider.GetService(); + + loggerFactory ??= NullLoggerFactory.Instance; + + await BasicExample.Run( + endpoint: cmdOptions.Endpoint, + database: cmdOptions.Database, + credentialsProvider: await AuthUtils.MakeCredentialsFromEnv( + fallbackAnonymous: cmdOptions.FallbackAnonymous, + loggerFactory: loggerFactory), + customServerCertificate: AuthUtils.GetCustomServerCertificate(), + path: cmdOptions.Path, + loggerFactory: loggerFactory + ); + } + + private static async Task Main(string[] args) + { + AppContext.SetSwitch("System.Net.Http.SocketsHttpHandler.Http2UnencryptedSupport", true); + + await Parser.Default.ParseArguments(args).WithParsedAsync(Run); + } +} \ No newline at end of file diff --git a/examples/src/BasicExample/ReadTable.cs b/examples/src/BasicExample/ReadTable.cs new file mode 100644 index 00000000..c74e4f09 --- /dev/null +++ b/examples/src/BasicExample/ReadTable.cs @@ -0,0 +1,35 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Ydb.Sdk.Services.Table; + +namespace Ydb.Sdk.Examples; + +internal partial class BasicExample +{ + private async Task ReadTable() + { + var readStream = Client.ReadTable( + FullTablePath("seasons"), + new ReadTableSettings + { + Columns = new List { "series_id", "season_id", "first_aired" }, + RowLimit = 5, + Ordered = true + }); + + while (await readStream.Next()) + { + readStream.Response.EnsureSuccess(); + var resultSet = readStream.Response.Result.ResultSet; + + foreach (var row in resultSet.Rows) + { + Console.WriteLine($"> ReadTable seasons, " + + $"series_id: {(ulong?)row["series_id"]}, " + + $"season_id: {(ulong?)row["season_id"]}, " + + $"first_aired: {(DateTime?)row["first_aired"]}"); + } + } + } +} \ No newline at end of file diff --git 
a/examples/src/BasicExample/ScanQuery.cs b/examples/src/BasicExample/ScanQuery.cs new file mode 100644 index 00000000..22ca3fee --- /dev/null +++ b/examples/src/BasicExample/ScanQuery.cs @@ -0,0 +1,48 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Ydb.Sdk.Value; + +namespace Ydb.Sdk.Examples; + +internal partial class BasicExample +{ + private async Task ScanQuery(DateTime airFrom) + { + var query = @$" + PRAGMA TablePathPrefix('{BasePath}'); + + DECLARE $air_from AS Date; + + SELECT series_id, season_id, COUNT(*) AS episodes_count + FROM episodes + WHERE air_date >= $air_from + GROUP BY series_id, season_id + ORDER BY series_id, season_id; + "; + + var scanStream = Client.ExecuteScanQuery( + query, + new Dictionary + { + { "$air_from", YdbValue.MakeDate(airFrom) } + }); + + while (await scanStream.Next()) + { + scanStream.Response.EnsureSuccess(); + + var resultSet = scanStream.Response.Result.ResultSetPart; + if (resultSet != null) + { + foreach (var row in resultSet.Rows) + { + Console.WriteLine($"> ScanQuery, " + + $"series_id: {(ulong?)row["series_id"]}, " + + $"season_id: {(ulong?)row["season_id"]}, " + + $"episodes_count: {(ulong)row["episodes_count"]}"); + } + } + } + } +} \ No newline at end of file diff --git a/examples/src/BasicExample/SchemeQuery.cs b/examples/src/BasicExample/SchemeQuery.cs new file mode 100644 index 00000000..216ef271 --- /dev/null +++ b/examples/src/BasicExample/SchemeQuery.cs @@ -0,0 +1,43 @@ +using System.Threading.Tasks; + +namespace Ydb.Sdk.Examples; + +internal partial class BasicExample +{ + // Execute Scheme (DDL) query to create sample tables. + private async Task SchemeQuery() + { + var response = await Client.SessionExec(async session => + await session.ExecuteSchemeQuery(@$" + PRAGMA TablePathPrefix('{BasePath}'); + + CREATE TABLE series ( + series_id Uint64, + title Utf8, + series_info Utf8, + release_date Date, + PRIMARY KEY (series_id) + ); + + CREATE TABLE seasons ( + series_id Uint64, + season_id Uint64, + title Utf8, + first_aired Date, + last_aired Date, + PRIMARY KEY (series_id, season_id) + ); + + CREATE TABLE episodes ( + series_id Uint64, + season_id Uint64, + episode_id Uint64, + title Utf8, + air_date Date, + PRIMARY KEY (series_id, season_id, episode_id) + ); + ")); + + response.Status.EnsureSuccess(); + } +} \ No newline at end of file diff --git a/examples/src/Common/AuthUtils.cs b/examples/src/Common/AuthUtils.cs new file mode 100644 index 00000000..5a9824d5 --- /dev/null +++ b/examples/src/Common/AuthUtils.cs @@ -0,0 +1,67 @@ +using System; +using System.Security.Cryptography.X509Certificates; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Ydb.Sdk.Auth; +using Ydb.Sdk.Yc; + +namespace Ydb.Sdk.Examples; + +public static class AuthUtils +{ + public static async Task MakeCredentialsFromEnv( + bool fallbackAnonymous = false, + ILoggerFactory? 
loggerFactory = null) + { + var saFileValue = Environment.GetEnvironmentVariable("YDB_SERVICE_ACCOUNT_KEY_FILE_CREDENTIALS"); + if (!string.IsNullOrEmpty(saFileValue)) + { + var saProvider = new ServiceAccountProvider( + saFilePath: saFileValue, + loggerFactory: loggerFactory); + await saProvider.Initialize(); + return saProvider; + } + + var anonymousValue = Environment.GetEnvironmentVariable("YDB_ANONYMOUS_CREDENTIALS"); + if (anonymousValue != null && IsTrueValue(anonymousValue)) + { + return new AnonymousProvider(); + } + + var metadataValue = Environment.GetEnvironmentVariable("YDB_METADATA_CREDENTIALS"); + if (metadataValue != null && IsTrueValue(metadataValue)) + { + var metadataProvider = new MetadataProvider( + loggerFactory: loggerFactory); + await metadataProvider.Initialize(); + return metadataProvider; + } + + var tokenValue = Environment.GetEnvironmentVariable("YDB_ACCESS_TOKEN_CREDENTIALS"); + if (!string.IsNullOrEmpty(tokenValue)) + { + return new TokenProvider(tokenValue); + } + + if (fallbackAnonymous) + { + return new AnonymousProvider(); + } + + throw new InvalidOperationException("Failed to parse credentials from environmet, no valid options found."); + } + + public static X509Certificate GetCustomServerCertificate() + { + return YcCerts.GetDefaultServerCertificate(); + } + + private static bool IsTrueValue(string value) + { + return + value == "1" || + value.ToLower() == "yes" || + value.ToLower() == "true"; + } +} \ No newline at end of file diff --git a/examples/src/Common/Common.csproj b/examples/src/Common/Common.csproj new file mode 100644 index 00000000..7c6e5df5 --- /dev/null +++ b/examples/src/Common/Common.csproj @@ -0,0 +1,23 @@ + + + + net6.0 + Ydb.Sdk.Examples.Common + Ydb.Sdk.Examples + enable + + + + git + https://github.com/ydb-platform/ydb-dotnet-examples + https://github.com/ydb-platform/ydb-dotnet-examples + YANDEX LLC + + + + + + + + + diff --git a/examples/src/Common/TableExampleBase.cs b/examples/src/Common/TableExampleBase.cs new file mode 100644 index 00000000..5e12f1c3 --- /dev/null +++ b/examples/src/Common/TableExampleBase.cs @@ -0,0 +1,20 @@ +using Ydb.Sdk.Services.Table; + +namespace Ydb.Sdk.Examples; + +public class TableExampleBase +{ + protected TableClient Client { get; } + protected string BasePath { get; } + + protected TableExampleBase(TableClient client, string database, string path) + { + Client = client; + BasePath = string.Join('/', database, path); + } + + protected string FullTablePath(string table) + { + return string.Join('/', BasePath, table); + } +} \ No newline at end of file diff --git a/examples/src/YdbExamples.sln b/examples/src/YdbExamples.sln new file mode 100644 index 00000000..926c9a52 --- /dev/null +++ b/examples/src/YdbExamples.sln @@ -0,0 +1,31 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 16 +VisualStudioVersion = 16.0.31205.134 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Common", "Common\Common.csproj", "{59F508A9-5EE0-4A6C-9580-89FC8C6CD4CE}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "BasicExample", "BasicExample\BasicExample.csproj", "{9DAD5FF3-B7C2-4A9E-B4B2-A0FBD6097727}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {59F508A9-5EE0-4A6C-9580-89FC8C6CD4CE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{59F508A9-5EE0-4A6C-9580-89FC8C6CD4CE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {59F508A9-5EE0-4A6C-9580-89FC8C6CD4CE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {59F508A9-5EE0-4A6C-9580-89FC8C6CD4CE}.Release|Any CPU.Build.0 = Release|Any CPU + {9DAD5FF3-B7C2-4A9E-B4B2-A0FBD6097727}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {9DAD5FF3-B7C2-4A9E-B4B2-A0FBD6097727}.Debug|Any CPU.Build.0 = Debug|Any CPU + {9DAD5FF3-B7C2-4A9E-B4B2-A0FBD6097727}.Release|Any CPU.ActiveCfg = Release|Any CPU + {9DAD5FF3-B7C2-4A9E-B4B2-A0FBD6097727}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {1E75A264-F26E-469D-AD5C-FBBA4C34F249} + EndGlobalSection +EndGlobal diff --git a/examples/src/YdbExamples.sln.DotSettings b/examples/src/YdbExamples.sln.DotSettings new file mode 100644 index 00000000..0625ed27 --- /dev/null +++ b/examples/src/YdbExamples.sln.DotSettings @@ -0,0 +1,69 @@ + + <?xml version="1.0" encoding="utf-16"?><Profile name="Custom Cleanup"><CppCodeStyleCleanupDescriptor ArrangeBraces="True" ArrangeAuto="True" ArrangeFunctionDeclarations="True" ArrangeNestedNamespaces="True" ArrangeTypeAliases="True" ArrangeCVQualifiers="True" ArrangeSlashesInIncludeDirectives="True" ArrangeOverridingFunctions="True" SortIncludeDirectives="True" SortMemberInitializers="True" /><CppReformatCode>True</CppReformatCode><CSCodeStyleAttributes ArrangeVarStyle="True" ArrangeTypeAccessModifier="True" ArrangeTypeMemberAccessModifier="True" SortModifiers="True" AddMissingParentheses="True" ArrangeAttributes="True" ArrangeCodeBodyStyle="True" ArrangeTrailingCommas="True" ArrangeObjectCreation="True" ArrangeDefaultValue="True" RemoveRedundantParentheses="True" ArrangeNamespaces="True" /><FSReformatCode>True</FSReformatCode><ShaderLabReformatCode>True</ShaderLabReformatCode><Xaml.RedundantFreezeAttribute>True</Xaml.RedundantFreezeAttribute><Xaml.RemoveRedundantModifiersAttribute>True</Xaml.RemoveRedundantModifiersAttribute><Xaml.RemoveRedundantNameAttribute>True</Xaml.RemoveRedundantNameAttribute><Xaml.RemoveRedundantResource>True</Xaml.RemoveRedundantResource><Xaml.RemoveRedundantCollectionProperty>True</Xaml.RemoveRedundantCollectionProperty><Xaml.RemoveRedundantAttachedPropertySetter>True</Xaml.RemoveRedundantAttachedPropertySetter><Xaml.RemoveRedundantStyledValue>True</Xaml.RemoveRedundantStyledValue><Xaml.RemoveRedundantNamespaceAlias>True</Xaml.RemoveRedundantNamespaceAlias><Xaml.RemoveForbiddenResourceName>True</Xaml.RemoveForbiddenResourceName><Xaml.RemoveRedundantGridDefinitionsAttribute>True</Xaml.RemoveRedundantGridDefinitionsAttribute><Xaml.RemoveRedundantUpdateSourceTriggerAttribute>True</Xaml.RemoveRedundantUpdateSourceTriggerAttribute><Xaml.RemoveRedundantBindingModeAttribute>True</Xaml.RemoveRedundantBindingModeAttribute><Xaml.RemoveRedundantGridSpanAttribut>True</Xaml.RemoveRedundantGridSpanAttribut><XMLReformatCode>True</XMLReformatCode><CSArrangeQualifiers>True</CSArrangeQualifiers><CSFixBuiltinTypeReferences>True</CSFixBuiltinTypeReferences><HtmlReformatCode>True</HtmlReformatCode><VBReformatCode>True</VBReformatCode><CSReformatCode>True</CSReformatCode><FormatAttributeQuoteDescriptor>True</FormatAttributeQuoteDescriptor><IDEA_SETTINGS>&lt;profile version="1.0"&gt; + &lt;option name="myName" value="Custom Cleanup" /&gt; + &lt;inspection_tool class="ES6ShorthandObjectProperty" enabled="false" level="INFORMATION" 
enabled_by_default="false" /&gt; + &lt;inspection_tool class="JSArrowFunctionBracesCanBeRemoved" enabled="false" level="INFORMATION" enabled_by_default="false" /&gt; + &lt;inspection_tool class="JSPrimitiveTypeWrapperUsage" enabled="false" level="WARNING" enabled_by_default="false" /&gt; + &lt;inspection_tool class="JSRemoveUnnecessaryParentheses" enabled="false" level="INFORMATION" enabled_by_default="false" /&gt; + &lt;inspection_tool class="JSUnnecessarySemicolon" enabled="false" level="WARNING" enabled_by_default="false" /&gt; + &lt;inspection_tool class="TypeScriptExplicitMemberType" enabled="false" level="INFORMATION" enabled_by_default="false" /&gt; + &lt;inspection_tool class="UnnecessaryContinueJS" enabled="false" level="WARNING" enabled_by_default="false" /&gt; + &lt;inspection_tool class="UnnecessaryLabelJS" enabled="false" level="WARNING" enabled_by_default="false" /&gt; + &lt;inspection_tool class="UnnecessaryLabelOnBreakStatementJS" enabled="false" level="WARNING" enabled_by_default="false" /&gt; + &lt;inspection_tool class="UnnecessaryLabelOnContinueStatementJS" enabled="false" level="WARNING" enabled_by_default="false" /&gt; + &lt;inspection_tool class="UnnecessaryReturnJS" enabled="false" level="WARNING" enabled_by_default="false" /&gt; + &lt;inspection_tool class="WrongPropertyKeyValueDelimiter" enabled="false" level="WEAK WARNING" enabled_by_default="false" /&gt; +&lt;/profile&gt;</IDEA_SETTINGS><RIDER_SETTINGS>&lt;profile&gt; + &lt;Language id="CSS"&gt; + &lt;Rearrange&gt;false&lt;/Rearrange&gt; + &lt;Reformat&gt;true&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="EditorConfig"&gt; + &lt;Reformat&gt;true&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="HTML"&gt; + &lt;Rearrange&gt;false&lt;/Rearrange&gt; + &lt;OptimizeImports&gt;false&lt;/OptimizeImports&gt; + &lt;Reformat&gt;true&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="HTTP Request"&gt; + &lt;Reformat&gt;true&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="Handlebars"&gt; + &lt;Reformat&gt;true&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="Ini"&gt; + &lt;Reformat&gt;true&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="JSON"&gt; + &lt;Reformat&gt;true&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="Jade"&gt; + &lt;Reformat&gt;true&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="JavaScript"&gt; + &lt;Rearrange&gt;false&lt;/Rearrange&gt; + &lt;OptimizeImports&gt;false&lt;/OptimizeImports&gt; + &lt;Reformat&gt;true&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="Markdown"&gt; + &lt;Reformat&gt;true&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="Properties"&gt; + &lt;Reformat&gt;true&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="RELAX-NG"&gt; + &lt;Reformat&gt;true&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="SQL"&gt; + &lt;Reformat&gt;true&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="XML"&gt; + &lt;Rearrange&gt;false&lt;/Rearrange&gt; + &lt;OptimizeImports&gt;false&lt;/OptimizeImports&gt; + &lt;Reformat&gt;true&lt;/Reformat&gt; + &lt;/Language&gt; + &lt;Language id="yaml"&gt; + &lt;Reformat&gt;true&lt;/Reformat&gt; + &lt;/Language&gt; +&lt;/profile&gt;</RIDER_SETTINGS><CSUseAutoProperty>True</CSUseAutoProperty><CSMakeFieldReadonly>True</CSMakeFieldReadonly><CSMakeAutoPropertyGetOnly>True</CSMakeAutoPropertyGetOnly><RemoveCodeRedundancies>True</RemoveCodeRedundancies><CSOptimizeUsings><OptimizeUsings>True</OptimizeUsings></CSOptimizeUsings><CSShortenReferences>True</CSShortenReferences></Profile> \ No 
newline at end of file diff --git a/slo/.gitignore b/slo/.gitignore new file mode 100644 index 00000000..00868c0f --- /dev/null +++ b/slo/.gitignore @@ -0,0 +1,5 @@ +data/ +.idea/ +bin/ +obj/ + diff --git a/slo/playground/README.md b/slo/playground/README.md new file mode 100644 index 00000000..eefb5cf0 --- /dev/null +++ b/slo/playground/README.md @@ -0,0 +1,40 @@ +# SLO playground + +Playground may be used for testing SLO workloads locally + +It has several services: + +- `prometheus` - storage for metrics +- `prometheus-pushgateway` - push acceptor for prometheus +- `grafana` - provides chats for metrics +- `ydb` - local instance of ydb-database to run workload with + +## Network addresses + +- Grafana dashboard: http://localhost:3000 +- Prometheus pushgateway: http://localhost:9091 +- YDB monitoring: http://localhost:8765 +- YDB GRPC: grpc://localhost:2136 +- YDB GRPC TLS: grpcs://localhost:2135 + +## Start + +```shell +docker-compose up -d +``` + +## Stop + +```shell +docker-compose down +``` + +## Configs + +Grafana's dashboards stored in `configs/grafana/provisioning/dashboards` + +## Data + +YDB databases are not persistent + +All other data like metrics and certs stored in `data/` \ No newline at end of file diff --git a/slo/playground/configs/grafana/provisioning/dashboards/dashboard.yml b/slo/playground/configs/grafana/provisioning/dashboards/dashboard.yml new file mode 100644 index 00000000..c6784142 --- /dev/null +++ b/slo/playground/configs/grafana/provisioning/dashboards/dashboard.yml @@ -0,0 +1,6 @@ +apiVersion: 1 + +providers: + - name: 'SLO' + options: + path: /etc/grafana/provisioning/dashboards diff --git a/slo/playground/configs/grafana/provisioning/dashboards/slo.json b/slo/playground/configs/grafana/provisioning/dashboards/slo.json new file mode 100644 index 00000000..69d76bf7 --- /dev/null +++ b/slo/playground/configs/grafana/provisioning/dashboards/slo.json @@ -0,0 +1,646 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { + "type": "grafana", + "uid": "-- Grafana --" + }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "target": { + "limit": 100, + "matchAny": false, + "tags": [], + "type": "dashboard" + }, + "type": "dashboard" + } + ] + }, + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "links": [], + "liveNow": false, + "panels": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 0 + }, + "id": 12, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, 
+ "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "builder", + "expr": "rate(oks[$__rate_interval]) > 0", + "hide": false, + "legendFormat": "({{sdk}}-{{sdkVersion}}) {{jobName}} OK", + "range": true, + "refId": "B" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "builder", + "expr": "rate(not_oks[$__rate_interval]) > 0", + "hide": false, + "legendFormat": "({{sdk}}-{{sdkVersion}}) {{jobName}} not OK", + "range": true, + "refId": "C" + } + ], + "title": "SLO Requests RPS", + "transformations": [], + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "axisSoftMin": 0, + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "decimals": 0, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 9, + "w": 12, + "x": 12, + "y": 0 + }, + "id": 14, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "builder", + "expr": "histogram_quantile(1, rate(attempts_bucket[$__rate_interval]))", + "hide": false, + "legendFormat": "{{sdk}}-{{sdkVersion}} {{jobName}}-{{status}}", + "range": true, + "refId": "A" + } + ], + "title": "Attempts", + "transformations": [], + "type": "timeseries" + }, + { + "collapsed": false, + "gridPos": { + "h": 1, + "w": 24, + "x": 0, + "y": 9 + }, + "id": 7, + "panels": [], + "title": "Latencies", + "type": "row" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 10 + }, + "id": 4, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "pluginVersion": "9.3.1", + "targets": [ 
+ { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "builder", + "expr": "latency{jobName=\"read\", status=\"ok\"} > 0", + "legendFormat": "{{sdk}}-{{sdkVersion}}-p{{quantile}}", + "range": true, + "refId": "A" + } + ], + "title": "Read Latencies (OK)", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 10 + }, + "id": 5, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "builder", + "expr": "latency{jobName=\"write\", status=\"ok\"} > 0", + "legendFormat": "{{sdk}}-{{sdkVersion}}-p{{quantile}}", + "range": true, + "refId": "A" + } + ], + "title": "Write Latencies (OK)", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": "auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 18 + }, + "id": 10, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "builder", + "expr": "latency{jobName=\"read\", status=\"err\"} > 0", + "legendFormat": "{{sdk}}-{{sdkVersion}}-p{{quantile}}", + "range": true, + "refId": "A" + } + ], + "title": "Read Latencies (NOT OK)", + "type": "timeseries" + }, + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "fieldConfig": { + "defaults": { + "color": { + "mode": "palette-classic" + }, + "custom": { + "axisCenteredZero": false, + "axisColorMode": "text", + "axisLabel": "", + "axisPlacement": 
"auto", + "barAlignment": 0, + "drawStyle": "line", + "fillOpacity": 0, + "gradientMode": "none", + "hideFrom": { + "legend": false, + "tooltip": false, + "viz": false + }, + "lineInterpolation": "linear", + "lineWidth": 1, + "pointSize": 5, + "scaleDistribution": { + "type": "linear" + }, + "showPoints": "auto", + "spanNulls": false, + "stacking": { + "group": "A", + "mode": "none" + }, + "thresholdsStyle": { + "mode": "off" + } + }, + "mappings": [], + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green", + "value": null + }, + { + "color": "red", + "value": 80 + } + ] + } + }, + "overrides": [] + }, + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 18 + }, + "id": 11, + "options": { + "legend": { + "calcs": [], + "displayMode": "list", + "placement": "bottom", + "showLegend": true + }, + "tooltip": { + "mode": "single", + "sort": "none" + } + }, + "targets": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "editorMode": "builder", + "expr": "latency{jobName=\"write\", status=\"err\"} > 0", + "legendFormat": "{{sdk}}-{{sdkVersion}}-p{{quantile}}", + "range": true, + "refId": "A" + } + ], + "title": "Write Latencies (NOT OK)", + "type": "timeseries" + } + ], + "refresh": "", + "revision": 1, + "schemaVersion": 38, + "style": "dark", + "tags": [], + "templating": { + "list": [ + { + "datasource": { + "type": "prometheus", + "uid": "prometheus" + }, + "filters": [], + "hide": 0, + "label": "", + "name": "filter", + "skipUrlSync": false, + "type": "adhoc" + } + ] + }, + "time": { + "from": "now-15m", + "to": "now" + }, + "timepicker": {}, + "timezone": "", + "title": "SLO", + "uid": "7CzMl5t4k", + "version": 1, + "weekStart": "" +} diff --git a/slo/playground/configs/grafana/provisioning/datasources/datasource.yml b/slo/playground/configs/grafana/provisioning/datasources/datasource.yml new file mode 100644 index 00000000..0b62b9c3 --- /dev/null +++ b/slo/playground/configs/grafana/provisioning/datasources/datasource.yml @@ -0,0 +1,11 @@ +apiVersion: 1 + +datasources: + - name: prometheus + type: prometheus + access: proxy + orgId: 1 + url: http://prometheus:9090 + basicAuth: false + isDefault: true + editable: true diff --git a/slo/playground/configs/prometheus/prometheus.yml b/slo/playground/configs/prometheus/prometheus.yml new file mode 100644 index 00000000..281b390b --- /dev/null +++ b/slo/playground/configs/prometheus/prometheus.yml @@ -0,0 +1,8 @@ +global: + scrape_interval: 1s + evaluation_interval: 1s + +scrape_configs: + - job_name: 'slo' + static_configs: + - targets: ['prometheus-pushgateway:9091'] diff --git a/slo/playground/docker-compose.yml b/slo/playground/docker-compose.yml new file mode 100644 index 00000000..dabdb5ba --- /dev/null +++ b/slo/playground/docker-compose.yml @@ -0,0 +1,124 @@ +version: '2.1' + +networks: + monitor-net: + driver: bridge + +services: + prometheus: + image: prom/prometheus:v2.44.0 + container_name: prometheus + user: "$UID:$GID" + volumes: + - ./configs/prometheus:/etc/prometheus + - ../data/prometheus:/prometheus + command: + - '--config.file=/etc/prometheus/prometheus.yml' + - '--storage.tsdb.path=/prometheus' + - '--storage.tsdb.retention.time=200h' + - '--web.enable-lifecycle' + restart: unless-stopped + ports: + - "9090:9090" + networks: + - monitor-net + + prometheus-pushgateway: + image: prom/pushgateway:v1.6.0 + container_name: prometheus-pushgateway + ports: + - "9091:9091" + networks: + - monitor-net + + grafana: + image: grafana/grafana:9.5.3 + container_name: grafana + user: 
"$UID:$GID" + volumes: + - ./configs/grafana/provisioning:/etc/grafana/provisioning + - ../data/grafana:/var/lib/grafana + environment: + - GF_SECURITY_ADMIN_USER=admin + - GF_SECURITY_ADMIN_PASSWORD=passw0rD + restart: unless-stopped + ports: + - "3000:3000" + networks: + - monitor-net + + ydb: + image: cr.yandex/yc/yandex-docker-local-ydb:23.1 + container_name: ydb + environment: + - GRPC_TLS_PORT=2135 + - GRPC_PORT=2136 + - MON_PORT=8765 + - YDB_USE_IN_MEMORY_PDISKS=true + - YDB_DEFAULT_LOG_LEVEL=NOTICE + ports: + - "2135:2135" + - "2136:2136" + - "8765:8765" + volumes: + - ../data/ydb_certs:/ydb_certs + networks: + - monitor-net + + slo-create: + build: + context: ../.. + dockerfile: slo/src/Dockerfile + command: + - 'create' + - 'http://ydb:2136' + - '/local' + - '--table-name' + - 'slo-dotnet' + - '--min-partitions-count' + - '6' + - '--max-partitions-count' + - '1000' + - '--partition-size' + - '1' + - '--initial-data-count' + - '1000' + networks: + - monitor-net + depends_on: + ydb: + condition: service_healthy + + slo-run: + build: + context: ../.. + dockerfile: slo/src/Dockerfile + command: + - 'run' + - 'http://ydb:2136' + - '/local' + - '--prom-pgw' + - 'http://prometheus-pushgateway:9091' + - '--table-name' + - 'slo-dotnet' + networks: + - monitor-net + depends_on: + slo-create: + condition: service_completed_successfully + + slo-cleanup: + build: + context: ../.. + dockerfile: slo/src/Dockerfile + command: + - 'cleanup' + - 'http://ydb:2136' + - '/local' + - '--table-name' + - 'slo-dotnet' + networks: + - monitor-net + depends_on: + slo-run: + condition: service_completed_successfully diff --git a/slo/src/Cli/Cli.cs b/slo/src/Cli/Cli.cs new file mode 100644 index 00000000..f19dfc63 --- /dev/null +++ b/slo/src/Cli/Cli.cs @@ -0,0 +1,148 @@ +using System.CommandLine; + +namespace slo.Cli; + +internal static class Cli +{ + private static readonly Argument EndpointArgument = new( + "endpoint", + "YDB endpoint to connect to"); + + private static readonly Argument DbArgument = new( + "db", + "YDB database to connect to"); + + private static readonly Option TableOption = new( + new[] { "-t", "--table-name" }, + () => "testingTable", + "table name to create\n "); + + private static readonly Option WriteTimeoutOption = new( + "--write-timeout", + () => 10000, + "write timeout milliseconds"); + + + private static readonly Option MinPartitionsCountOption = new( + "--min-partitions-count", + () => 6, + "minimum amount of partitions in table"); + + private static readonly Option MaxPartitionsCountOption = new( + "--max-partitions-count", + () => 1000, + "maximum amount of partitions in table"); + + private static readonly Option PartitionSizeOption = new( + "--partition-size", + () => 1, + "partition size in mb"); + + private static readonly Option InitialDataCountOption = new( + new[] { "-c", "--initial-data-count" }, + () => 1000, + "amount of initially created rows"); + + + private static readonly Option PromPgwOption = new( + "--prom-pgw", + "minimum amount of partitions in table") { IsRequired = true }; + + private static readonly Option ReportPeriodOption = new( + "--report-period", + () => 250, + "prometheus push period in milliseconds"); + + private static readonly Option ReadRpsOption = new( + "--read-rps", + () => 1000, + "read RPS"); + + private static readonly Option ReadTimeoutOption = new( + "--read-timeout", + () => 10000, + "read timeout milliseconds"); + + private static readonly Option WriteRpsOption = new( + "--write-rps", + () => 100, + "write RPS"); + + private static 
readonly Option TimeOption = new( + "--time", + () => 140, + "run time in seconds"); + + private static readonly Option ShutdownTimeOption = new( + "--shutdown-time", + () => 30, + "time to wait before force kill workers"); + + private static readonly Command CreateCommand = new( + "create", + "creates table in database") + { + EndpointArgument, + DbArgument, + TableOption, + MinPartitionsCountOption, + MaxPartitionsCountOption, + PartitionSizeOption, + InitialDataCountOption, + WriteTimeoutOption + }; + + + private static readonly Command CleanupCommand = new( + "cleanup", + "drops table in database") + { + EndpointArgument, + DbArgument, + TableOption, + WriteTimeoutOption + }; + + private static readonly Command RunCommand = new( + "run", + "runs workload (read and write to table with sets RPS)") + { + EndpointArgument, + DbArgument, + TableOption, + InitialDataCountOption, + PromPgwOption, + ReportPeriodOption, + ReadRpsOption, + ReadTimeoutOption, + WriteRpsOption, + WriteTimeoutOption, + TimeOption, + ShutdownTimeOption + }; + + private static readonly RootCommand RootCommand = new("SLO app") + { + CreateCommand, CleanupCommand, RunCommand + }; + + internal static async Task Run(string[] args) + { + CreateCommand.SetHandler( + async createConfig => { await CliCommands.Create(createConfig); }, + new CreateConfigBinder(EndpointArgument, DbArgument, TableOption, MinPartitionsCountOption, + MaxPartitionsCountOption, PartitionSizeOption, InitialDataCountOption, WriteTimeoutOption) + ); + + CleanupCommand.SetHandler( + async cleanUpConfig => { await CliCommands.CleanUp(cleanUpConfig); }, + new CleanUpConfigBinder(EndpointArgument, DbArgument, TableOption, WriteTimeoutOption) + ); + + RunCommand.SetHandler(async runConfig => { await CliCommands.Run(runConfig); }, + new RunConfigBinder(EndpointArgument, DbArgument, TableOption, InitialDataCountOption, PromPgwOption, + ReportPeriodOption, ReadRpsOption, ReadTimeoutOption, WriteRpsOption, WriteTimeoutOption, TimeOption, + ShutdownTimeOption)); + return await RootCommand.InvokeAsync(args); + } +} \ No newline at end of file diff --git a/slo/src/Cli/CliCommands.cs b/slo/src/Cli/CliCommands.cs new file mode 100644 index 00000000..f7251be3 --- /dev/null +++ b/slo/src/Cli/CliCommands.cs @@ -0,0 +1,96 @@ +using Prometheus; +using slo.Jobs; + +namespace slo.Cli; + +public static class CliCommands +{ + internal static async Task Create(CreateConfig config) + { + Console.WriteLine(config); + + await using var client = await Client.CreateAsync(config.Endpoint, config.Db, config.TableName); + + const int maxCreateAttempts = 10; + for (var i = 0; i < maxCreateAttempts; i++) + { + try + { + await client.Init(config.InitialDataCount, + config.PartitionSize, + config.MinPartitionsCount, + config.MaxPartitionsCount, + TimeSpan.FromMilliseconds(config.WriteTimeout)); + break; + } + catch (Exception e) + { + Console.WriteLine(e); + Thread.Sleep(millisecondsTimeout: 1000); + } + } + } + + internal static async Task CleanUp(CleanUpConfig config) + { + Console.WriteLine(config); + + await using var client = await Client.CreateAsync(config.Endpoint, config.Db, config.TableName); + + await client.CleanUp(TimeSpan.FromMilliseconds(config.WriteTimeout)); + } + + internal static async Task Run(RunConfig config) + { + var promPgwEndpoint = $"{config.PromPgw}/metrics"; + const string job = "workload-dotnet"; + + await using var client = await Client.CreateAsync(config.Endpoint, config.Db, config.TableName); + + await client.Init(config.InitialDataCount, 1, 6, 1000, 
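+        // the literals follow the same parameter order as the Create call above:
+        // partition size = 1 (MB), min partitions = 6, max partitions = 1000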
TimeSpan.FromMilliseconds(config.WriteTimeout)); + + Console.WriteLine(config.PromPgw); + + await MetricReset(promPgwEndpoint, job); + using var prometheus = new MetricPusher(promPgwEndpoint, job, intervalMilliseconds: config.ReportPeriod); + + prometheus.Start(); + + var duration = TimeSpan.FromSeconds(config.Time); + + var readJob = new ReadJob( + client, + new RateLimitedCaller( + config.ReadRps, + duration + ), + TimeSpan.FromMilliseconds(config.ReadTimeout)); + + var writeJob = new WriteJob( + client, + new RateLimitedCaller( + config.WriteRps, + duration + ), + TimeSpan.FromMilliseconds(config.WriteTimeout)); + + var readThread = new Thread(readJob.Start); + var writeThread = new Thread(writeJob.Start); + + readThread.Start(); + writeThread.Start(); + await Task.Delay(duration + TimeSpan.FromSeconds(config.ShutdownTime)); + readThread.Join(); + writeThread.Join(); + + await prometheus.StopAsync(); + await MetricReset(promPgwEndpoint, job); + } + + private static async Task MetricReset(string promPgwEndpoint, string job) + { + var deleteUri = $"{promPgwEndpoint}/job/{job}"; + using var httpClient = new HttpClient(); + await httpClient.DeleteAsync(deleteUri); + } +} \ No newline at end of file diff --git a/slo/src/Cli/ConfigBinders.cs b/slo/src/Cli/ConfigBinders.cs new file mode 100644 index 00000000..a4b32493 --- /dev/null +++ b/slo/src/Cli/ConfigBinders.cs @@ -0,0 +1,125 @@ +using System.CommandLine; +using System.CommandLine.Binding; + +namespace slo.Cli; + +internal class CreateConfigBinder : BinderBase +{ + private readonly Argument _dbArgument; + private readonly Argument _endpointArgument; + private readonly Option _initialDataCountOption; + private readonly Option _maxPartitionsCountOption; + private readonly Option _minPartitionsCountOption; + private readonly Option _partitionSizeOption; + private readonly Option _tableOption; + private readonly Option _writeTimeoutOption; + + public CreateConfigBinder(Argument endpointArgument, Argument dbArgument, + Option tableOption, Option minPartitionsCountOption, Option maxPartitionsCountOption, + Option partitionSizeOption, Option initialDataCountOption, Option writeTimeoutOption) + { + _endpointArgument = endpointArgument; + _dbArgument = dbArgument; + _tableOption = tableOption; + _minPartitionsCountOption = minPartitionsCountOption; + _maxPartitionsCountOption = maxPartitionsCountOption; + _partitionSizeOption = partitionSizeOption; + _initialDataCountOption = initialDataCountOption; + _writeTimeoutOption = writeTimeoutOption; + } + + protected override CreateConfig GetBoundValue(BindingContext bindingContext) + { + return new CreateConfig( + bindingContext.ParseResult.GetValueForArgument(_endpointArgument), + bindingContext.ParseResult.GetValueForArgument(_dbArgument), + bindingContext.ParseResult.GetValueForOption(_tableOption)!, + bindingContext.ParseResult.GetValueForOption(_minPartitionsCountOption), + bindingContext.ParseResult.GetValueForOption(_maxPartitionsCountOption), + bindingContext.ParseResult.GetValueForOption(_partitionSizeOption), + bindingContext.ParseResult.GetValueForOption(_initialDataCountOption), + bindingContext.ParseResult.GetValueForOption(_writeTimeoutOption) + ); + } +} + +internal class CleanUpConfigBinder : BinderBase +{ + private readonly Argument _dbArgument; + private readonly Argument _endpointArgument; + private readonly Option _tableOption; + private readonly Option _writeTimeoutOption; + + public CleanUpConfigBinder(Argument endpointArgument, Argument dbArgument, + Option tableOption, Option 
writeTimeoutOption) + { + _endpointArgument = endpointArgument; + _dbArgument = dbArgument; + _tableOption = tableOption; + _writeTimeoutOption = writeTimeoutOption; + } + + protected override CleanUpConfig GetBoundValue(BindingContext bindingContext) + { + return new CleanUpConfig( + bindingContext.ParseResult.GetValueForArgument(_endpointArgument), + bindingContext.ParseResult.GetValueForArgument(_dbArgument), + bindingContext.ParseResult.GetValueForOption(_tableOption)!, + bindingContext.ParseResult.GetValueForOption(_writeTimeoutOption) + ); + } +} + +internal class RunConfigBinder : BinderBase +{ + private readonly Argument _dbArgument; + private readonly Argument _endpointArgument; + private readonly Option _initialDataCountOption; + private readonly Option _promPgwOption; + private readonly Option _readRpsOption; + private readonly Option _readTimeoutOption; + private readonly Option _reportPeriodOption; + private readonly Option _shutdownTimeOption; + private readonly Option _tableOption; + private readonly Option _timeOption; + private readonly Option _writeRpsOption; + private readonly Option _writeTimeoutOption; + + public RunConfigBinder(Argument endpointArgument, Argument dbArgument, + Option tableOption, Option initialDataCountOption, Option promPgwOption, + Option reportPeriodOption, Option readRpsOption, Option readTimeoutOption, + Option writeRpsOption, Option writeTimeoutOption, Option timeOption, + Option shutdownTimeOption) + { + _endpointArgument = endpointArgument; + _dbArgument = dbArgument; + _tableOption = tableOption; + _initialDataCountOption = initialDataCountOption; + _promPgwOption = promPgwOption; + _reportPeriodOption = reportPeriodOption; + _readRpsOption = readRpsOption; + _readTimeoutOption = readTimeoutOption; + _writeRpsOption = writeRpsOption; + _writeTimeoutOption = writeTimeoutOption; + _timeOption = timeOption; + _shutdownTimeOption = shutdownTimeOption; + } + + protected override RunConfig GetBoundValue(BindingContext bindingContext) + { + return new RunConfig( + bindingContext.ParseResult.GetValueForArgument(_endpointArgument), + bindingContext.ParseResult.GetValueForArgument(_dbArgument), + bindingContext.ParseResult.GetValueForOption(_tableOption)!, + bindingContext.ParseResult.GetValueForOption(_initialDataCountOption), + bindingContext.ParseResult.GetValueForOption(_promPgwOption)!, + bindingContext.ParseResult.GetValueForOption(_reportPeriodOption), + bindingContext.ParseResult.GetValueForOption(_readRpsOption), + bindingContext.ParseResult.GetValueForOption(_readTimeoutOption), + bindingContext.ParseResult.GetValueForOption(_writeRpsOption), + bindingContext.ParseResult.GetValueForOption(_writeTimeoutOption), + bindingContext.ParseResult.GetValueForOption(_timeOption), + bindingContext.ParseResult.GetValueForOption(_shutdownTimeOption) + ); + } +} \ No newline at end of file diff --git a/slo/src/Cli/Configs.cs b/slo/src/Cli/Configs.cs new file mode 100644 index 00000000..d5ad8d95 --- /dev/null +++ b/slo/src/Cli/Configs.cs @@ -0,0 +1,9 @@ +namespace slo.Cli; + +internal record CreateConfig(string Endpoint, string Db, string TableName, int MinPartitionsCount, + int MaxPartitionsCount, int PartitionSize, int InitialDataCount, int WriteTimeout); + +internal record CleanUpConfig(string Endpoint, string Db, string TableName, int WriteTimeout); + +internal record RunConfig(string Endpoint, string Db, string TableName, int InitialDataCount, string PromPgw, + int ReportPeriod, int ReadRps, int ReadTimeout, int WriteRps, int WriteTimeout, int Time, 
int ShutdownTime); \ No newline at end of file diff --git a/slo/src/Client.cs b/slo/src/Client.cs new file mode 100644 index 00000000..810ca198 --- /dev/null +++ b/slo/src/Client.cs @@ -0,0 +1,115 @@ +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Ydb.Sdk; +using Ydb.Sdk.Services.Table; + +namespace slo; + +public class Client : IAsyncDisposable +{ + public readonly Executor Executor; + public readonly string TableName; + + private readonly ServiceProvider _serviceProvider; + private readonly Driver _driver; + private readonly TableClient _tableClient; + + private readonly Semaphore _semaphore; + + private Client(string tableName, Executor executor, ServiceProvider serviceProvider, Driver driver, + TableClient tableClient, uint sessionPoolLimit) + { + TableName = tableName; + Executor = executor; + _serviceProvider = serviceProvider; + _driver = driver; + _tableClient = tableClient; + _semaphore = new Semaphore((int)sessionPoolLimit, (int)sessionPoolLimit); + } + + public async Task Init(int initialDataCount, int partitionSize, int minPartitionsCount, int maxPartitionsCount, + TimeSpan timeout) + { + await Executor.ExecuteSchemeQuery( + Queries.GetCreateQuery(TableName, partitionSize, minPartitionsCount, maxPartitionsCount), + timeout); + + await DataGenerator.LoadMaxId(TableName, Executor); + + var tasks = new List { Capacity = initialDataCount }; + + for (var i = 0; i < initialDataCount; i++) + { + await CallFuncWithSessionPoolLimit(() => Executor.ExecuteDataQuery( + Queries.GetWriteQuery(TableName), + DataGenerator.GetUpsertData(), + timeout: timeout + )); + } + + await Task.WhenAll(tasks); + } + + public async Task CleanUp(TimeSpan timeout) + { + await Executor.ExecuteSchemeQuery(Queries.GetDropQuery(TableName), timeout); + } + + private static ServiceProvider GetServiceProvider() + { + return new ServiceCollection() + .AddLogging(configure => configure.AddConsole().SetMinimumLevel(LogLevel.Information)) + .BuildServiceProvider(); + } + + public static async Task CreateAsync(string endpoint, string db, string tableName, + uint sessionPoolLimit = 100) + { + var driverConfig = new DriverConfig( + endpoint, + db + ); + + var serviceProvider = GetServiceProvider(); + var loggerFactory = serviceProvider.GetService(); + + loggerFactory ??= NullLoggerFactory.Instance; + var driver = await Driver.CreateInitialized(driverConfig, loggerFactory); + + var tableClient = new TableClient(driver, new TableClientConfig(new SessionPoolConfig(sessionPoolLimit))); + + var executor = new Executor(tableClient); + + var table = new Client(tableName, executor, serviceProvider, driver, tableClient, sessionPoolLimit); + + return table; + } + + public Task CallFuncWithSessionPoolLimit(Func func) + { + _semaphore.WaitOne(); + + async Task FuncWithRelease() + { + try + { + await func(); + } + finally + { + _semaphore.Release(); + } + } + + _ = FuncWithRelease(); + return Task.CompletedTask; + } + + public async ValueTask DisposeAsync() + { + _tableClient.Dispose(); + await _driver.DisposeAsync(); + await _serviceProvider.DisposeAsync(); + } +} \ No newline at end of file diff --git a/slo/src/DataGenerator.cs b/slo/src/DataGenerator.cs new file mode 100644 index 00000000..745b712a --- /dev/null +++ b/slo/src/DataGenerator.cs @@ -0,0 +1,35 @@ +using Ydb.Sdk.Value; + +namespace slo; + +public static class DataGenerator +{ + private static readonly Random Random = new(); + + public static int MaxId { get; private set; } + + 
public static async Task LoadMaxId(string tableName, Executor executor) + { + var response = await executor.ExecuteDataQuery(Queries.GetLoadMaxIdQuery(tableName)); + var row = response.Result.ResultSets[0].Rows[0]; + var value = row[0]; + MaxId = (int?)value.GetOptionalUint64() ?? 0; + } + + public static Dictionary GetUpsertData() + { + MaxId++; + return new Dictionary + { + { "$id", YdbValue.MakeUint64((ulong)MaxId) }, + { + "$payload_str", + YdbValue.MakeUtf8(string.Join("", Enumerable + .Repeat(0, Random.Next(20, 40)) + .Select(_ => (char)new Random().Next(127)))) + }, + { "$payload_double", YdbValue.MakeDouble(Random.NextDouble()) }, + { "$payload_timestamp", YdbValue.MakeTimestamp(DateTime.Now) } + }; + } +} \ No newline at end of file diff --git a/slo/src/Dockerfile b/slo/src/Dockerfile new file mode 100644 index 00000000..3b582784 --- /dev/null +++ b/slo/src/Dockerfile @@ -0,0 +1,20 @@ +FROM mcr.microsoft.com/dotnet/sdk:7.0 AS build + +COPY ../ /src +RUN ls /src + + +WORKDIR /src/slo/src +RUN ls +RUN dotnet restore *.sln +RUN dotnet publish *.sln -c release -o /app --no-restore -f net6.0 + +##################### + +FROM mcr.microsoft.com/dotnet/runtime:6.0 AS run + +WORKDIR /app + +COPY --from=build /app ./ + +ENTRYPOINT ["./slo"] diff --git a/slo/src/Executor.cs b/slo/src/Executor.cs new file mode 100644 index 00000000..590b92ae --- /dev/null +++ b/slo/src/Executor.cs @@ -0,0 +1,71 @@ +using Prometheus; +using Ydb.Sdk.Services.Table; +using Ydb.Sdk.Value; + +namespace slo; + +public class Executor +{ + private readonly TableClient _tableClient; + + public Executor(TableClient tableClient) + { + _tableClient = tableClient; + } + + public async Task ExecuteSchemeQuery(string query, TimeSpan? timeout = null) + { + var response = await _tableClient.SessionExec( + async session => await session.ExecuteSchemeQuery(query, + new ExecuteSchemeQuerySettings { OperationTimeout = timeout, TransportTimeout = timeout * 1.1 })); + response.Status.EnsureSuccess(); + } + + public async Task ExecuteDataQuery( + string query, + Dictionary? parameters = null, + TimeSpan? timeout = null, + Histogram? attemptsHistogram = null, + Gauge? errorsGauge = null) + + { + var txControl = TxControl.BeginSerializableRW().Commit(); + + var querySettings = new ExecuteDataQuerySettings + { OperationTimeout = timeout, TransportTimeout = timeout * 1.1 }; + + var attempts = 0; + + var response = await _tableClient.SessionExec( + async session => + { + attempts++; + var response = parameters == null + ? await session.ExecuteDataQuery( + query, + txControl, + querySettings) + : await session.ExecuteDataQuery( + query, + txControl, + parameters, + querySettings); + if (!response.Status.IsSuccess) + { + errorsGauge?.WithLabels(Utils.GetResonseStatusName(response.Status.StatusCode), "retried").Inc(); + Console.WriteLine(response.Status); + } + + return response; + }); + attemptsHistogram?.WithLabels(response.Status.IsSuccess ? 
"ok" : "err").Observe(attempts); + if (!response.Status.IsSuccess) + { + errorsGauge?.WithLabels(Utils.GetResonseStatusName(response.Status.StatusCode), "finally").Inc(); + } + + response.Status.EnsureSuccess(); + + return (ExecuteDataQueryResponse)response; + } +} \ No newline at end of file diff --git a/slo/src/Jobs/Job.cs b/slo/src/Jobs/Job.cs new file mode 100644 index 00000000..59cc2be2 --- /dev/null +++ b/slo/src/Jobs/Job.cs @@ -0,0 +1,107 @@ +using System.Diagnostics; +using Prometheus; +using Ydb.Sdk; + +namespace slo.Jobs; + +public abstract class Job +{ + private readonly Gauge _inFlightGauge; + + private readonly Gauge _okGauge; + private readonly Gauge _notOkGauge; + + private readonly Summary _latencySummary; + + private readonly RateLimitedCaller _rateLimitedCaller; + protected readonly TimeSpan Timeout; + + protected readonly Histogram AttemptsHistogram; + protected readonly Gauge ErrorsGauge; + protected readonly Random Random = new(); + + protected readonly Client Client; + + protected Job(Client client, RateLimitedCaller rateLimitedCaller, string jobName, TimeSpan timeout) + { + Client = client; + _rateLimitedCaller = rateLimitedCaller; + Timeout = timeout; + + var metricFactory = Metrics.WithLabels(new Dictionary + { + { "jobName", jobName }, + { "sdk", "dotnet" }, + { "sdkVersion", Environment.Version.ToString() } + }); + + _okGauge = metricFactory.CreateGauge("oks", "Count of OK"); + _notOkGauge = metricFactory.CreateGauge("not_oks", "Count of not OK"); + _inFlightGauge = metricFactory.CreateGauge("in_flight", "amount of requests in flight"); + + _latencySummary = metricFactory.CreateSummary( + "latency", + "Latencies (OK)", + new[] { "status" }, + new SummaryConfiguration + { + MaxAge = TimeSpan.FromSeconds(15), + Objectives = new QuantileEpsilonPair[] + { + new(0.5, 0.05), + new(0.99, 0.005), + new(0.999, 0.0005) + } + } + ); + + AttemptsHistogram = metricFactory.CreateHistogram( + "attempts", + "summary of amount for request", + new[] { "status" }, + new HistogramConfiguration { Buckets = Histogram.LinearBuckets(1, 1, 10) }); + + ErrorsGauge = metricFactory.CreateGauge("errors", "amount of errors", new[] { "class", "in" }); + + foreach (var statusCode in Enum.GetValues()) + { + ErrorsGauge.WithLabels(Utils.GetResonseStatusName(statusCode), "retried").IncTo(0); + ErrorsGauge.WithLabels(Utils.GetResonseStatusName(statusCode), "finally").IncTo(0); + } + } + + public async void Start() + { + await _rateLimitedCaller.StartCalling( + () => Client.CallFuncWithSessionPoolLimit( + async () => await DoJob()), + _inFlightGauge); + } + + private async Task DoJob() + { + _inFlightGauge.Inc(); + var sw = Stopwatch.StartNew(); + try + { + await PerformQuery(); + sw.Stop(); + + _latencySummary.WithLabels("ok").Observe(sw.ElapsedMilliseconds); + _okGauge.Inc(); + _inFlightGauge.Dec(); + } + catch (Exception e) + { + Console.WriteLine(e); + sw.Stop(); + + _latencySummary.WithLabels("err").Observe(sw.ElapsedMilliseconds); + _notOkGauge.Inc(); + _inFlightGauge.Dec(); + throw; + } + } + + protected abstract Task PerformQuery(); +} \ No newline at end of file diff --git a/slo/src/Jobs/ReadJob.cs b/slo/src/Jobs/ReadJob.cs new file mode 100644 index 00000000..4508c781 --- /dev/null +++ b/slo/src/Jobs/ReadJob.cs @@ -0,0 +1,28 @@ +using Ydb.Sdk.Value; + +namespace slo.Jobs; + +internal class ReadJob : Job +{ + public ReadJob(Client client, RateLimitedCaller rateLimitedCaller, TimeSpan timeout) : + base(client, rateLimitedCaller, "read", timeout) + { + } + + + protected override async Task 
PerformQuery() + { + var parameters = new Dictionary<string, YdbValue> + { + { "$id", YdbValue.MakeUint64((ulong)Random.Next(DataGenerator.MaxId)) } + }; + + await Client.Executor.ExecuteDataQuery( + Queries.GetReadQuery(Client.TableName), + parameters, + Timeout, + AttemptsHistogram, + ErrorsGauge + ); + } +} \ No newline at end of file diff --git a/slo/src/Jobs/WriteJob.cs b/slo/src/Jobs/WriteJob.cs new file mode 100644 index 00000000..45c4861e --- /dev/null +++ b/slo/src/Jobs/WriteJob.cs @@ -0,0 +1,23 @@ +namespace slo.Jobs; + +internal class WriteJob : Job +{ + public WriteJob(Client client, RateLimitedCaller rateLimitedCaller, TimeSpan timeout) : + base(client, rateLimitedCaller, "write", timeout) + { + } + + + protected override async Task PerformQuery() + { + var parameters = DataGenerator.GetUpsertData(); + + await Client.Executor.ExecuteDataQuery( + Queries.GetWriteQuery(Client.TableName), + parameters, + Timeout, + AttemptsHistogram, + ErrorsGauge + ); + } +} \ No newline at end of file diff --git a/slo/src/Program.cs b/slo/src/Program.cs new file mode 100644 index 00000000..9f41679d --- /dev/null +++ b/slo/src/Program.cs @@ -0,0 +1,3 @@ +using slo.Cli; + +return await Cli.Run(args); \ No newline at end of file diff --git a/slo/src/Queries.cs b/slo/src/Queries.cs new file mode 100644 index 00000000..26d71c4c --- /dev/null +++ b/slo/src/Queries.cs @@ -0,0 +1,58 @@ +namespace slo; + +public static class Queries +{ + public static string GetCreateQuery(string tableName, int partitionSize, int minPartitionsCount, + int maxPartitionsCount) + { + return $@" +CREATE TABLE `{tableName}` ( + `hash` UINT64, + `id` UINT64, + `payload_str` UTF8, + `payload_double` DOUBLE, + `payload_timestamp` TIMESTAMP, + `payload_hash` UINT64, + PRIMARY KEY (`hash`, `id`) +) +WITH ( + AUTO_PARTITIONING_BY_SIZE = ENABLED, + AUTO_PARTITIONING_PARTITION_SIZE_MB = {partitionSize}, + AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = {minPartitionsCount}, + AUTO_PARTITIONING_MAX_PARTITIONS_COUNT = {maxPartitionsCount} +); +"; + } + + public static string GetDropQuery(string tableName) + { + return $"DROP TABLE `{tableName}`"; + } + + public static string GetLoadMaxIdQuery(string tableName) + { + return $"SELECT MAX(id) as max_id FROM `{tableName}`"; + } + + public static string GetReadQuery(string tableName) + { + return $@" +DECLARE $id AS Uint64; +SELECT id, payload_str, payload_double, payload_timestamp, payload_hash +FROM `{tableName}` +WHERE id = $id AND hash = Digest::NumericHash($id) +"; + } + + public static string GetWriteQuery(string tableName) + { + return $@" +DECLARE $id AS Uint64; +DECLARE $payload_str AS Utf8; +DECLARE $payload_double AS Double; +DECLARE $payload_timestamp AS Timestamp; +INSERT INTO `{tableName}` (id, hash, payload_str, payload_double, payload_timestamp) +VALUES ($id, Digest::NumericHash($id), $payload_str, $payload_double, $payload_timestamp) +"; + } +} \ No newline at end of file
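The read and write jobs above, as well as `CliCommands.Run`, drive load through a `RateLimitedCaller`, whose source file is not included in this part of the patch. A minimal sketch of such a rate limiter, assuming an interval-based dispatch loop and inferring the constructor and `StartCalling` signature from the call sites (`new RateLimitedCaller(rps, duration)` and `StartCalling(Func<Task>, Gauge)`), could look like this:

```csharp
using Prometheus;

namespace slo;

// Hypothetical sketch: the real RateLimitedCaller.cs is not shown in this excerpt,
// so the names and signatures here are inferred from the call sites above.
public class RateLimitedCaller
{
    private readonly int _rps;
    private readonly TimeSpan _duration;

    public RateLimitedCaller(int rps, TimeSpan duration)
    {
        _rps = rps;
        _duration = duration;
    }

    // Dispatches `action` roughly `_rps` times per second until `_duration` has elapsed.
    // Calls are fire-and-forget; success, latency and errors are recorded by the Job metrics.
    // `inFlightGauge` is passed through unused in this sketch; the real implementation may
    // consult it to apply backpressure when too many requests are pending.
    public async Task StartCalling(Func<Task> action, Gauge inFlightGauge)
    {
        var interval = TimeSpan.FromSeconds(1.0 / _rps);
        var deadline = DateTime.UtcNow + _duration;

        while (DateTime.UtcNow < deadline)
        {
            _ = action();
            await Task.Delay(interval);
        }
    }
}
```

Whatever the actual implementation does, the key design point visible from the call sites is that the rate limiter only paces dispatch; per-request outcomes are measured inside `Job.DoJob`.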
diff --git a/slo/src/README.md b/slo/src/README.md new file mode 100644 index 00000000..7ce80dbd --- /dev/null +++ b/slo/src/README.md @@ -0,0 +1,130 @@ +# SLO workload + +SLO is a type of test where an application based on the ydb-sdk is tested against failing YDB cluster nodes, tablets and network +(situations that are possible for distributed databases with hundreds of nodes). + +### Usage: + +It has 3 commands: + +- `create` - creates table in database +- `cleanup` - drops table in database +- `run` - runs workload (read and write to table with set RPS) + +### Run examples with all arguments: + +create: + +`slo create grpcs://ydb.cool.example.com:2135 /some/folder -t tableName +--min-partitions-count 6 --max-partitions-count 1000 --partition-size 1 -c 1000 +--write-timeout 10000` + +cleanup: + +`slo cleanup grpcs://ydb.cool.example.com:2135 /some/folder -t tableName` + +run: + +`slo run grpcs://ydb.cool.example.com:2135 /some/folder -t tableName +--prom-pgw http://prometheus-pushgateway:9091 --report-period 250 +--read-rps 1000 --read-timeout 10000 +--write-rps 100 --write-timeout 10000 +--time 600 --shutdown-time 30` + +## Arguments for commands: + +### create +`slo create [<endpoint> [<db>]] [options]` + +``` +Arguments: + <endpoint> YDB endpoint to connect to + <db> YDB database to connect to + +Options: + -t, --table-name table name to create + [default: testingTable] + --min-partitions-count minimum amount of partitions in table [default: 6] + --max-partitions-count maximum amount of partitions in table [default: 1000] + --partition-size partition size in mb [default: 1] + -c, --initial-data-count amount of initially created rows [default: 1000] + --write-timeout write timeout milliseconds [default: 10000] +``` + +### cleanup +`slo cleanup [<endpoint> [<db>]] [options]` + +``` +Arguments: + <endpoint> YDB endpoint to connect to + <db> YDB database to connect to + +Options: + -t, --table-name table name to create + [default: testingTable] + --write-timeout write timeout milliseconds [default: 10000] +``` + +### run +`slo run [<endpoint> [<db>]] [options]` + +``` +Arguments: + <endpoint> YDB endpoint to connect to + <db> YDB database to connect to + +Options: + -t, --table-name table name to create + [default: testingTable] + --prom-pgw (REQUIRED) Prometheus pushgateway URL + --report-period prometheus push period in milliseconds [default: 250] + --read-rps read RPS [default: 1000] + --read-timeout read timeout milliseconds [default: 10000] + --write-rps write RPS [default: 100] + --time