From 5d413ae674c1bfa819cb9104b9650fd67166d7d0 Mon Sep 17 00:00:00 2001 From: Daniel Jaglowski Date: Tue, 5 Dec 2023 15:03:31 -0800 Subject: [PATCH] [chore][pkg/stanza] Move more tests into reader package (#29666) This also includes some related refactoring of test code: - Moved `tokenWithLength` utility function to `filetest`, since it's now used by both `fileconsumer` and `reader` packages. - Refactored `testFactory` to start with a default config and then apply options as necessary per test. I expect eventually the _actual_ `reader.Factory` will be cleaned up to support a similar pattern, but I'm trying to keep changes to test code until we have better coverage in the `reader` package. --- pkg/stanza/fileconsumer/benchmark_test.go | 2 +- pkg/stanza/fileconsumer/file_test.go | 189 +----------------- .../internal/filetest/filetest.go | 10 + .../internal/reader/factory_test.go | 85 ++++++-- .../internal/reader/reader_test.go | 182 +++++++++++++++++ .../internal/reader/split_test.go | 13 +- pkg/stanza/fileconsumer/rotation_test.go | 8 +- pkg/stanza/fileconsumer/util_test.go | 10 - 8 files changed, 282 insertions(+), 217 deletions(-) create mode 100644 pkg/stanza/fileconsumer/internal/reader/reader_test.go diff --git a/pkg/stanza/fileconsumer/benchmark_test.go b/pkg/stanza/fileconsumer/benchmark_test.go index e9bcfd51d88e..168e03c3f73f 100644 --- a/pkg/stanza/fileconsumer/benchmark_test.go +++ b/pkg/stanza/fileconsumer/benchmark_test.go @@ -28,7 +28,7 @@ type benchFile struct { } func simpleTextFile(b *testing.B, file *os.File) *benchFile { - line := string(tokenWithLength(49)) + "\n" + line := string(filetest.TokenWithLength(49)) + "\n" return &benchFile{ File: file, log: func(_ int) { diff --git a/pkg/stanza/fileconsumer/file_test.go b/pkg/stanza/fileconsumer/file_test.go index b23415dd7741..9667ac59b4a5 100644 --- a/pkg/stanza/fileconsumer/file_test.go +++ b/pkg/stanza/fileconsumer/file_test.go @@ -733,7 +733,7 @@ func TestDecodeBufferIsResized(t *testing.T) { }() 
temp := filetest.OpenTemp(t, tempDir) - expected := tokenWithLength(1<<12 + 1) + expected := filetest.TokenWithLength(1<<12 + 1) filetest.WriteString(t, temp, string(expected)+"\n") sink.ExpectToken(t, expected) @@ -938,10 +938,10 @@ func TestRestartOffsets(t *testing.T) { logFile := filetest.OpenTemp(t, tempDir) - before1stRun := tokenWithLength(tc.lineLength) - during1stRun := tokenWithLength(tc.lineLength) - duringRestart := tokenWithLength(tc.lineLength) - during2ndRun := tokenWithLength(tc.lineLength) + before1stRun := filetest.TokenWithLength(tc.lineLength) + during1stRun := filetest.TokenWithLength(tc.lineLength) + duringRestart := filetest.TokenWithLength(tc.lineLength) + during2ndRun := filetest.TokenWithLength(tc.lineLength) operatorOne, sink1 := testManager(t, cfg) filetest.WriteString(t, logFile, string(before1stRun)+"\n") @@ -1028,7 +1028,7 @@ func TestFileBatching(t *testing.T) { expectedTokens := make([][]byte, 0, files*linesPerFile) for i, temp := range temps { for j := 0; j < linesPerFile; j++ { - message := fmt.Sprintf("%s %d %d", tokenWithLength(100), i, j) + message := fmt.Sprintf("%s %d %d", filetest.TokenWithLength(100), i, j) _, err := temp.WriteString(message + "\n") require.NoError(t, err) expectedTokens = append(expectedTokens, []byte(message)) @@ -1045,7 +1045,7 @@ func TestFileBatching(t *testing.T) { expectedTokens = make([][]byte, 0, files*linesPerFile) for i, temp := range temps { for j := 0; j < linesPerFile; j++ { - message := fmt.Sprintf("%s %d %d", tokenWithLength(20), i, j) + message := fmt.Sprintf("%s %d %d", filetest.TokenWithLength(20), i, j) _, err := temp.WriteString(message + "\n") require.NoError(t, err) expectedTokens = append(expectedTokens, []byte(message)) @@ -1110,173 +1110,6 @@ func TestFileBatchingRespectsStartAtEnd(t *testing.T) { sink.ExpectTokens(t, expectedTokens...) 
} -func TestFileReader_FingerprintUpdated(t *testing.T) { - t.Parallel() - - tempDir := t.TempDir() - cfg := NewConfig().includeDir(tempDir) - cfg.StartAt = "beginning" - operator, sink := testManager(t, cfg) - - temp := filetest.OpenTemp(t, tempDir) - tempCopy := filetest.OpenFile(t, temp.Name()) - fp, err := operator.readerFactory.NewFingerprint(temp) - require.NoError(t, err) - - reader, err := operator.readerFactory.NewReader(tempCopy, fp) - require.NoError(t, err) - defer reader.Close() - - filetest.WriteString(t, temp, "testlog1\n") - reader.ReadToEnd(context.Background()) - sink.ExpectToken(t, []byte("testlog1")) - require.Equal(t, []byte("testlog1\n"), reader.Fingerprint.FirstBytes) -} - -// Test that a fingerprint: -// - Starts empty -// - Updates as a file is read -// - Stops updating when the max fingerprint size is reached -// - Stops exactly at max fingerprint size, regardless of content -func TestFingerprintGrowsAndStops(t *testing.T) { - t.Parallel() - - // Use a number with many factors. - // Sometimes fingerprint length will align with - // the end of a line, sometimes not. Test both. 
- maxFP := 360 - - // Use prime numbers to ensure variation in - // whether or not they are factors of maxFP - lineLens := []int{3, 5, 7, 11, 13, 17, 19, 23, 27} - - for _, lineLen := range lineLens { - t.Run(fmt.Sprintf("%d", lineLen), func(t *testing.T) { - t.Parallel() - - tempDir := t.TempDir() - cfg := NewConfig().includeDir(tempDir) - cfg.StartAt = "beginning" - cfg.FingerprintSize = helper.ByteSize(maxFP) - operator, _ := testManager(t, cfg) - - temp := filetest.OpenTemp(t, tempDir) - tempCopy := filetest.OpenFile(t, temp.Name()) - fp, err := operator.readerFactory.NewFingerprint(temp) - require.NoError(t, err) - require.Equal(t, []byte(""), fp.FirstBytes) - - reader, err := operator.readerFactory.NewReader(tempCopy, fp) - require.NoError(t, err) - defer reader.Close() - - // keep track of what has been written to the file - var fileContent []byte - - // keep track of expected fingerprint size - expectedFP := 0 - - // Write lines until file is much larger than the length of the fingerprint - for len(fileContent) < 2*maxFP { - expectedFP += lineLen - if expectedFP > maxFP { - expectedFP = maxFP - } - - line := string(tokenWithLength(lineLen-1)) + "\n" - fileContent = append(fileContent, []byte(line)...) - - filetest.WriteString(t, temp, line) - reader.ReadToEnd(context.Background()) - require.Equal(t, fileContent[:expectedFP], reader.Fingerprint.FirstBytes) - } - }) - } -} - -// This is same test like TestFingerprintGrowsAndStops, but with additional check for fingerprint size check -// Test that a fingerprint: -// - Starts empty -// - Updates as a file is read -// - Stops updating when the max fingerprint size is reached -// - Stops exactly at max fingerprint size, regardless of content -// - Do not change size after fingerprint configuration change -func TestFingerprintChangeSize(t *testing.T) { - t.Parallel() - - // Use a number with many factors. - // Sometimes fingerprint length will align with - // the end of a line, sometimes not. Test both. 
- maxFP := 360 - - // Use prime numbers to ensure variation in - // whether or not they are factors of maxFP - lineLens := []int{3, 5, 7, 11, 13, 17, 19, 23, 27} - - for _, lineLen := range lineLens { - t.Run(fmt.Sprintf("%d", lineLen), func(t *testing.T) { - t.Parallel() - - tempDir := t.TempDir() - cfg := NewConfig().includeDir(tempDir) - cfg.StartAt = "beginning" - cfg.FingerprintSize = helper.ByteSize(maxFP) - operator, _ := testManager(t, cfg) - - temp := filetest.OpenTemp(t, tempDir) - tempCopy := filetest.OpenFile(t, temp.Name()) - fp, err := operator.readerFactory.NewFingerprint(temp) - require.NoError(t, err) - require.Equal(t, []byte(""), fp.FirstBytes) - - reader, err := operator.readerFactory.NewReader(tempCopy, fp) - require.NoError(t, err) - defer reader.Close() - - // keep track of what has been written to the file - var fileContent []byte - - // keep track of expected fingerprint size - expectedFP := 0 - - // Write lines until file is much larger than the length of the fingerprint - for len(fileContent) < 2*maxFP { - expectedFP += lineLen - if expectedFP > maxFP { - expectedFP = maxFP - } - - line := string(tokenWithLength(lineLen-1)) + "\n" - fileContent = append(fileContent, []byte(line)...) - - filetest.WriteString(t, temp, line) - reader.ReadToEnd(context.Background()) - require.Equal(t, fileContent[:expectedFP], reader.Fingerprint.FirstBytes) - } - - // Test fingerprint change - // Change fingerprint and try to read file again - // We do not expect fingerprint change - // We test both increasing and decreasing fingerprint size - reader.Config.FingerprintSize = maxFP * (lineLen / 3) - line := string(tokenWithLength(lineLen-1)) + "\n" - fileContent = append(fileContent, []byte(line)...) 
- - filetest.WriteString(t, temp, line) - reader.ReadToEnd(context.Background()) - require.Equal(t, fileContent[:expectedFP], reader.Fingerprint.FirstBytes) - - reader.Config.FingerprintSize = maxFP / 2 - line = string(tokenWithLength(lineLen-1)) + "\n" - fileContent = append(fileContent, []byte(line)...) - - filetest.WriteString(t, temp, line) - reader.ReadToEnd(context.Background()) - require.Equal(t, fileContent[:expectedFP], reader.Fingerprint.FirstBytes) - }) - } -} - func TestEncodings(t *testing.T) { t.Parallel() cases := []struct { @@ -1379,7 +1212,7 @@ func TestDeleteAfterRead(t *testing.T) { // Write logs to each file for i, temp := range temps { for j := 0; j < linesPerFile; j++ { - line := tokenWithLength(100) + line := filetest.TokenWithLength(100) message := fmt.Sprintf("%s %d %d", line, i, j) _, err := temp.WriteString(message + "\n") require.NoError(t, err) @@ -1436,7 +1269,7 @@ func TestMaxBatching(t *testing.T) { numExpectedTokens := expectedMaxFilesPerPoll * linesPerFile for i, temp := range temps { for j := 0; j < linesPerFile; j++ { - message := fmt.Sprintf("%s %d %d", tokenWithLength(100), i, j) + message := fmt.Sprintf("%s %d %d", filetest.TokenWithLength(100), i, j) _, err := temp.WriteString(message + "\n") require.NoError(t, err) } @@ -1451,7 +1284,7 @@ func TestMaxBatching(t *testing.T) { // Write more logs to each file so we can validate that all files are still known for i, temp := range temps { for j := 0; j < linesPerFile; j++ { - message := fmt.Sprintf("%s %d %d", tokenWithLength(20), i, j) + message := fmt.Sprintf("%s %d %d", filetest.TokenWithLength(20), i, j) _, err := temp.WriteString(message + "\n") require.NoError(t, err) } @@ -1519,7 +1352,7 @@ func TestDeleteAfterRead_SkipPartials(t *testing.T) { longFile := filetest.OpenTemp(t, tempDir) for line := 0; line < longFileLines; line++ { - _, err := longFile.WriteString(string(tokenWithLength(100)) + "\n") + _, err := longFile.WriteString(string(filetest.TokenWithLength(100)) + 
"\n") require.NoError(t, err) } require.NoError(t, longFile.Close()) diff --git a/pkg/stanza/fileconsumer/internal/filetest/filetest.go b/pkg/stanza/fileconsumer/internal/filetest/filetest.go index c9ea3b209493..4c58cf3762a9 100644 --- a/pkg/stanza/fileconsumer/internal/filetest/filetest.go +++ b/pkg/stanza/fileconsumer/internal/filetest/filetest.go @@ -4,6 +4,7 @@ package filetest // import "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/fileconsumer/internal/filetest" import ( + "math/rand" "os" "path/filepath" "testing" @@ -37,3 +38,12 @@ func WriteString(t testing.TB, file *os.File, s string) { _, err := file.WriteString(s) require.NoError(t, err) } + +func TokenWithLength(length int) []byte { + charset := "abcdefghijklmnopqrstuvwxyz" + b := make([]byte, length) + for i := range b { + b[i] = charset[rand.Intn(len(charset))] + } + return b +} diff --git a/pkg/stanza/fileconsumer/internal/reader/factory_test.go b/pkg/stanza/fileconsumer/internal/reader/factory_test.go index 7d46593d50ee..e2b3b83c532e 100644 --- a/pkg/stanza/fileconsumer/internal/reader/factory_test.go +++ b/pkg/stanza/fileconsumer/internal/reader/factory_test.go @@ -8,8 +8,9 @@ import ( "time" "github.com/stretchr/testify/require" + "golang.org/x/text/encoding" + "golang.org/x/text/encoding/unicode" - "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/decode" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/fileconsumer/internal/emittest" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/fileconsumer/internal/fingerprint" "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/split" @@ -18,32 +19,82 @@ import ( ) const ( - defaultMaxLogSize = 1024 * 1024 - defaultMaxConcurrentFiles = 1024 - defaultEncoding = "utf-8" - defaultPollInterval = 200 * time.Millisecond - defaultFlushPeriod = 500 * time.Millisecond + defaultMaxLogSize = 1024 * 1024 + defaultFlushPeriod = 500 * time.Millisecond ) -func 
testFactory(t *testing.T, sCfg split.Config, maxLogSize int, flushPeriod time.Duration) (*Factory, *emittest.Sink) { - enc, err := decode.LookupEncoding(defaultEncoding) - require.NoError(t, err) +func testFactory(t *testing.T, opts ...testFactoryOpt) (*Factory, *emittest.Sink) { + cfg := &testFactoryCfg{ + fingerprintSize: fingerprint.DefaultSize, + fromBeginning: true, + maxLogSize: defaultMaxLogSize, + encoding: unicode.UTF8, + trimFunc: trim.Whitespace, + flushPeriod: defaultFlushPeriod, + sinkCallBufferSize: 100, + } + for _, opt := range opts { + opt(cfg) + } - splitFunc, err := sCfg.Func(enc, false, maxLogSize) + splitFunc, err := cfg.splitCfg.Func(cfg.encoding, false, cfg.maxLogSize) require.NoError(t, err) - sink := emittest.NewSink() + sink := emittest.NewSink(emittest.WithCallBuffer(cfg.sinkCallBufferSize)) return &Factory{ SugaredLogger: testutil.Logger(t), Config: &Config{ - FingerprintSize: fingerprint.DefaultSize, - MaxLogSize: maxLogSize, + FingerprintSize: cfg.fingerprintSize, + MaxLogSize: cfg.maxLogSize, + FlushTimeout: cfg.flushPeriod, Emit: sink.Callback, - FlushTimeout: flushPeriod, }, - FromBeginning: true, - Encoding: enc, + FromBeginning: cfg.fromBeginning, + Encoding: cfg.encoding, SplitFunc: splitFunc, - TrimFunc: trim.Whitespace, + TrimFunc: cfg.trimFunc, }, sink } + +type testFactoryOpt func(*testFactoryCfg) + +type testFactoryCfg struct { + fingerprintSize int + fromBeginning bool + maxLogSize int + encoding encoding.Encoding + splitCfg split.Config + trimFunc trim.Func + flushPeriod time.Duration + sinkCallBufferSize int +} + +func withFingerprintSize(size int) testFactoryOpt { + return func(c *testFactoryCfg) { + c.fingerprintSize = size + } +} + +func withSplitConfig(cfg split.Config) testFactoryOpt { + return func(c *testFactoryCfg) { + c.splitCfg = cfg + } +} + +func withMaxLogSize(maxLogSize int) testFactoryOpt { + return func(c *testFactoryCfg) { + c.maxLogSize = maxLogSize + } +} + +func withFlushPeriod(flushPeriod 
time.Duration) testFactoryOpt { + return func(c *testFactoryCfg) { + c.flushPeriod = flushPeriod + } +} + +func withSinkBufferSize(n int) testFactoryOpt { + return func(c *testFactoryCfg) { + c.sinkCallBufferSize = n + } +} diff --git a/pkg/stanza/fileconsumer/internal/reader/reader_test.go b/pkg/stanza/fileconsumer/internal/reader/reader_test.go new file mode 100644 index 000000000000..58baeea97277 --- /dev/null +++ b/pkg/stanza/fileconsumer/internal/reader/reader_test.go @@ -0,0 +1,182 @@ +// Copyright The OpenTelemetry Authors +// SPDX-License-Identifier: Apache-2.0 + +package reader + +import ( + "context" + "fmt" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/fileconsumer/internal/filetest" +) + +func TestFileReader_FingerprintUpdated(t *testing.T) { + t.Parallel() + + tempDir := t.TempDir() + temp := filetest.OpenTemp(t, tempDir) + tempCopy := filetest.OpenFile(t, temp.Name()) + + f, sink := testFactory(t) + fp, err := f.NewFingerprint(temp) + require.NoError(t, err) + + reader, err := f.NewReader(tempCopy, fp) + require.NoError(t, err) + defer reader.Close() + + filetest.WriteString(t, temp, "testlog1\n") + reader.ReadToEnd(context.Background()) + sink.ExpectToken(t, []byte("testlog1")) + require.Equal(t, []byte("testlog1\n"), reader.Fingerprint.FirstBytes) +} + +// Test that a fingerprint: +// - Starts empty +// - Updates as a file is read +// - Stops updating when the max fingerprint size is reached +// - Stops exactly at max fingerprint size, regardless of content +func TestFingerprintGrowsAndStops(t *testing.T) { + t.Parallel() + + // Use a number with many factors. + // Sometimes fingerprint length will align with + // the end of a line, sometimes not. Test both. 
+ fpSize := 360 + + // Use prime numbers to ensure variation in + // whether or not they are factors of fpSize + lineLens := []int{3, 5, 7, 11, 13, 17, 19, 23, 27} + + for _, lineLen := range lineLens { + lineLen := lineLen + t.Run(fmt.Sprintf("%d", lineLen), func(t *testing.T) { + t.Parallel() + + tempDir := t.TempDir() + temp := filetest.OpenTemp(t, tempDir) + tempCopy := filetest.OpenFile(t, temp.Name()) + + f, _ := testFactory(t, withSinkBufferSize(3*fpSize/lineLen), withFingerprintSize(fpSize)) + fp, err := f.NewFingerprint(temp) + require.NoError(t, err) + require.Equal(t, []byte(""), fp.FirstBytes) + + reader, err := f.NewReader(tempCopy, fp) + require.NoError(t, err) + defer reader.Close() + + // keep track of what has been written to the file + var fileContent []byte + + // keep track of expected fingerprint size + expectedFP := 0 + + // Write lines until file is much larger than the length of the fingerprint + for len(fileContent) < 2*fpSize { + expectedFP += lineLen + if expectedFP > fpSize { + expectedFP = fpSize + } + + line := string(filetest.TokenWithLength(lineLen-1)) + "\n" + fileContent = append(fileContent, []byte(line)...) + + filetest.WriteString(t, temp, line) + reader.ReadToEnd(context.Background()) + require.Equal(t, fileContent[:expectedFP], reader.Fingerprint.FirstBytes) + } + }) + } +} + +// This is same test like TestFingerprintGrowsAndStops, but with additional check for fingerprint size check +// Test that a fingerprint: +// - Starts empty +// - Updates as a file is read +// - Stops updating when the max fingerprint size is reached +// - Stops exactly at max fingerprint size, regardless of content +// - Do not change size after fingerprint configuration change +func TestFingerprintChangeSize(t *testing.T) { + t.Parallel() + + // Use a number with many factors. + // Sometimes fingerprint length will align with + // the end of a line, sometimes not. Test both. 
+ fpSize := 360 + + // Use prime numbers to ensure variation in + // whether or not they are factors of fpSize + lineLens := []int{3, 5, 7, 11, 13, 17, 19, 23, 27} + + for _, lineLen := range lineLens { + lineLen := lineLen + t.Run(fmt.Sprintf("%d", lineLen), func(t *testing.T) { + t.Parallel() + + f, _ := testFactory(t, withSinkBufferSize(3*fpSize/lineLen), withFingerprintSize(fpSize)) + + tempDir := t.TempDir() + temp := filetest.OpenTemp(t, tempDir) + + fp, err := f.NewFingerprint(temp) + require.NoError(t, err) + require.Equal(t, []byte(""), fp.FirstBytes) + + reader, err := f.NewReader(filetest.OpenFile(t, temp.Name()), fp) + require.NoError(t, err) + + // keep track of what has been written to the file + var fileContent []byte + + // keep track of expected fingerprint size + expectedFP := 0 + + // Write lines until file is much larger than the length of the fingerprint + for len(fileContent) < 2*fpSize { + expectedFP += lineLen + if expectedFP > fpSize { + expectedFP = fpSize + } + + line := string(filetest.TokenWithLength(lineLen-1)) + "\n" + fileContent = append(fileContent, []byte(line)...) + + filetest.WriteString(t, temp, line) + reader.ReadToEnd(context.Background()) + require.Equal(t, fileContent[:expectedFP], reader.Fingerprint.FirstBytes) + } + + // Recreate the factory with a larger fingerprint size + f, _ = testFactory(t, withSinkBufferSize(3*fpSize/lineLen), withFingerprintSize(fpSize*lineLen/3)) + + // Recreate the reader with the new factory + reader, err = f.NewReaderFromMetadata(filetest.OpenFile(t, temp.Name()), reader.Close()) + require.NoError(t, err) + + line := string(filetest.TokenWithLength(lineLen-1)) + "\n" + fileContent = append(fileContent, []byte(line)...) 
+ + filetest.WriteString(t, temp, line) + reader.ReadToEnd(context.Background()) + require.Equal(t, fileContent[:expectedFP], reader.Fingerprint.FirstBytes) + + // Recreate the factory with a smaller fingerprint size + f, _ = testFactory(t, withSinkBufferSize(3*fpSize/lineLen), withFingerprintSize(fpSize/2)) + + // Recreate the reader with the new factory + reader, err = f.NewReaderFromMetadata(filetest.OpenFile(t, temp.Name()), reader.Close()) + require.NoError(t, err) + + line = string(filetest.TokenWithLength(lineLen-1)) + "\n" + fileContent = append(fileContent, []byte(line)...) + + filetest.WriteString(t, temp, line) + reader.ReadToEnd(context.Background()) + require.Equal(t, fileContent[:expectedFP], reader.Fingerprint.FirstBytes) + }) + } +} diff --git a/pkg/stanza/fileconsumer/internal/reader/split_test.go b/pkg/stanza/fileconsumer/internal/reader/split_test.go index 194c924d88d5..194009897a8c 100644 --- a/pkg/stanza/fileconsumer/internal/reader/split_test.go +++ b/pkg/stanza/fileconsumer/internal/reader/split_test.go @@ -21,7 +21,7 @@ import ( func TestPersistFlusher(t *testing.T) { flushPeriod := 100 * time.Millisecond - f, sink := testFactory(t, split.Config{}, defaultMaxLogSize, flushPeriod) + f, sink := testFactory(t, withFlushPeriod(flushPeriod)) temp := filetest.OpenTemp(t, t.TempDir()) fp, err := f.NewFingerprint(temp) @@ -107,7 +107,7 @@ func TestTokenization(t *testing.T) { for _, tc := range testCases { t.Run(tc.testName, func(t *testing.T) { - f, sink := testFactory(t, split.Config{}, defaultMaxLogSize, defaultFlushPeriod) + f, sink := testFactory(t) temp := filetest.OpenTemp(t, t.TempDir()) _, err := temp.Write(tc.fileContent) @@ -137,7 +137,7 @@ func TestTokenizationTooLong(t *testing.T) { []byte("aaa"), } - f, sink := testFactory(t, split.Config{}, 10, defaultFlushPeriod) + f, sink := testFactory(t, withMaxLogSize(10)) temp := filetest.OpenTemp(t, t.TempDir()) _, err := temp.Write(fileContent) @@ -167,9 +167,8 @@ func 
TestTokenizationTooLongWithLineStartPattern(t *testing.T) { []byte("2023-01-01 2"), } - sCfg := split.Config{} - sCfg.LineStartPattern = `\d+-\d+-\d+` - f, sink := testFactory(t, sCfg, 15, defaultFlushPeriod) + sCfg := split.Config{LineStartPattern: `\d+-\d+-\d+`} + f, sink := testFactory(t, withSplitConfig(sCfg), withMaxLogSize(15)) temp := filetest.OpenTemp(t, t.TempDir()) _, err := temp.Write(fileContent) @@ -191,7 +190,7 @@ func TestTokenizationTooLongWithLineStartPattern(t *testing.T) { func TestHeaderFingerprintIncluded(t *testing.T) { fileContent := []byte("#header-line\naaa\n") - f, _ := testFactory(t, split.Config{}, 10, defaultFlushPeriod) + f, _ := testFactory(t, withMaxLogSize(10)) regexConf := regex.NewConfig() regexConf.Regex = "^#(?P
.*)" diff --git a/pkg/stanza/fileconsumer/rotation_test.go b/pkg/stanza/fileconsumer/rotation_test.go index 61b9b0883f93..288ad58a1fbe 100644 --- a/pkg/stanza/fileconsumer/rotation_test.go +++ b/pkg/stanza/fileconsumer/rotation_test.go @@ -275,7 +275,7 @@ func (rt rotationTest) run(tc rotationTest, copyTruncate, sequential bool) func( logger := log.New(&rotator, "", 0) expected := make([][]byte, 0, tc.totalLines) - baseStr := string(tokenWithLength(46)) // + ' 123' + baseStr := string(filetest.TokenWithLength(46)) // + ' 123' for i := 0; i < tc.totalLines; i++ { expected = append(expected, []byte(fmt.Sprintf("%s %3d", baseStr, i))) } @@ -626,9 +626,9 @@ func TestFileMovedWhileOff_BigFiles(t *testing.T) { operator, sink := testManager(t, cfg) persister := testutil.NewUnscopedMockPersister() - log1 := tokenWithLength(1001) - log2 := tokenWithLength(1002) - log3 := tokenWithLength(1003) + log1 := filetest.TokenWithLength(1001) + log2 := filetest.TokenWithLength(1002) + log3 := filetest.TokenWithLength(1003) temp := filetest.OpenTemp(t, tempDir) tempName := temp.Name() diff --git a/pkg/stanza/fileconsumer/util_test.go b/pkg/stanza/fileconsumer/util_test.go index 23dab8601fcc..aa6c8564d5b1 100644 --- a/pkg/stanza/fileconsumer/util_test.go +++ b/pkg/stanza/fileconsumer/util_test.go @@ -4,7 +4,6 @@ package fileconsumer import ( - "math/rand" "testing" "github.com/stretchr/testify/require" @@ -24,12 +23,3 @@ func testManagerWithSink(t *testing.T, cfg *Config, sink *emittest.Sink) *Manage t.Cleanup(func() { input.closePreviousFiles() }) return input } - -func tokenWithLength(length int) []byte { - charset := "abcdefghijklmnopqrstuvwxyz" - b := make([]byte, length) - for i := range b { - b[i] = charset[rand.Intn(len(charset))] - } - return b -}