From fa3d7be14a8093f338da7b101fb1c8a68a778145 Mon Sep 17 00:00:00 2001
From: Philip Potter
Date: Fri, 8 Dec 2023 17:31:04 +0000
Subject: [PATCH] make requests in parallel

Tentatively to see if it improves weird performance issues
---
 pkg/download/consistent_hashing.go | 25 ++++++++++++++-----------
 1 file changed, 14 insertions(+), 11 deletions(-)

diff --git a/pkg/download/consistent_hashing.go b/pkg/download/consistent_hashing.go
index 19ce77a..362021d 100644
--- a/pkg/download/consistent_hashing.go
+++ b/pkg/download/consistent_hashing.go
@@ -141,10 +141,10 @@ func (m *ConsistentHashingMode) Fetch(ctx context.Context, urlString string) (io
 			// this happens if we've already downloaded the whole first slice
 			continue
 		}
-		start := m.SliceSize * int64(slice)
+		startFrom := m.SliceSize * int64(slice)
 		sliceSize := m.SliceSize
 		if slice == 0 {
-			start = firstChunkResp.ContentLength
+			startFrom = firstChunkResp.ContentLength
 			sliceSize = sliceSize - firstChunkResp.ContentLength
 		}
 		if slice == int(totalSlices)-1 {
@@ -160,20 +160,23 @@ func (m *ConsistentHashingMode) Fetch(ctx context.Context, urlString string) (io
 		}
 		chunkSizes := EqualSplit(sliceSize, numChunks)
 		for _, chunkSize := range chunkSizes {
-			end := start + chunkSize - 1
+			// startFrom changes each time round the loop
+			// we create chunkStart to be a stable variable for the goroutine to capture
+			chunkStart := startFrom
+			chunkEnd := startFrom + chunkSize - 1
 
-			logger.Debug().Int64("start", start).Int64("end", end).Msg("starting request")
-			resp, err := m.doRequest(ctx, start, end, urlString)
-			if err != nil {
-				return nil, -1, err
-			}
-
-			dataSlice := data[start : end+1]
+			dataSlice := data[chunkStart : chunkEnd+1]
 			errGroup.Go(func() error {
+				logger.Debug().Int64("start", chunkStart).Int64("end", chunkEnd).Msg("starting request")
+				resp, err := m.doRequest(ctx, chunkStart, chunkEnd, urlString)
+				if err != nil {
+					return err
+				}
+
 				return m.downloadChunk(resp, dataSlice)
 			})
-			start = start + chunkSize
+			startFrom = startFrom + chunkSize
 		}
 	}
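
The effect of the patch is that m.doRequest now runs inside the errGroup.Go closure, so chunk requests are issued in parallel rather than one at a time, and each goroutine captures per-iteration copies (chunkStart, chunkEnd) instead of the startFrom variable that keeps changing. Below is a minimal standalone sketch of that capture pattern, not taken from the patch: it keeps the errgroup usage but replaces doRequest/downloadChunk and the real chunk sizes with made-up placeholders, purely to show why the copy is needed.

package main

import (
	"fmt"

	"golang.org/x/sync/errgroup"
)

func main() {
	var errGroup errgroup.Group
	chunkSizes := []int64{10, 20, 30} // made-up sizes, for illustration only
	startFrom := int64(0)

	for _, chunkSize := range chunkSizes {
		// startFrom is reassigned at the bottom of every iteration, so a goroutine
		// reading it directly could observe a later value. Copying it into a
		// variable declared inside the loop freezes what this goroutine sees.
		chunkStart := startFrom
		chunkEnd := startFrom + chunkSize - 1

		errGroup.Go(func() error {
			// chunkStart and chunkEnd never change after this goroutine starts.
			fmt.Printf("would request bytes %d-%d here\n", chunkStart, chunkEnd)
			return nil
		})

		startFrom = startFrom + chunkSize
	}

	if err := errGroup.Wait(); err != nil {
		fmt.Println("download failed:", err)
	}
}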