Ability to batch messages in topic reader #491

Merged · 5 commits · Sep 27, 2024
Changes from 3 commits
21 changes: 10 additions & 11 deletions tests/topics/test_topic_reader.py
@@ -40,15 +40,14 @@ async def test_read_and_commit_with_close_reader(self, driver, topic_with_messag
         assert message != message2

     async def test_read_and_commit_with_ack(self, driver, topic_with_messages, topic_consumer):
-        reader = driver.topic_client.reader(topic_with_messages, topic_consumer)
-        batch = await reader.receive_batch()
-        await reader.commit_with_ack(batch)
+        async with driver.topic_client.reader(topic_with_messages, topic_consumer) as reader:
+            message = await reader.receive_message()
+            await reader.commit_with_ack(message)

-        reader = driver.topic_client.reader(topic_with_messages, topic_consumer)
-        batch2 = await reader.receive_batch()
-        assert batch.messages[0] != batch2.messages[0]
+        async with driver.topic_client.reader(topic_with_messages, topic_consumer) as reader:
+            batch = await reader.receive_batch()

-        await reader.close()
+        assert message != batch.messages[0]

     async def test_read_compressed_messages(self, driver, topic_path, topic_consumer):
         async with driver.topic_client.writer(topic_path, codec=ydb.TopicCodec.GZIP) as writer:
@@ -147,12 +146,12 @@ def test_read_and_commit_with_close_reader(self, driver_sync, topic_with_message

     def test_read_and_commit_with_ack(self, driver_sync, topic_with_messages, topic_consumer):
         reader = driver_sync.topic_client.reader(topic_with_messages, topic_consumer)
-        batch = reader.receive_batch()
-        reader.commit_with_ack(batch)
+        message = reader.receive_message()
+        reader.commit_with_ack(message)

         reader = driver_sync.topic_client.reader(topic_with_messages, topic_consumer)
-        batch2 = reader.receive_batch()
-        assert batch.messages[0] != batch2.messages[0]
+        batch = reader.receive_batch()
+        assert message != batch.messages[0]

     def test_read_compressed_messages(self, driver_sync, topic_path, topic_consumer):
         with driver_sync.topic_client.writer(topic_path, codec=ydb.TopicCodec.GZIP) as writer:
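For reference, the updated tests exercise the reader in two modes: one message at a time with an explicit commit, then a whole batch on a fresh reader. A minimal usage sketch of that flow, assuming an already configured ydb.aio.Driver and an existing topic and consumer (the function name and parameters below are illustrative):

import ydb


async def read_one_then_batch(driver: "ydb.aio.Driver", topic: str, consumer: str):
    # Read a single message and commit it.
    async with driver.topic_client.reader(topic, consumer) as reader:
        message = await reader.receive_message()
        await reader.commit_with_ack(message)

    # Reopen the reader and take the next portion as a whole batch.
    async with driver.topic_client.reader(topic, consumer) as reader:
        batch = await reader.receive_batch()

    return message, batch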
38 changes: 20 additions & 18 deletions tests/topics/test_topic_writer.py
@@ -41,10 +41,13 @@ async def test_random_producer_id(self, driver: ydb.aio.Driver, topic_path, topi
         async with driver.topic_client.writer(topic_path) as writer:
             await writer.write(ydb.TopicWriterMessage(data="123".encode()))

-        batch1 = await topic_reader.receive_batch()
-        batch2 = await topic_reader.receive_batch()
+        batch = await topic_reader.receive_batch()
+
+        if len(batch.messages) == 1:
+            batch2 = await topic_reader.receive_batch()
+            batch.messages.extend(batch2.messages)

-        assert batch1.messages[0].producer_id != batch2.messages[0].producer_id
+        assert batch.messages[0].producer_id != batch.messages[1].producer_id

     async def test_auto_flush_on_close(self, driver: ydb.aio.Driver, topic_path):
         async with driver.topic_client.writer(
@@ -83,12 +86,12 @@ async def test_write_multi_message_with_ack(
         assert batch.messages[0].seqno == 1
         assert batch.messages[0].data == "123".encode()

-        # remove second recieve batch when implement batching
-        # https://github.com/ydb-platform/ydb-python-sdk/issues/142
-        batch = await topic_reader.receive_batch()
-        assert batch.messages[0].offset == 1
-        assert batch.messages[0].seqno == 2
-        assert batch.messages[0].data == "456".encode()
+        # # remove second recieve batch when implement batching
+        # # https://github.com/ydb-platform/ydb-python-sdk/issues/142
+        # batch = await topic_reader.receive_batch()
+        assert batch.messages[1].offset == 1
+        assert batch.messages[1].seqno == 2
+        assert batch.messages[1].data == "456".encode()

     @pytest.mark.parametrize(
         "codec",
@@ -186,10 +189,9 @@ def test_random_producer_id(
         with driver_sync.topic_client.writer(topic_path) as writer:
             writer.write(ydb.TopicWriterMessage(data="123".encode()))

-        batch1 = topic_reader_sync.receive_batch()
-        batch2 = topic_reader_sync.receive_batch()
+        batch = topic_reader_sync.receive_batch()

-        assert batch1.messages[0].producer_id != batch2.messages[0].producer_id
+        assert batch.messages[0].producer_id != batch.messages[1].producer_id

     def test_write_multi_message_with_ack(
         self, driver_sync: ydb.Driver, topic_path, topic_reader_sync: ydb.TopicReader
@@ -203,17 +205,17 @@ def test_write_multi_message_with_ack(
     ):

         batch = topic_reader_sync.receive_batch()
+        if len(batch.messages) == 1:
+            batch2 = topic_reader_sync.receive_batch()
+            batch.messages.extend(batch2.messages)

         assert batch.messages[0].offset == 0
         assert batch.messages[0].seqno == 1
         assert batch.messages[0].data == "123".encode()

-        # remove second recieve batch when implement batching
-        # https://github.com/ydb-platform/ydb-python-sdk/issues/142
-        batch = topic_reader_sync.receive_batch()
-        assert batch.messages[0].offset == 1
-        assert batch.messages[0].seqno == 2
-        assert batch.messages[0].data == "456".encode()
+        assert batch.messages[1].offset == 1
+        assert batch.messages[1].seqno == 2
+        assert batch.messages[1].data == "456".encode()

     @pytest.mark.parametrize(
         "codec",
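The writer tests above now tolerate the two written messages arriving either in one batch or split across two: if the first batch holds a single message, the next batch is read and its messages are merged in before the assertions. A small hypothetical helper capturing that pattern (receive_at_least is not part of the SDK, just a sketch of the test idiom):

def receive_at_least(reader, count):
    # Read batches and merge their messages until at least `count`
    # messages are available to assert on (sync reader API).
    batch = reader.receive_batch()
    while len(batch.messages) < count:
        extra = reader.receive_batch()
        batch.messages.extend(extra.messages)
    return batch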
40 changes: 29 additions & 11 deletions ydb/_topic_reader/topic_reader_asyncio.py
@@ -5,7 +5,7 @@
 import gzip
 import typing
 from asyncio import Task
-from collections import deque
+from collections import OrderedDict
 from typing import Optional, Set, Dict, Union, Callable

 import ydb
@@ -264,7 +264,7 @@ class ReaderStream:

     _state_changed: asyncio.Event
     _closed: bool
-    _message_batches: typing.Deque[datatypes.PublicBatch]
+    _message_batches: typing.Dict[int, datatypes.PublicBatch]

Review comment (Member), suggested change: add a clarifying comment to the new attribute:
    _message_batches: typing.Dict[int, datatypes.PublicBatch]  # keys are partition session ID

     _first_error: asyncio.Future[YdbError]

     _update_token_interval: Union[int, float]
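As the review suggestion above notes, the new _message_batches maps a partition session ID to the pending batch for that partition, and an OrderedDict keeps partitions in arrival order. A toy illustration of that shape, using plain ints and lists in place of real partition sessions and PublicBatch objects:

from collections import OrderedDict

# partition session id -> pending messages for that partition (stand-ins for PublicBatch)
message_batches = OrderedDict()
message_batches[17] = ["a1"]                # first batch arrives for partition session 17
message_batches[42] = ["b1", "b2"]          # then a batch for partition session 42
message_batches[17].extend(["a2", "a3"])    # a later batch for 17 is merged into the existing entry

# popitem(last=False) serves partitions in the order their first batch arrived
first_id, messages = message_batches.popitem(last=False)
assert first_id == 17 and messages == ["a1", "a2", "a3"]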
@@ -296,7 +296,7 @@ def __init__(
         self._closed = False
         self._first_error = asyncio.get_running_loop().create_future()
         self._batches_to_decode = asyncio.Queue()
-        self._message_batches = deque()
+        self._message_batches = OrderedDict()

         self._update_token_interval = settings.update_token_interval
         self._get_token_function = get_token_function
@@ -359,29 +359,38 @@ async def wait_messages(self):
             await self._state_changed.wait()
             self._state_changed.clear()

+    def _get_first_batch(self) -> typing.Tuple[int, datatypes.PublicBatch]:
+        first_id, batch = self._message_batches.popitem(last=False)

Review comment (Member): rename first_id to partition_session_id, so the meaning of the number is clear.

+        return first_id, batch
+
     def receive_batch_nowait(self):
         if self._get_first_error():
             raise self._get_first_error()

         if not self._message_batches:
             return None

-        batch = self._message_batches.popleft()
+        _, batch = self._get_first_batch()
         self._buffer_release_bytes(batch._bytes_size)

         return batch

     def receive_message_nowait(self):
         if self._get_first_error():
             raise self._get_first_error()

-        try:
-            batch = self._message_batches[0]
-            message = batch.pop_message()
-        except IndexError:
+        if not self._message_batches:
             return None

-        if batch.empty():
-            self.receive_batch_nowait()
+        part_sess_id, batch = self._get_first_batch()
+
+        message = batch.messages.pop(0)
+
+        if len(batch.messages) == 0:
+            self._buffer_release_bytes(batch._bytes_size)
+        else:
+            # TODO: we should somehow release bytes from single message as well
+            self._message_batches[part_sess_id] = batch

         return message
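The single-message path above pops the oldest batch, takes one message, and requeues the batch under its partition session ID when messages remain (buffer bytes are only released once a batch is fully drained, per the TODO). A stand-in sketch of that flow with a plain OrderedDict of lists instead of PublicBatch objects:

from collections import OrderedDict


def pop_single_message(batches):
    # Toy mirror of receive_message_nowait: take the oldest batch,
    # remove one message, and requeue the batch if it is not drained yet.
    if not batches:
        return None
    part_sess_id, messages = batches.popitem(last=False)
    message = messages.pop(0)
    if messages:
        batches[part_sess_id] = messages
    return message


queue = OrderedDict({7: ["m1", "m2"]})
assert pop_single_message(queue) == "m1"
assert list(queue[7]) == ["m2"]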

@@ -605,9 +614,18 @@ async def _decode_batches_loop(self):
         while True:
             batch = await self._batches_to_decode.get()
             await self._decode_batch_inplace(batch)
-            self._message_batches.append(batch)
+            self._add_batch_to_queue(batch)
             self._state_changed.set()

+    def _add_batch_to_queue(self, batch: datatypes.PublicBatch):
+        part_sess_id = batch._partition_session.id
+        if part_sess_id in self._message_batches:
+            self._message_batches[part_sess_id].messages.extend(batch.messages)

Review comment (Member): What about implementing _push/_pop methods for PublicBatch? It would make it easier to refactor the internals in the future.

+            self._message_batches[part_sess_id]._bytes_size += batch._bytes_size
+            return
+
+        self._message_batches[part_sess_id] = batch
+
     async def _decode_batch_inplace(self, batch):
         if batch._codec == Codec.CODEC_RAW:
             return
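Following up on the _push/_pop idea from the review comment above, one possible shape for such helpers is sketched below; this is only an illustration under assumed field names (messages, _bytes_size) and is not what the PR implements:

class PublicBatchSketch:
    # Only the fields needed for the sketch; the real PublicBatch has more.
    def __init__(self, messages, bytes_size):
        self.messages = messages
        self._bytes_size = bytes_size

    def _push(self, other):
        # Merge another batch from the same partition session into this one.
        self.messages.extend(other.messages)
        self._bytes_size += other._bytes_size

    def _pop(self):
        # Remove and return the oldest message in the batch.
        return self.messages.pop(0)

    def empty(self):
        return len(self.messages) == 0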