From 5d7602be53dcf74bb7d8fcc73209566e9c12c007 Mon Sep 17 00:00:00 2001
From: harishmohanraj
Date: Mon, 9 Oct 2023 11:50:53 +0000
Subject: [PATCH 1/5] Copy files from faststream repo

---
 .pre-commit-config.yaml | 1 +
 .secrets.baseline | 15 +-
 search/docs/api/faststream/app/ABCApp.md | 3 +
 search/docs/api/faststream/app/FastStream.md | 3 +
 .../asyncapi/base/AsyncAPIOperation.md | 3 +
 .../generate/get_app_broker_channels.md | 3 +
 .../generate/get_app_broker_server.md | 3 +
 .../asyncapi/generate/get_app_schema.md | 3 +
 .../asyncapi/message/get_model_schema.md | 3 +
 .../asyncapi/message/get_response_schema.md | 3 +
 .../asyncapi/message/parse_handler_params.md | 3 +
 .../schema/bindings/amqp/ChannelBinding.md | 3 +
 .../asyncapi/schema/bindings/amqp/Exchange.md | 3 +
 .../schema/bindings/amqp/OperationBinding.md | 3 +
 .../asyncapi/schema/bindings/amqp/Queue.md | 3 +
 .../schema/bindings/amqp/ServerBinding.md | 3 +
 .../schema/bindings/kafka/ChannelBinding.md | 3 +
 .../schema/bindings/kafka/OperationBinding.md | 3 +
 .../schema/bindings/kafka/ServerBinding.md | 3 +
 .../schema/bindings/main/ChannelBinding.md | 3 +
 .../schema/bindings/main/OperationBinding.md | 3 +
 .../schema/bindings/main/ServerBinding.md | 3 +
 .../schema/bindings/nats/ChannelBinding.md | 3 +
 .../schema/bindings/nats/OperationBinding.md | 3 +
 .../schema/bindings/nats/ServerBinding.md | 3 +
 .../schema/bindings/redis/ChannelBinding.md | 3 +
 .../schema/bindings/redis/OperationBinding.md | 3 +
 .../schema/bindings/redis/ServerBinding.md | 3 +
 .../schema/bindings/sqs/ChannelBinding.md | 3 +
 .../schema/bindings/sqs/OperationBinding.md | 3 +
 .../schema/bindings/sqs/ServerBinding.md | 3 +
 .../asyncapi/schema/channels/Channel.md | 3 +
 .../asyncapi/schema/info/Contact.md | 3 +
 .../asyncapi/schema/info/ContactDict.md | 3 +
 .../asyncapi/schema/info/EmailStr.md | 3 +
 .../faststream/asyncapi/schema/info/Info.md | 3 +
 .../asyncapi/schema/info/License.md | 3 +
 .../asyncapi/schema/info/LicenseDict.md | 3 +
 .../asyncapi/schema/main/Components.md | 3 +
 .../faststream/asyncapi/schema/main/Schema.md | 3 +
 .../asyncapi/schema/message/CorrelationId.md | 3 +
 .../asyncapi/schema/message/Message.md | 3 +
 .../asyncapi/schema/operations/Operation.md | 3 +
 .../asyncapi/schema/security/OauthFlowObj.md | 3 +
 .../asyncapi/schema/security/OauthFlows.md | 3 +
 .../security/SecuritySchemaComponent.md | 3 +
 .../asyncapi/schema/servers/Server.md | 3 +
 .../asyncapi/schema/servers/ServerVariable.md | 3 +
 .../asyncapi/schema/utils/ExternalDocs.md | 3 +
 .../asyncapi/schema/utils/ExternalDocsDict.md | 3 +
 .../asyncapi/schema/utils/Parameter.md | 3 +
 .../asyncapi/schema/utils/Reference.md | 3 +
 .../faststream/asyncapi/schema/utils/Tag.md | 3 +
 .../asyncapi/schema/utils/TagDict.md | 3 +
 .../asyncapi/site/get_asyncapi_html.md | 3 +
 .../api/faststream/asyncapi/site/serve_app.md | 3 +
 .../asyncapi/utils/resolve_payloads.md | 3 +
 .../faststream/asyncapi/utils/to_camelcase.md | 3 +
 .../broker/core/abc/BrokerUsecase.md | 3 +
 .../broker/core/abc/extend_dependencies.md | 3 +
 .../core/asyncronous/BrokerAsyncUsecase.md | 3 +
 .../broker/core/asyncronous/default_filter.md | 3 +
 .../broker/core/mixins/LoggingMixin.md | 3 +
 .../broker/fastapi/route/StreamMessage.md | 3 +
 .../broker/fastapi/route/StreamRoute.md | 3 +
 .../broker/fastapi/route/get_app.md | 3 +
 .../broker/fastapi/router/StreamRouter.md | 3 +
 .../faststream/broker/handler/AsyncHandler.md | 3 +
 .../faststream/broker/handler/BaseHandler.md | 3 +
 .../broker/message/ABCStreamMessage.md | 3 +
 .../broker/message/StreamMessage.md | 3 +
 .../broker/message/SyncStreamMessage.md | 3 +
 .../broker/middlewares/BaseMiddleware.md | 3 +
 .../middlewares/CriticalLogMiddleware.md | 3 +
 .../broker/parsers/decode_message.md | 3 +
 .../broker/parsers/encode_message.md | 3 +
 .../broker/parsers/resolve_custom_func.md | 3 +
 .../broker/publisher/BasePublisher.md | 3 +
 .../broker/push_back_watcher/BaseWatcher.md | 3 +
 .../push_back_watcher/CounterWatcher.md | 3 +
 .../push_back_watcher/EndlessWatcher.md | 3 +
 .../broker/push_back_watcher/OneTryWatcher.md | 3 +
 .../push_back_watcher/WatcherContext.md | 3 +
 .../faststream/broker/router/BrokerRoute.md | 3 +
 .../faststream/broker/router/BrokerRouter.md | 3 +
 .../faststream/broker/schemas/NameRequired.md | 3 +
 .../faststream/broker/schemas/RawDecoced.md | 3 +
 .../broker/security/BaseSecurity.md | 3 +
 .../broker/security/SASLPlaintext.md | 3 +
 .../broker/security/SASLScram256.md | 3 +
 .../broker/security/SASLScram512.md | 3 +
 .../api/faststream/broker/test/TestApp.md | 3 +
 .../api/faststream/broker/test/TestBroker.md | 3 +
 .../faststream/broker/test/call_handler.md | 3 +
 .../broker/test/patch_broker_calls.md | 3 +
 .../broker/types/AsyncPublisherProtocol.md | 3 +
 .../broker/utils/change_logger_handlers.md | 3 +
 .../faststream/broker/utils/get_watcher.md | 3 +
 .../broker/utils/set_message_context.md | 3 +
 .../broker/wrapper/FakePublisher.md | 3 +
 .../broker/wrapper/HandlerCallWrapper.md | 3 +
 .../docs/api/faststream/cli/docs/app/gen.md | 3 +
 .../docs/api/faststream/cli/docs/app/serve.md | 3 +
 search/docs/api/faststream/cli/main/main.md | 3 +
 search/docs/api/faststream/cli/main/run.md | 3 +
 .../faststream/cli/main/version_callback.md | 3 +
 .../cli/supervisors/basereload/BaseReload.md | 3 +
 .../supervisors/multiprocess/Multiprocess.md | 3 +
 .../cli/supervisors/utils/get_subprocess.md | 3 +
 .../cli/supervisors/utils/set_exit.md | 3 +
 .../supervisors/utils/subprocess_started.md | 3 +
 .../supervisors/watchfiles/ExtendedFilter.md | 3 +
 .../supervisors/watchfiles/WatchReloader.md | 3 +
 .../cli/utils/imports/get_app_path.md | 3 +
 .../cli/utils/imports/import_object.md | 3 +
 .../cli/utils/imports/try_import_app.md | 3 +
 .../faststream/cli/utils/logs/LogLevels.md | 3 +
 .../cli/utils/logs/get_log_level.md | 3 +
 .../cli/utils/logs/set_log_level.md | 3 +
 .../cli/utils/parser/parse_cli_args.md | 3 +
 .../cli/utils/parser/remove_prefix.md | 3 +
 .../api/faststream/constants/ContentTypes.md | 3 +
 .../api/faststream/exceptions/AckMessage.md | 3 +
 .../faststream/exceptions/HandlerException.md | 3 +
 .../api/faststream/exceptions/NackMessage.md | 3 +
 .../faststream/exceptions/RejectMessage.md | 3 +
 .../api/faststream/exceptions/SkipMessage.md | 3 +
 .../api/faststream/exceptions/StopConsume.md | 3 +
 .../api/faststream/kafka/asyncapi/Handler.md | 3 +
 .../faststream/kafka/asyncapi/Publisher.md | 3 +
 .../faststream/kafka/broker/KafkaBroker.md | 3 +
 .../faststream/kafka/fastapi/KafkaRouter.md | 3 +
 .../faststream/kafka/handler/LogicHandler.md | 3 +
 .../faststream/kafka/message/KafkaMessage.md | 3 +
 .../faststream/kafka/parser/AioKafkaParser.md | 3 +
 .../kafka/producer/AioKafkaFastProducer.md | 3 +
 .../kafka/publisher/LogicPublisher.md | 3 +
 .../faststream/kafka/router/KafkaRouter.md | 3 +
 .../kafka/security/parse_security.md | 3 +
 .../kafka/shared/logging/KafkaLoggingMixin.md | 3 +
 .../kafka/shared/publisher/ABCPublisher.md | 3 +
 .../kafka/shared/router/KafkaRouter.md | 3 +
 .../schemas/ConsumerConnectionParams.md | 3 +
 .../api/faststream/kafka/test/FakeProducer.md | 3 +
 .../faststream/kafka/test/TestKafkaBroker.md | 3 +
 .../faststream/kafka/test/build_message.md | 3 +
 .../log/formatter/ColourizedFormatter.md | 3 +
 .../log/formatter/expand_log_field.md | 3 +
 .../log/formatter/make_record_with_extra.md | 3 +
 .../log/logging/configure_formatter.md | 3 +
 .../api/faststream/nats/asyncapi/Handler.md | 3 +
 .../api/faststream/nats/asyncapi/Publisher.md | 3 +
 .../api/faststream/nats/broker/NatsBroker.md | 3 +
 .../api/faststream/nats/fastapi/NatsRouter.md | 3 +
 .../nats/handler/LogicNatsHandler.md | 3 +
 .../faststream/nats/helpers/StreamBuilder.md | 3 +
 .../api/faststream/nats/js_stream/JStream.md | 3 +
 .../faststream/nats/message/NatsMessage.md | 3 +
 .../api/faststream/nats/parser/NatsParser.md | 3 +
 .../nats/producer/NatsFastProducer.md | 3 +
 .../nats/producer/NatsJSFastProducer.md | 3 +
 .../nats/publisher/LogicPublisher.md | 3 +
 .../api/faststream/nats/router/NatsRouter.md | 3 +
 .../nats/shared/logging/NatsLoggingMixin.md | 3 +
 .../nats/shared/router/NatsRouter.md | 3 +
 .../api/faststream/nats/test/FakeProducer.md | 3 +
 .../faststream/nats/test/PatchedMessage.md | 3 +
 .../faststream/nats/test/TestNatsBroker.md | 3 +
 .../api/faststream/nats/test/build_message.md | 3 +
 .../api/faststream/rabbit/asyncapi/Handler.md | 3 +
 .../faststream/rabbit/asyncapi/Publisher.md | 3 +
 .../rabbit/asyncapi/RMQAsyncAPIChannel.md | 3 +
 .../faststream/rabbit/broker/RabbitBroker.md | 3 +
 .../faststream/rabbit/fastapi/RabbitRouter.md | 3 +
 .../faststream/rabbit/handler/LogicHandler.md | 3 +
 .../rabbit/helpers/RabbitDeclarer.md | 3 +
 .../rabbit/message/RabbitMessage.md | 3 +
 .../faststream/rabbit/parser/AioPikaParser.md | 3 +
 .../rabbit/producer/AioPikaFastProducer.md | 3 +
 .../rabbit/publisher/LogicPublisher.md | 3 +
 .../faststream/rabbit/router/RabbitRouter.md | 3 +
 .../rabbit/security/parse_security.md | 3 +
 .../rabbit/shared/constants/ExchangeType.md | 3 +
 .../shared/logging/RabbitLoggingMixin.md | 3 +
 .../rabbit/shared/publisher/ABCPublisher.md | 3 +
 .../rabbit/shared/router/RabbitRouter.md | 3 +
 .../shared/schemas/BaseRMQInformation.md | 3 +
 .../rabbit/shared/schemas/RabbitExchange.md | 3 +
 .../rabbit/shared/schemas/RabbitQueue.md | 3 +
 .../rabbit/shared/schemas/get_routing_hash.md | 3 +
 .../faststream/rabbit/test/FakeProducer.md | 3 +
 .../faststream/rabbit/test/PatchedMessage.md | 3 +
 .../rabbit/test/TestRabbitBroker.md | 3 +
 .../faststream/rabbit/test/build_message.md | 3 +
 .../api/faststream/utils/classes/Singleton.md | 3 +
 .../utils/context/main/ContextRepo.md | 3 +
 .../faststream/utils/context/types/Context.md | 3 +
 .../utils/context/types/resolve_context.md | 3 +
 .../faststream/utils/data/filter_by_dict.md | 3 +
 .../get_function_positional_arguments.md | 3 +
 .../utils/functions/timeout_scope.md | 3 +
 .../faststream/utils/functions/to_async.md | 3 +
 .../api/faststream/utils/no_cast/NoCast.md | 3 +
 .../docs/getting-started/asyncapi/custom.md | 188 ++++++++++
 .../docs/getting-started/asyncapi/export.md | 56 +++
 .../docs/getting-started/asyncapi/hosting.md | 38 ++
 search/docs/getting-started/cli/index.md | 128 +++++++
 search/docs/getting-started/config/index.md | 167 +++++++++
 search/docs/getting-started/context/custom.md | 83 +++++
 .../docs/getting-started/context/existed.md | 82 ++++
 search/docs/getting-started/context/extra.md | 60 +++
 search/docs/getting-started/context/fields.md | 11 +
 search/docs/getting-started/context/index.md | 66 ++++
 .../contributing/CONTRIBUTING.md | 143 +++++++
 .../docs/getting-started/contributing/docs.md | 44 +++
 .../getting-started/dependencies/class.md | 0
 .../getting-started/dependencies/global.md | 3 +
 .../getting-started/dependencies/index.md | 163 ++++++++
 .../docs/getting-started/dependencies/sub.md | 0
 .../getting-started/dependencies/testing.md | 3 +
 .../getting-started/dependencies/yield.md | 0
 search/docs/getting-started/index.md | 39 ++
 .../integrations/fastapi/index.md | 73 ++++
 .../integrations/frameworks/index.md | 16 +
 search/docs/getting-started/lifespan/hooks.md | 119 ++++++
 search/docs/getting-started/lifespan/index.md | 11 +
 search/docs/getting-started/lifespan/test.md | 12 +
 search/docs/getting-started/logging.md | 208 +++++++++++
 .../docs/getting-started/middlewares/index.md | 85 +++++
 .../docs/getting-started/publishing/broker.md | 20 +
 .../getting-started/publishing/decorator.md | 34 ++
 .../docs/getting-started/publishing/direct.md | 27 ++
 .../docs/getting-started/publishing/index.md | 35 ++
 .../docs/getting-started/publishing/object.md | 34 ++
 .../docs/getting-started/publishing/test.md | 62 +++
 search/docs/getting-started/routers/index.md | 51 +++
 .../getting-started/serialization/decoder.md | 84 +++++
 .../getting-started/serialization/examples.md | 123 ++++++
 .../getting-started/serialization/index.md | 20 +
 .../getting-started/serialization/parser.md | 106 ++++++
 .../subscription/annotation.md | 61 +++
 .../getting-started/subscription/filtering.md | 60 +++
 .../getting-started/subscription/index.md | 138 +++++++
 .../getting-started/subscription/pydantic.md | 43 +++
 .../docs/getting-started/subscription/test.md | 152 ++++++++
 search/docs/getting-started/template/index.md | 207 ++++++++++
 search/docs/index.md | 352 ++++++++++++++++++
 .../docs/kafka/Publisher/batch_publisher.md | 96 +++++
 search/docs/kafka/Publisher/index.md | 126 +++++++
 search/docs/kafka/Publisher/using_a_key.md | 61 +++
 .../docs/kafka/Subscriber/batch_subscriber.md | 58 +++
 search/docs/kafka/Subscriber/index.md | 78 ++++
 search/docs/kafka/ack.md | 88 +++++
 search/docs/kafka/index.md | 61 +++
 search/docs/kafka/message.md | 53 +++
 search/docs/nats/examples/direct.md | 96 +++++
 search/docs/nats/examples/pattern.md | 87 +++++
 search/docs/nats/index.md | 35 ++
 search/docs/nats/jetstream/ack.md | 78 ++++
 search/docs/nats/jetstream/index.md | 53 +++
 search/docs/nats/jetstream/key-value.md | 107 ++++++
 search/docs/nats/jetstream/object.md | 111 ++++++
 search/docs/nats/message.md | 104 ++++++
 search/docs/nats/publishing/index.md | 40 ++
 search/docs/nats/rpc.md | 45 +++
 search/docs/rabbit/ack.md | 90 +++++
 search/docs/rabbit/declare.md | 40 ++
 search/docs/rabbit/examples/direct.md | 129 +++++++
 search/docs/rabbit/examples/fanout.md | 93 +++++
 search/docs/rabbit/examples/headers.md | 181 +++++++++
 search/docs/rabbit/examples/index.md | 36 ++
 search/docs/rabbit/examples/stream.md | 36 ++
 search/docs/rabbit/examples/topic.md | 113 ++++++
 search/docs/rabbit/index.md | 69 ++++
 search/docs/rabbit/message.md | 104 ++++++
 search/docs/rabbit/publishing.md | 83 +++++
 search/docs/rabbit/rpc.md | 42 +++
 search/docs/release.md | 36 ++
 search/examples/__init__.py | 0
 .../__init__.py | 0
 .../example_add_and_publish_with_key/app.py | 55 +++
 .../app_skeleton.py | 58 +++
 .../description.txt | 12 +
 .../test_app.py | 33 ++
 .../__init__.py | 0
 .../example_add_and_publish_with_key2/app.py | 54 +++
 .../app_skeleton.py | 54 +++
 .../description.txt | 11 +
 .../test_app.py | 21 ++
 .../__init__.py | 0
 .../example_calculate_mean_temperature/app.py | 53 +++
 .../app_skeleton.py | 57 +++
 .../description.txt | 11 +
 .../test_app.py | 47 +++
 .../__init__.py | 0
 .../example_consume_publish_with_key/app.py | 36 ++
 .../app_skeleton.py | 45 +++
 .../description.txt | 10 +
 .../test_app.py | 30 ++
 .../example_course_updates/__init__.py | 0
 search/examples/example_course_updates/app.py | 38 ++
 .../example_course_updates/app_skeleton.py | 40 ++
 .../example_course_updates/description.txt | 4 +
 .../example_course_updates/test_app.py | 64 ++++
 .../example_execute_trade/__init__.py | 0
 search/examples/example_execute_trade/app.py | 33 ++
 .../example_execute_trade/app_skeleton.py | 32 ++
 .../example_execute_trade/description.txt | 3 +
 .../example_execute_trade/test_app.py | 36 ++
 .../__init__.py | 0
 .../example_forward_to_another_topic/app.py | 24 ++
 .../app_skeleton.py | 31 ++
 .../description.txt | 2 +
 .../test_app.py | 24 ++
 .../__init__.py | 0
 .../example_forward_to_multiple_topics/app.py | 14 +
 .../app_skeleton.py | 20 +
 .../description.txt | 2 +
 .../test_app.py | 29 ++
 .../example_forward_with_security/__init__.py | 0
 .../example_forward_with_security/app.py | 28 ++
 .../app_skeleton.py | 36 ++
 .../description.txt | 3 +
 .../example_forward_with_security/test_app.py | 24 ++
 .../example_infinity_publishing/__init__.py | 0
 .../example_infinity_publishing/app.py | 45 +++
 .../app_skeleton.py | 47 +++
 .../description.txt | 2 +
 .../example_infinity_publishing/test_app.py | 21 ++
 .../example_investment_updates/__init__.py | 0
 .../example_investment_updates/app.py | 32 ++
 .../app_skeleton.py | 38 ++
 .../description.txt | 3 +
 .../example_investment_updates/test_app.py | 42 +++
 .../__init__.py | 0
 .../app.py | 27 ++
 .../app_skeleton.py | 34 ++
 .../description.txt | 3 +
 .../test_app.py | 16 +
 .../examples/example_new_employee/__init__.py | 0
 search/examples/example_new_employee/app.py | 31 +
 .../example_new_employee/app_skeleton.py | 35 ++
 .../example_new_employee/description.txt | 5 +
 .../examples/example_new_employee/test_app.py | 33 ++
 search/examples/example_pets/__init__.py | 0
 search/examples/example_pets/app.py | 22 ++
 search/examples/example_pets/app_skeleton.py | 28 ++
 search/examples/example_pets/description.txt | 3 +
 search/examples/example_pets/test_app.py | 20 +
 search/examples/example_plants/__init__.py | 0
 search/examples/example_plants/app.py | 27 ++
 .../examples/example_plants/app_skeleton.py | 33 ++
 .../examples/example_plants/description.txt | 2 +
 search/examples/example_plants/test_app.py | 43 +++
 .../example_product_reviews/__init__.py | 0
 .../examples/example_product_reviews/app.py | 37 ++
 .../example_product_reviews/app_skeleton.py | 45 +++
 .../example_product_reviews/description.txt | 3 +
 .../example_product_reviews/test_app.py | 54 +++
 .../example_publish_with_key/__init__.py | 0
 .../examples/example_publish_with_key/app.py | 28 ++
 .../example_publish_with_key/app_skeleton.py | 37 ++
 .../example_publish_with_key/description.txt | 8 +
 .../example_publish_with_key/test_app.py | 19 +
 .../example_scram256_security/__init__.py | 0
 .../examples/example_scram256_security/app.py | 37 ++
 .../example_scram256_security/app_skeleton.py | 43 +++
 .../example_scram256_security/description.txt | 5 +
 .../example_scram256_security/test_app.py | 40 ++
 .../example_scram512_security/__init__.py | 0
 .../examples/example_scram512_security/app.py | 37 ++
 .../example_scram512_security/app_skeleton.py | 44 +++
 .../example_scram512_security/description.txt | 4 +
 .../example_scram512_security/test_app.py | 34 ++
 .../example_scrape_weather/__init__.py | 0
 search/examples/example_scrape_weather/app.py | 103 +++++
 .../example_scrape_weather/app_skeleton.py | 89 +++++
 .../example_scrape_weather/description.txt | 26 ++
 .../example_scrape_weather/test_app.py | 19 +
 .../example_social_media_post/__init__.py | 0
 .../examples/example_social_media_post/app.py | 34 ++
 .../example_social_media_post/app_skeleton.py | 49 +++
 .../example_social_media_post/description.txt | 4 +
 .../example_social_media_post/test_app.py | 38 ++
 .../example_student_query/__init__.py | 0
 search/examples/example_student_query/app.py | 46 +++
 .../example_student_query/app_skeleton.py | 49 +++
 .../example_student_query/description.txt | 4 +
 .../example_student_query/test_app.py | 58 +++
 .../example_weather_updates/__init__.py | 0
 .../examples/example_weather_updates/app.py | 30 ++
 .../example_weather_updates/app_skeleton.py | 35 ++
 .../example_weather_updates/description.txt | 3 +
 .../example_weather_updates/test_app.py | 44 +++
 394 files changed, 9225 insertions(+), 2 deletions(-)

 create mode 100644 search/docs/api/faststream/app/ABCApp.md
 create mode 100644 search/docs/api/faststream/app/FastStream.md
 create mode 100644 search/docs/api/faststream/asyncapi/base/AsyncAPIOperation.md
 create mode 100644 search/docs/api/faststream/asyncapi/generate/get_app_broker_channels.md
 create mode 100644 search/docs/api/faststream/asyncapi/generate/get_app_broker_server.md
 create mode 100644 search/docs/api/faststream/asyncapi/generate/get_app_schema.md
 create mode 100644 search/docs/api/faststream/asyncapi/message/get_model_schema.md
 create mode 100644 search/docs/api/faststream/asyncapi/message/get_response_schema.md
 create mode 100644 search/docs/api/faststream/asyncapi/message/parse_handler_params.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/amqp/ChannelBinding.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/amqp/Exchange.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/amqp/OperationBinding.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/amqp/Queue.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/amqp/ServerBinding.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/kafka/ChannelBinding.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/kafka/OperationBinding.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/kafka/ServerBinding.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/main/ChannelBinding.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/main/OperationBinding.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/main/ServerBinding.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/nats/ChannelBinding.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/nats/OperationBinding.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/nats/ServerBinding.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/redis/ChannelBinding.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/redis/OperationBinding.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/redis/ServerBinding.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/sqs/ChannelBinding.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/sqs/OperationBinding.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/sqs/ServerBinding.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/channels/Channel.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/info/Contact.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/info/ContactDict.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/info/EmailStr.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/info/Info.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/info/License.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/info/LicenseDict.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/main/Components.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/main/Schema.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/message/CorrelationId.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/message/Message.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/operations/Operation.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/security/OauthFlowObj.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/security/OauthFlows.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/security/SecuritySchemaComponent.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/servers/Server.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/servers/ServerVariable.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/utils/ExternalDocs.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/utils/ExternalDocsDict.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/utils/Parameter.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/utils/Reference.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/utils/Tag.md
 create mode 100644 search/docs/api/faststream/asyncapi/schema/utils/TagDict.md
 create mode 100644 search/docs/api/faststream/asyncapi/site/get_asyncapi_html.md
 create mode 100644 search/docs/api/faststream/asyncapi/site/serve_app.md
 create mode 100644 search/docs/api/faststream/asyncapi/utils/resolve_payloads.md
 create mode 100644 search/docs/api/faststream/asyncapi/utils/to_camelcase.md
 create mode 100644 search/docs/api/faststream/broker/core/abc/BrokerUsecase.md
 create mode 100644 search/docs/api/faststream/broker/core/abc/extend_dependencies.md
 create mode 100644 search/docs/api/faststream/broker/core/asyncronous/BrokerAsyncUsecase.md
 create mode 100644 search/docs/api/faststream/broker/core/asyncronous/default_filter.md
 create mode 100644 search/docs/api/faststream/broker/core/mixins/LoggingMixin.md
 create mode 100644 search/docs/api/faststream/broker/fastapi/route/StreamMessage.md
 create mode 100644 search/docs/api/faststream/broker/fastapi/route/StreamRoute.md
 create mode 100644 search/docs/api/faststream/broker/fastapi/route/get_app.md
 create mode 100644 search/docs/api/faststream/broker/fastapi/router/StreamRouter.md
 create mode 100644 search/docs/api/faststream/broker/handler/AsyncHandler.md
 create mode 100644 search/docs/api/faststream/broker/handler/BaseHandler.md
 create mode 100644 search/docs/api/faststream/broker/message/ABCStreamMessage.md
 create mode 100644 search/docs/api/faststream/broker/message/StreamMessage.md
 create mode 100644 search/docs/api/faststream/broker/message/SyncStreamMessage.md
 create mode 100644 search/docs/api/faststream/broker/middlewares/BaseMiddleware.md
 create mode 100644 search/docs/api/faststream/broker/middlewares/CriticalLogMiddleware.md
 create mode 100644 search/docs/api/faststream/broker/parsers/decode_message.md
 create mode 100644 search/docs/api/faststream/broker/parsers/encode_message.md
 create mode 100644 search/docs/api/faststream/broker/parsers/resolve_custom_func.md
 create mode 100644 search/docs/api/faststream/broker/publisher/BasePublisher.md
 create mode 100644 search/docs/api/faststream/broker/push_back_watcher/BaseWatcher.md
 create mode 100644 search/docs/api/faststream/broker/push_back_watcher/CounterWatcher.md
 create mode 100644 search/docs/api/faststream/broker/push_back_watcher/EndlessWatcher.md
 create mode 100644 search/docs/api/faststream/broker/push_back_watcher/OneTryWatcher.md
 create mode 100644 search/docs/api/faststream/broker/push_back_watcher/WatcherContext.md
 create mode 100644 search/docs/api/faststream/broker/router/BrokerRoute.md
 create mode 100644 search/docs/api/faststream/broker/router/BrokerRouter.md
 create mode 100644 search/docs/api/faststream/broker/schemas/NameRequired.md
 create mode 100644 search/docs/api/faststream/broker/schemas/RawDecoced.md
 create mode 100644 search/docs/api/faststream/broker/security/BaseSecurity.md
 create mode 100644 search/docs/api/faststream/broker/security/SASLPlaintext.md
 create mode 100644 search/docs/api/faststream/broker/security/SASLScram256.md
 create mode 100644 search/docs/api/faststream/broker/security/SASLScram512.md
 create mode 100644 search/docs/api/faststream/broker/test/TestApp.md
 create mode 100644 search/docs/api/faststream/broker/test/TestBroker.md
 create mode 100644 search/docs/api/faststream/broker/test/call_handler.md
 create mode 100644 search/docs/api/faststream/broker/test/patch_broker_calls.md
 create mode 100644 search/docs/api/faststream/broker/types/AsyncPublisherProtocol.md
 create mode 100644 search/docs/api/faststream/broker/utils/change_logger_handlers.md
 create mode 100644 search/docs/api/faststream/broker/utils/get_watcher.md
 create mode 100644 search/docs/api/faststream/broker/utils/set_message_context.md
 create mode 100644 search/docs/api/faststream/broker/wrapper/FakePublisher.md
 create mode 100644 search/docs/api/faststream/broker/wrapper/HandlerCallWrapper.md
 create mode 100644 search/docs/api/faststream/cli/docs/app/gen.md
 create mode 100644 search/docs/api/faststream/cli/docs/app/serve.md
 create mode 100644 search/docs/api/faststream/cli/main/main.md
 create mode 100644 search/docs/api/faststream/cli/main/run.md
 create mode 100644 search/docs/api/faststream/cli/main/version_callback.md
 create mode 100644 search/docs/api/faststream/cli/supervisors/basereload/BaseReload.md
 create mode 100644 search/docs/api/faststream/cli/supervisors/multiprocess/Multiprocess.md
 create mode 100644 search/docs/api/faststream/cli/supervisors/utils/get_subprocess.md
 create mode 100644 search/docs/api/faststream/cli/supervisors/utils/set_exit.md
 create mode 100644 search/docs/api/faststream/cli/supervisors/utils/subprocess_started.md
 create mode 100644 search/docs/api/faststream/cli/supervisors/watchfiles/ExtendedFilter.md
 create mode 100644 search/docs/api/faststream/cli/supervisors/watchfiles/WatchReloader.md
 create mode 100644 search/docs/api/faststream/cli/utils/imports/get_app_path.md
 create mode 100644 search/docs/api/faststream/cli/utils/imports/import_object.md
 create mode 100644 search/docs/api/faststream/cli/utils/imports/try_import_app.md
 create mode 100644 search/docs/api/faststream/cli/utils/logs/LogLevels.md
 create mode 100644 search/docs/api/faststream/cli/utils/logs/get_log_level.md
 create mode 100644 search/docs/api/faststream/cli/utils/logs/set_log_level.md
 create mode 100644 search/docs/api/faststream/cli/utils/parser/parse_cli_args.md
 create mode 100644 search/docs/api/faststream/cli/utils/parser/remove_prefix.md
 create mode 100644 search/docs/api/faststream/constants/ContentTypes.md
 create mode 100644 search/docs/api/faststream/exceptions/AckMessage.md
 create mode 100644 search/docs/api/faststream/exceptions/HandlerException.md
 create mode 100644 search/docs/api/faststream/exceptions/NackMessage.md
 create mode 100644 search/docs/api/faststream/exceptions/RejectMessage.md
 create mode 100644 search/docs/api/faststream/exceptions/SkipMessage.md
 create mode 100644 search/docs/api/faststream/exceptions/StopConsume.md
 create mode 100644 search/docs/api/faststream/kafka/asyncapi/Handler.md
 create mode 100644 search/docs/api/faststream/kafka/asyncapi/Publisher.md
 create mode 100644 search/docs/api/faststream/kafka/broker/KafkaBroker.md
 create mode 100644 search/docs/api/faststream/kafka/fastapi/KafkaRouter.md
 create mode 100644 search/docs/api/faststream/kafka/handler/LogicHandler.md
 create mode 100644 search/docs/api/faststream/kafka/message/KafkaMessage.md
 create mode 100644 search/docs/api/faststream/kafka/parser/AioKafkaParser.md
 create mode 100644 search/docs/api/faststream/kafka/producer/AioKafkaFastProducer.md
 create mode 100644 search/docs/api/faststream/kafka/publisher/LogicPublisher.md
 create mode 100644 search/docs/api/faststream/kafka/router/KafkaRouter.md
 create mode 100644 search/docs/api/faststream/kafka/security/parse_security.md
 create mode 100644 search/docs/api/faststream/kafka/shared/logging/KafkaLoggingMixin.md
 create mode 100644 search/docs/api/faststream/kafka/shared/publisher/ABCPublisher.md
 create mode 100644 search/docs/api/faststream/kafka/shared/router/KafkaRouter.md
 create mode 100644 search/docs/api/faststream/kafka/shared/schemas/ConsumerConnectionParams.md
 create mode 100644 search/docs/api/faststream/kafka/test/FakeProducer.md
 create mode 100644 search/docs/api/faststream/kafka/test/TestKafkaBroker.md
 create mode 100644 search/docs/api/faststream/kafka/test/build_message.md
 create mode 100644 search/docs/api/faststream/log/formatter/ColourizedFormatter.md
 create mode 100644 search/docs/api/faststream/log/formatter/expand_log_field.md
 create mode 100644 search/docs/api/faststream/log/formatter/make_record_with_extra.md
 create mode 100644 search/docs/api/faststream/log/logging/configure_formatter.md
 create mode 100644 search/docs/api/faststream/nats/asyncapi/Handler.md
 create mode 100644 search/docs/api/faststream/nats/asyncapi/Publisher.md
 create mode 100644 search/docs/api/faststream/nats/broker/NatsBroker.md
 create mode 100644 search/docs/api/faststream/nats/fastapi/NatsRouter.md
 create mode 100644 search/docs/api/faststream/nats/handler/LogicNatsHandler.md
 create mode 100644 search/docs/api/faststream/nats/helpers/StreamBuilder.md
 create mode 100644 search/docs/api/faststream/nats/js_stream/JStream.md
 create mode 100644 search/docs/api/faststream/nats/message/NatsMessage.md
 create mode 100644 search/docs/api/faststream/nats/parser/NatsParser.md
 create mode 100644 search/docs/api/faststream/nats/producer/NatsFastProducer.md
 create mode 100644 search/docs/api/faststream/nats/producer/NatsJSFastProducer.md
 create mode 100644 search/docs/api/faststream/nats/publisher/LogicPublisher.md
 create mode 100644 search/docs/api/faststream/nats/router/NatsRouter.md
 create mode 100644 search/docs/api/faststream/nats/shared/logging/NatsLoggingMixin.md
 create mode 100644 search/docs/api/faststream/nats/shared/router/NatsRouter.md
 create mode 100644 search/docs/api/faststream/nats/test/FakeProducer.md
 create mode 100644 search/docs/api/faststream/nats/test/PatchedMessage.md
 create mode 100644 search/docs/api/faststream/nats/test/TestNatsBroker.md
 create mode 100644 search/docs/api/faststream/nats/test/build_message.md
 create mode 100644 search/docs/api/faststream/rabbit/asyncapi/Handler.md
 create mode 100644 search/docs/api/faststream/rabbit/asyncapi/Publisher.md
 create mode 100644 search/docs/api/faststream/rabbit/asyncapi/RMQAsyncAPIChannel.md
 create mode 100644 search/docs/api/faststream/rabbit/broker/RabbitBroker.md
 create mode 100644 search/docs/api/faststream/rabbit/fastapi/RabbitRouter.md
 create mode 100644 search/docs/api/faststream/rabbit/handler/LogicHandler.md
 create mode 100644 search/docs/api/faststream/rabbit/helpers/RabbitDeclarer.md
 create mode 100644 search/docs/api/faststream/rabbit/message/RabbitMessage.md
 create mode 100644 search/docs/api/faststream/rabbit/parser/AioPikaParser.md
 create mode 100644 search/docs/api/faststream/rabbit/producer/AioPikaFastProducer.md
 create mode 100644 search/docs/api/faststream/rabbit/publisher/LogicPublisher.md
 create mode 100644 search/docs/api/faststream/rabbit/router/RabbitRouter.md
 create mode 100644 search/docs/api/faststream/rabbit/security/parse_security.md
 create mode 100644 search/docs/api/faststream/rabbit/shared/constants/ExchangeType.md
 create mode 100644 search/docs/api/faststream/rabbit/shared/logging/RabbitLoggingMixin.md
 create mode 100644 search/docs/api/faststream/rabbit/shared/publisher/ABCPublisher.md
 create mode 100644 search/docs/api/faststream/rabbit/shared/router/RabbitRouter.md
 create mode 100644 search/docs/api/faststream/rabbit/shared/schemas/BaseRMQInformation.md
 create mode 100644 search/docs/api/faststream/rabbit/shared/schemas/RabbitExchange.md
 create mode 100644 search/docs/api/faststream/rabbit/shared/schemas/RabbitQueue.md
 create mode 100644 search/docs/api/faststream/rabbit/shared/schemas/get_routing_hash.md
 create mode 100644 search/docs/api/faststream/rabbit/test/FakeProducer.md
 create mode 100644 search/docs/api/faststream/rabbit/test/PatchedMessage.md
 create mode 100644 search/docs/api/faststream/rabbit/test/TestRabbitBroker.md
 create mode 100644 search/docs/api/faststream/rabbit/test/build_message.md
 create mode 100644 search/docs/api/faststream/utils/classes/Singleton.md
 create mode 100644 search/docs/api/faststream/utils/context/main/ContextRepo.md
 create mode 100644 search/docs/api/faststream/utils/context/types/Context.md
 create mode 100644 search/docs/api/faststream/utils/context/types/resolve_context.md
 create mode 100644 search/docs/api/faststream/utils/data/filter_by_dict.md
 create mode 100644 search/docs/api/faststream/utils/functions/get_function_positional_arguments.md
 create mode 100644 search/docs/api/faststream/utils/functions/timeout_scope.md
 create mode 100644 search/docs/api/faststream/utils/functions/to_async.md
 create mode 100644 search/docs/api/faststream/utils/no_cast/NoCast.md
 create mode 100644 search/docs/getting-started/asyncapi/custom.md
 create mode 100644 search/docs/getting-started/asyncapi/export.md
 create mode 100644 search/docs/getting-started/asyncapi/hosting.md
 create mode 100644 search/docs/getting-started/cli/index.md
 create mode 100644 search/docs/getting-started/config/index.md
 create mode 100644 search/docs/getting-started/context/custom.md
 create mode 100644 search/docs/getting-started/context/existed.md
 create mode 100644 search/docs/getting-started/context/extra.md
 create mode 100644 search/docs/getting-started/context/fields.md
 create mode 100644 search/docs/getting-started/context/index.md
 create mode 100644 search/docs/getting-started/contributing/CONTRIBUTING.md
 create mode 100644 search/docs/getting-started/contributing/docs.md
 create mode 100644 search/docs/getting-started/dependencies/class.md
 create mode 100644 search/docs/getting-started/dependencies/global.md
 create mode 100644 search/docs/getting-started/dependencies/index.md
 create mode 100644 search/docs/getting-started/dependencies/sub.md
 create mode 100644 search/docs/getting-started/dependencies/testing.md
 create mode 100644 search/docs/getting-started/dependencies/yield.md
 create mode 100644 search/docs/getting-started/index.md
 create mode 100644 search/docs/getting-started/integrations/fastapi/index.md
 create mode 100644 search/docs/getting-started/integrations/frameworks/index.md
 create mode 100644 search/docs/getting-started/lifespan/hooks.md
 create mode 100644 search/docs/getting-started/lifespan/index.md
 create mode 100644 search/docs/getting-started/lifespan/test.md
 create mode 100644 search/docs/getting-started/logging.md
 create mode 100644 search/docs/getting-started/middlewares/index.md
 create mode 100644 search/docs/getting-started/publishing/broker.md
 create mode 100644 search/docs/getting-started/publishing/decorator.md
 create mode 100644 search/docs/getting-started/publishing/direct.md
 create mode 100644 search/docs/getting-started/publishing/index.md
 create mode 100644 search/docs/getting-started/publishing/object.md
 create mode 100644 search/docs/getting-started/publishing/test.md
 create mode 100644 search/docs/getting-started/routers/index.md
 create mode 100644 search/docs/getting-started/serialization/decoder.md
 create mode 100644 search/docs/getting-started/serialization/examples.md
 create mode 100644 search/docs/getting-started/serialization/index.md
 create mode 100644 search/docs/getting-started/serialization/parser.md
 create mode 100644 search/docs/getting-started/subscription/annotation.md
 create mode 100644 search/docs/getting-started/subscription/filtering.md
 create mode 100644 search/docs/getting-started/subscription/index.md
 create mode 100644 search/docs/getting-started/subscription/pydantic.md
 create mode 100644 search/docs/getting-started/subscription/test.md
 create mode 100644 search/docs/getting-started/template/index.md
 create mode 100644 search/docs/index.md
 create mode 100644 search/docs/kafka/Publisher/batch_publisher.md
 create mode 100644 search/docs/kafka/Publisher/index.md
 create mode 100644 search/docs/kafka/Publisher/using_a_key.md
 create mode 100644 search/docs/kafka/Subscriber/batch_subscriber.md
 create mode 100644 search/docs/kafka/Subscriber/index.md
 create mode 100644 search/docs/kafka/ack.md
 create mode 100644 search/docs/kafka/index.md
 create mode 100644 search/docs/kafka/message.md
 create mode 100644 search/docs/nats/examples/direct.md
 create mode 100644 search/docs/nats/examples/pattern.md
 create mode 100644 search/docs/nats/index.md
 create mode 100644 search/docs/nats/jetstream/ack.md
 create mode 100644 search/docs/nats/jetstream/index.md
 create mode 100644 search/docs/nats/jetstream/key-value.md
 create mode 100644 search/docs/nats/jetstream/object.md
 create mode 100644 search/docs/nats/message.md
 create mode 100644 search/docs/nats/publishing/index.md
 create mode 100644 search/docs/nats/rpc.md
 create mode 100644 search/docs/rabbit/ack.md
 create mode 100644 search/docs/rabbit/declare.md
 create mode 100644 search/docs/rabbit/examples/direct.md
 create mode 100644 search/docs/rabbit/examples/fanout.md
 create mode 100644 search/docs/rabbit/examples/headers.md
 create mode 100644 search/docs/rabbit/examples/index.md
 create mode 100644 search/docs/rabbit/examples/stream.md
 create mode 100644 search/docs/rabbit/examples/topic.md
 create mode 100644 search/docs/rabbit/index.md
 create mode 100644 search/docs/rabbit/message.md
 create mode 100644 search/docs/rabbit/publishing.md
 create mode 100644 search/docs/rabbit/rpc.md
 create mode 100644 search/docs/release.md
 create mode 100644 search/examples/__init__.py
 create mode 100644 search/examples/example_add_and_publish_with_key/__init__.py
 create mode 100644 search/examples/example_add_and_publish_with_key/app.py
 create mode 100644 search/examples/example_add_and_publish_with_key/app_skeleton.py
 create mode 100644 search/examples/example_add_and_publish_with_key/description.txt
 create mode 100644 search/examples/example_add_and_publish_with_key/test_app.py
 create mode 100644 search/examples/example_add_and_publish_with_key2/__init__.py
 create mode 100644 search/examples/example_add_and_publish_with_key2/app.py
 create mode 100644 search/examples/example_add_and_publish_with_key2/app_skeleton.py
 create mode 100644 search/examples/example_add_and_publish_with_key2/description.txt
 create mode 100644 search/examples/example_add_and_publish_with_key2/test_app.py
 create mode 100644 search/examples/example_calculate_mean_temperature/__init__.py
 create mode 100644 search/examples/example_calculate_mean_temperature/app.py
 create mode 100644 search/examples/example_calculate_mean_temperature/app_skeleton.py
 create mode 100644 search/examples/example_calculate_mean_temperature/description.txt
 create mode 100644 search/examples/example_calculate_mean_temperature/test_app.py
 create mode 100644 search/examples/example_consume_publish_with_key/__init__.py
 create mode 100644 search/examples/example_consume_publish_with_key/app.py
 create mode 100644 search/examples/example_consume_publish_with_key/app_skeleton.py
 create mode 100644 search/examples/example_consume_publish_with_key/description.txt
 create mode 100644 search/examples/example_consume_publish_with_key/test_app.py
 create mode 100644 search/examples/example_course_updates/__init__.py
 create mode 100644 search/examples/example_course_updates/app.py
 create mode 100644 search/examples/example_course_updates/app_skeleton.py
 create mode 100644 search/examples/example_course_updates/description.txt
 create mode 100644 search/examples/example_course_updates/test_app.py
 create mode 100644 search/examples/example_execute_trade/__init__.py
 create mode 100644 search/examples/example_execute_trade/app.py
 create mode 100644 search/examples/example_execute_trade/app_skeleton.py
 create mode 100644 search/examples/example_execute_trade/description.txt
 create mode 100644 search/examples/example_execute_trade/test_app.py
 create mode 100644 search/examples/example_forward_to_another_topic/__init__.py
 create mode 100644 search/examples/example_forward_to_another_topic/app.py
 create mode 100644 search/examples/example_forward_to_another_topic/app_skeleton.py
 create mode 100644 search/examples/example_forward_to_another_topic/description.txt
 create mode 100644 search/examples/example_forward_to_another_topic/test_app.py
 create mode 100644 search/examples/example_forward_to_multiple_topics/__init__.py
 create mode 100644 search/examples/example_forward_to_multiple_topics/app.py
 create mode 100644 search/examples/example_forward_to_multiple_topics/app_skeleton.py
 create mode 100644 search/examples/example_forward_to_multiple_topics/description.txt
 create mode 100644 search/examples/example_forward_to_multiple_topics/test_app.py
 create mode 100644 search/examples/example_forward_with_security/__init__.py
 create mode 100644 search/examples/example_forward_with_security/app.py
 create mode 100644 search/examples/example_forward_with_security/app_skeleton.py
 create mode 100644 search/examples/example_forward_with_security/description.txt
 create mode 100644 search/examples/example_forward_with_security/test_app.py
 create mode 100644 search/examples/example_infinity_publishing/__init__.py
 create mode 100644 search/examples/example_infinity_publishing/app.py
 create mode 100644 search/examples/example_infinity_publishing/app_skeleton.py
 create mode 100644 search/examples/example_infinity_publishing/description.txt
 create mode 100644 search/examples/example_infinity_publishing/test_app.py
 create mode 100644 search/examples/example_investment_updates/__init__.py
 create mode 100644 search/examples/example_investment_updates/app.py
 create mode 100644 search/examples/example_investment_updates/app_skeleton.py
 create mode 100644 search/examples/example_investment_updates/description.txt
 create mode 100644 search/examples/example_investment_updates/test_app.py
 create mode 100644 search/examples/example_log_msgs_with_plaintext_security/__init__.py
 create mode 100644 search/examples/example_log_msgs_with_plaintext_security/app.py
 create mode 100644 search/examples/example_log_msgs_with_plaintext_security/app_skeleton.py
 create mode 100644 search/examples/example_log_msgs_with_plaintext_security/description.txt
 create mode 100644 search/examples/example_log_msgs_with_plaintext_security/test_app.py
 create mode 100644 search/examples/example_new_employee/__init__.py
 create mode 100644 search/examples/example_new_employee/app.py
 create mode 100644 search/examples/example_new_employee/app_skeleton.py
 create mode 100644 search/examples/example_new_employee/description.txt
 create mode 100644 search/examples/example_new_employee/test_app.py
 create mode 100644 search/examples/example_pets/__init__.py
 create mode 100644 search/examples/example_pets/app.py
 create mode 100644 search/examples/example_pets/app_skeleton.py
 create mode 100644 search/examples/example_pets/description.txt
 create mode 100644 search/examples/example_pets/test_app.py
 create mode 100644 search/examples/example_plants/__init__.py
 create mode 100644 search/examples/example_plants/app.py
 create mode 100644 search/examples/example_plants/app_skeleton.py
 create mode 100644 search/examples/example_plants/description.txt
 create mode 100644 search/examples/example_plants/test_app.py
 create mode 100644 search/examples/example_product_reviews/__init__.py
 create mode 100644 search/examples/example_product_reviews/app.py
 create mode 100644 search/examples/example_product_reviews/app_skeleton.py
 create mode 100644 search/examples/example_product_reviews/description.txt
 create mode 100644 search/examples/example_product_reviews/test_app.py
 create mode 100644 search/examples/example_publish_with_key/__init__.py
 create mode 100644 search/examples/example_publish_with_key/app.py
 create mode 100644 search/examples/example_publish_with_key/app_skeleton.py
 create mode 100644 search/examples/example_publish_with_key/description.txt
 create mode 100644 search/examples/example_publish_with_key/test_app.py
 create mode 100644 search/examples/example_scram256_security/__init__.py
 create mode 100644 search/examples/example_scram256_security/app.py
 create mode 100644 search/examples/example_scram256_security/app_skeleton.py
 create mode 100644 search/examples/example_scram256_security/description.txt
 create mode 100644 search/examples/example_scram256_security/test_app.py
 create mode 100644 search/examples/example_scram512_security/__init__.py
 create mode 100644 search/examples/example_scram512_security/app.py
 create mode 100644 search/examples/example_scram512_security/app_skeleton.py
 create mode 100644 search/examples/example_scram512_security/description.txt
 create mode 100644 search/examples/example_scram512_security/test_app.py
 create mode 100644 search/examples/example_scrape_weather/__init__.py
 create mode 100644 search/examples/example_scrape_weather/app.py
 create mode 100644 search/examples/example_scrape_weather/app_skeleton.py
 create mode 100644 search/examples/example_scrape_weather/description.txt
 create mode 100644 search/examples/example_scrape_weather/test_app.py
 create mode 100644 search/examples/example_social_media_post/__init__.py
 create mode 100644 search/examples/example_social_media_post/app.py
 create mode 100644 search/examples/example_social_media_post/app_skeleton.py
 create mode 100644 search/examples/example_social_media_post/description.txt
 create mode 100644 search/examples/example_social_media_post/test_app.py
 create mode 100644 search/examples/example_student_query/__init__.py
 create mode 100644 search/examples/example_student_query/app.py
 create mode 100644 search/examples/example_student_query/app_skeleton.py
 create mode 100644 search/examples/example_student_query/description.txt
 create mode 100644 search/examples/example_student_query/test_app.py
 create mode 100644 search/examples/example_weather_updates/__init__.py
 create mode 100644 search/examples/example_weather_updates/app.py
 create mode 100644 search/examples/example_weather_updates/app_skeleton.py
 create mode 100644 search/examples/example_weather_updates/description.txt
 create mode 100644 search/examples/example_weather_updates/test_app.py

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index d5752f2..f8c0bf1 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -2,6 +2,7 @@
 # See https://pre-commit.com for more information
 # See https://pre-commit.com/hooks.html for more hooks
 
+exclude: ^search/examples/
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
     rev: "v4.4.0"
diff --git a/.secrets.baseline b/.secrets.baseline
index 434c1d6..eabcebe 100644
--- a/.secrets.baseline
+++ b/.secrets.baseline
@@ -111,6 +111,17 @@
       "path": "detect_secrets.filters.heuristic.is_templated_secret"
     }
   ],
-  "results": {},
-  "generated_at": "2023-08-08T10:13:31Z"
+  "results": {
+    "search/docs/getting-started/config/index.md": [
+      {
+        "type": "Basic Auth Credentials",
+        "filename": "search/docs/getting-started/config/index.md",
+        "hashed_secret": "35675e68f4b5af7b995d9205ad0fc43842f16450",
+        "is_verified": false,
+        "line_number": 95,
+        "is_secret": false
+      }
+    ]
+  },
+  "generated_at": "2023-10-09T11:12:30Z"
 }
diff --git a/search/docs/api/faststream/app/ABCApp.md b/search/docs/api/faststream/app/ABCApp.md
new file mode 100644
index 0000000..7f2d1b8
--- /dev/null
+++ b/search/docs/api/faststream/app/ABCApp.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.app.ABCApp
diff --git a/search/docs/api/faststream/app/FastStream.md b/search/docs/api/faststream/app/FastStream.md
new file mode 100644
index 0000000..769b229
--- /dev/null
+++ b/search/docs/api/faststream/app/FastStream.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.app.FastStream
diff --git a/search/docs/api/faststream/asyncapi/base/AsyncAPIOperation.md b/search/docs/api/faststream/asyncapi/base/AsyncAPIOperation.md
new file mode 100644
index 0000000..8b9a5b3
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/base/AsyncAPIOperation.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.base.AsyncAPIOperation
diff --git a/search/docs/api/faststream/asyncapi/generate/get_app_broker_channels.md b/search/docs/api/faststream/asyncapi/generate/get_app_broker_channels.md
new file mode 100644
index 0000000..4968e92
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/generate/get_app_broker_channels.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.generate.get_app_broker_channels
diff --git a/search/docs/api/faststream/asyncapi/generate/get_app_broker_server.md b/search/docs/api/faststream/asyncapi/generate/get_app_broker_server.md
new file mode 100644
index 0000000..97bce7f
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/generate/get_app_broker_server.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.generate.get_app_broker_server
diff --git a/search/docs/api/faststream/asyncapi/generate/get_app_schema.md b/search/docs/api/faststream/asyncapi/generate/get_app_schema.md
new file mode 100644
index 0000000..77e1a64
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/generate/get_app_schema.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.generate.get_app_schema
diff --git a/search/docs/api/faststream/asyncapi/message/get_model_schema.md b/search/docs/api/faststream/asyncapi/message/get_model_schema.md
new file mode 100644
index 0000000..c3ec4bb
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/message/get_model_schema.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.message.get_model_schema
diff --git a/search/docs/api/faststream/asyncapi/message/get_response_schema.md b/search/docs/api/faststream/asyncapi/message/get_response_schema.md
new file mode 100644
index 0000000..3a8eb1e
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/message/get_response_schema.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.message.get_response_schema
diff --git a/search/docs/api/faststream/asyncapi/message/parse_handler_params.md b/search/docs/api/faststream/asyncapi/message/parse_handler_params.md
new file mode 100644
index 0000000..c530266
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/message/parse_handler_params.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.message.parse_handler_params
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/amqp/ChannelBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/amqp/ChannelBinding.md
new file mode 100644
index 0000000..443e316
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/bindings/amqp/ChannelBinding.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.bindings.amqp.ChannelBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/amqp/Exchange.md b/search/docs/api/faststream/asyncapi/schema/bindings/amqp/Exchange.md
new file mode 100644
index 0000000..f38ab3e
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/bindings/amqp/Exchange.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.bindings.amqp.Exchange
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/amqp/OperationBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/amqp/OperationBinding.md
new file mode 100644
index 0000000..ced9de0
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/bindings/amqp/OperationBinding.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.bindings.amqp.OperationBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/amqp/Queue.md b/search/docs/api/faststream/asyncapi/schema/bindings/amqp/Queue.md
new file mode 100644
index 0000000..47ce84c
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/bindings/amqp/Queue.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.bindings.amqp.Queue
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/amqp/ServerBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/amqp/ServerBinding.md
new file mode 100644
index 0000000..29d4033
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/bindings/amqp/ServerBinding.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.bindings.amqp.ServerBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/kafka/ChannelBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/kafka/ChannelBinding.md
new file mode 100644
index 0000000..e3e9748
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/bindings/kafka/ChannelBinding.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.bindings.kafka.ChannelBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/kafka/OperationBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/kafka/OperationBinding.md
new file mode 100644
index 0000000..b231a6f
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/bindings/kafka/OperationBinding.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.bindings.kafka.OperationBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/kafka/ServerBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/kafka/ServerBinding.md
new file mode 100644
index 0000000..85183b4
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/bindings/kafka/ServerBinding.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.bindings.kafka.ServerBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/main/ChannelBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/main/ChannelBinding.md
new file mode 100644
index 0000000..8db79c7
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/bindings/main/ChannelBinding.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.bindings.main.ChannelBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/main/OperationBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/main/OperationBinding.md
new file mode 100644
index 0000000..0492f07
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/bindings/main/OperationBinding.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.bindings.main.OperationBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/main/ServerBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/main/ServerBinding.md
new file mode 100644
index 0000000..3250e4d
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/bindings/main/ServerBinding.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.bindings.main.ServerBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/nats/ChannelBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/nats/ChannelBinding.md
new file mode 100644
index 0000000..1195aa6
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/bindings/nats/ChannelBinding.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.bindings.nats.ChannelBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/nats/OperationBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/nats/OperationBinding.md
new file mode 100644
index 0000000..5c93a78
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/bindings/nats/OperationBinding.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.bindings.nats.OperationBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/nats/ServerBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/nats/ServerBinding.md
new file mode 100644
index 0000000..8cde582
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/bindings/nats/ServerBinding.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.bindings.nats.ServerBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/redis/ChannelBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/redis/ChannelBinding.md
new file mode 100644
index 0000000..44dc374
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/bindings/redis/ChannelBinding.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.bindings.redis.ChannelBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/redis/OperationBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/redis/OperationBinding.md
new file mode 100644
index 0000000..1d8b38e
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/bindings/redis/OperationBinding.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.bindings.redis.OperationBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/redis/ServerBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/redis/ServerBinding.md
new file mode 100644
index 0000000..25ae5b8
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/bindings/redis/ServerBinding.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.bindings.redis.ServerBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/sqs/ChannelBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/sqs/ChannelBinding.md
new file mode 100644
index 0000000..da40112
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/bindings/sqs/ChannelBinding.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.bindings.sqs.ChannelBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/sqs/OperationBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/sqs/OperationBinding.md
new file mode 100644
index 0000000..a8bcaca
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/bindings/sqs/OperationBinding.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.bindings.sqs.OperationBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/sqs/ServerBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/sqs/ServerBinding.md
new file mode 100644
index 0000000..f3228be
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/bindings/sqs/ServerBinding.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.bindings.sqs.ServerBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/channels/Channel.md b/search/docs/api/faststream/asyncapi/schema/channels/Channel.md
new file mode 100644
index 0000000..b283b20
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/channels/Channel.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.channels.Channel
diff --git a/search/docs/api/faststream/asyncapi/schema/info/Contact.md b/search/docs/api/faststream/asyncapi/schema/info/Contact.md
new file mode 100644
index 0000000..bcc8ff0
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/info/Contact.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.info.Contact
diff --git a/search/docs/api/faststream/asyncapi/schema/info/ContactDict.md b/search/docs/api/faststream/asyncapi/schema/info/ContactDict.md
new file mode 100644
index 0000000..ffd0af9
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/info/ContactDict.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.info.ContactDict
diff --git a/search/docs/api/faststream/asyncapi/schema/info/EmailStr.md b/search/docs/api/faststream/asyncapi/schema/info/EmailStr.md
new file mode 100644
index 0000000..eb5ba9d
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/info/EmailStr.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.info.EmailStr
diff --git a/search/docs/api/faststream/asyncapi/schema/info/Info.md b/search/docs/api/faststream/asyncapi/schema/info/Info.md
new file mode 100644
index 0000000..ba556ec
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/info/Info.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.info.Info
diff --git a/search/docs/api/faststream/asyncapi/schema/info/License.md b/search/docs/api/faststream/asyncapi/schema/info/License.md
new file mode 100644
index 0000000..0be1fa3
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/info/License.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.info.License
diff --git a/search/docs/api/faststream/asyncapi/schema/info/LicenseDict.md b/search/docs/api/faststream/asyncapi/schema/info/LicenseDict.md
new file mode 100644
index 0000000..f6e4541
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/info/LicenseDict.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.info.LicenseDict
diff --git a/search/docs/api/faststream/asyncapi/schema/main/Components.md b/search/docs/api/faststream/asyncapi/schema/main/Components.md
new file mode 100644
index 0000000..1e70196
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/main/Components.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.main.Components
diff --git a/search/docs/api/faststream/asyncapi/schema/main/Schema.md b/search/docs/api/faststream/asyncapi/schema/main/Schema.md
new file mode 100644
index 0000000..0c1aee8
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/main/Schema.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.main.Schema
diff --git a/search/docs/api/faststream/asyncapi/schema/message/CorrelationId.md b/search/docs/api/faststream/asyncapi/schema/message/CorrelationId.md
new file mode 100644
index 0000000..fb1b81c
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/message/CorrelationId.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.message.CorrelationId
diff --git a/search/docs/api/faststream/asyncapi/schema/message/Message.md b/search/docs/api/faststream/asyncapi/schema/message/Message.md
new file mode 100644
index 0000000..194c715
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/message/Message.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.message.Message
diff --git a/search/docs/api/faststream/asyncapi/schema/operations/Operation.md b/search/docs/api/faststream/asyncapi/schema/operations/Operation.md
new file mode 100644
index 0000000..9e331ce
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/operations/Operation.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.operations.Operation
diff --git a/search/docs/api/faststream/asyncapi/schema/security/OauthFlowObj.md b/search/docs/api/faststream/asyncapi/schema/security/OauthFlowObj.md
new file mode 100644
index 0000000..adf303b
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/security/OauthFlowObj.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.security.OauthFlowObj
diff --git a/search/docs/api/faststream/asyncapi/schema/security/OauthFlows.md b/search/docs/api/faststream/asyncapi/schema/security/OauthFlows.md
new file mode 100644
index 0000000..6e322f2
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/security/OauthFlows.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.security.OauthFlows
diff --git a/search/docs/api/faststream/asyncapi/schema/security/SecuritySchemaComponent.md b/search/docs/api/faststream/asyncapi/schema/security/SecuritySchemaComponent.md
new file mode 100644
index 0000000..69329cb
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/security/SecuritySchemaComponent.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.security.SecuritySchemaComponent
diff --git a/search/docs/api/faststream/asyncapi/schema/servers/Server.md b/search/docs/api/faststream/asyncapi/schema/servers/Server.md
new file mode 100644
index 0000000..48c47c5
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/servers/Server.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.servers.Server
diff --git a/search/docs/api/faststream/asyncapi/schema/servers/ServerVariable.md b/search/docs/api/faststream/asyncapi/schema/servers/ServerVariable.md
new file mode 100644
index 0000000..d34e6ca
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/servers/ServerVariable.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.servers.ServerVariable
diff --git a/search/docs/api/faststream/asyncapi/schema/utils/ExternalDocs.md b/search/docs/api/faststream/asyncapi/schema/utils/ExternalDocs.md
new file mode 100644
index 0000000..aca23a7
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/utils/ExternalDocs.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.utils.ExternalDocs
diff --git a/search/docs/api/faststream/asyncapi/schema/utils/ExternalDocsDict.md b/search/docs/api/faststream/asyncapi/schema/utils/ExternalDocsDict.md
new file mode 100644
index 0000000..b9e5448
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/utils/ExternalDocsDict.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.utils.ExternalDocsDict
diff --git a/search/docs/api/faststream/asyncapi/schema/utils/Parameter.md b/search/docs/api/faststream/asyncapi/schema/utils/Parameter.md
new file mode 100644
index 0000000..d812f0d
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/utils/Parameter.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.utils.Parameter
diff --git a/search/docs/api/faststream/asyncapi/schema/utils/Reference.md b/search/docs/api/faststream/asyncapi/schema/utils/Reference.md
new file mode 100644
index 0000000..8af2feb
--- /dev/null
+++ b/search/docs/api/faststream/asyncapi/schema/utils/Reference.md
@@ -0,0 +1,3 @@
+
+
+::: faststream.asyncapi.schema.utils.Reference
diff --git a/search/docs/api/faststream/asyncapi/schema/utils/Tag.md b/search/docs/api/faststream/asyncapi/schema/utils/Tag.md
new file mode 100644
index 0000000..581e235
--- /dev/null
+++
b/search/docs/api/faststream/asyncapi/schema/utils/Tag.md @@ -0,0 +1,3 @@ + + +::: faststream.asyncapi.schema.utils.Tag diff --git a/search/docs/api/faststream/asyncapi/schema/utils/TagDict.md b/search/docs/api/faststream/asyncapi/schema/utils/TagDict.md new file mode 100644 index 0000000..66cec99 --- /dev/null +++ b/search/docs/api/faststream/asyncapi/schema/utils/TagDict.md @@ -0,0 +1,3 @@ + + +::: faststream.asyncapi.schema.utils.TagDict diff --git a/search/docs/api/faststream/asyncapi/site/get_asyncapi_html.md b/search/docs/api/faststream/asyncapi/site/get_asyncapi_html.md new file mode 100644 index 0000000..e98f7d4 --- /dev/null +++ b/search/docs/api/faststream/asyncapi/site/get_asyncapi_html.md @@ -0,0 +1,3 @@ + + +::: faststream.asyncapi.site.get_asyncapi_html diff --git a/search/docs/api/faststream/asyncapi/site/serve_app.md b/search/docs/api/faststream/asyncapi/site/serve_app.md new file mode 100644 index 0000000..18f9b0d --- /dev/null +++ b/search/docs/api/faststream/asyncapi/site/serve_app.md @@ -0,0 +1,3 @@ + + +::: faststream.asyncapi.site.serve_app diff --git a/search/docs/api/faststream/asyncapi/utils/resolve_payloads.md b/search/docs/api/faststream/asyncapi/utils/resolve_payloads.md new file mode 100644 index 0000000..40809e1 --- /dev/null +++ b/search/docs/api/faststream/asyncapi/utils/resolve_payloads.md @@ -0,0 +1,3 @@ + + +::: faststream.asyncapi.utils.resolve_payloads diff --git a/search/docs/api/faststream/asyncapi/utils/to_camelcase.md b/search/docs/api/faststream/asyncapi/utils/to_camelcase.md new file mode 100644 index 0000000..40cd64a --- /dev/null +++ b/search/docs/api/faststream/asyncapi/utils/to_camelcase.md @@ -0,0 +1,3 @@ + + +::: faststream.asyncapi.utils.to_camelcase diff --git a/search/docs/api/faststream/broker/core/abc/BrokerUsecase.md b/search/docs/api/faststream/broker/core/abc/BrokerUsecase.md new file mode 100644 index 0000000..4a1c3c9 --- /dev/null +++ b/search/docs/api/faststream/broker/core/abc/BrokerUsecase.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.core.abc.BrokerUsecase diff --git a/search/docs/api/faststream/broker/core/abc/extend_dependencies.md b/search/docs/api/faststream/broker/core/abc/extend_dependencies.md new file mode 100644 index 0000000..d89d80e --- /dev/null +++ b/search/docs/api/faststream/broker/core/abc/extend_dependencies.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.core.abc.extend_dependencies diff --git a/search/docs/api/faststream/broker/core/asyncronous/BrokerAsyncUsecase.md b/search/docs/api/faststream/broker/core/asyncronous/BrokerAsyncUsecase.md new file mode 100644 index 0000000..507344a --- /dev/null +++ b/search/docs/api/faststream/broker/core/asyncronous/BrokerAsyncUsecase.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.core.asyncronous.BrokerAsyncUsecase diff --git a/search/docs/api/faststream/broker/core/asyncronous/default_filter.md b/search/docs/api/faststream/broker/core/asyncronous/default_filter.md new file mode 100644 index 0000000..b40ad64 --- /dev/null +++ b/search/docs/api/faststream/broker/core/asyncronous/default_filter.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.core.asyncronous.default_filter diff --git a/search/docs/api/faststream/broker/core/mixins/LoggingMixin.md b/search/docs/api/faststream/broker/core/mixins/LoggingMixin.md new file mode 100644 index 0000000..74154ac --- /dev/null +++ b/search/docs/api/faststream/broker/core/mixins/LoggingMixin.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.core.mixins.LoggingMixin diff --git a/search/docs/api/faststream/broker/fastapi/route/StreamMessage.md 
b/search/docs/api/faststream/broker/fastapi/route/StreamMessage.md new file mode 100644 index 0000000..7b19856 --- /dev/null +++ b/search/docs/api/faststream/broker/fastapi/route/StreamMessage.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.fastapi.route.StreamMessage diff --git a/search/docs/api/faststream/broker/fastapi/route/StreamRoute.md b/search/docs/api/faststream/broker/fastapi/route/StreamRoute.md new file mode 100644 index 0000000..e067a18 --- /dev/null +++ b/search/docs/api/faststream/broker/fastapi/route/StreamRoute.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.fastapi.route.StreamRoute diff --git a/search/docs/api/faststream/broker/fastapi/route/get_app.md b/search/docs/api/faststream/broker/fastapi/route/get_app.md new file mode 100644 index 0000000..3df3d57 --- /dev/null +++ b/search/docs/api/faststream/broker/fastapi/route/get_app.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.fastapi.route.get_app diff --git a/search/docs/api/faststream/broker/fastapi/router/StreamRouter.md b/search/docs/api/faststream/broker/fastapi/router/StreamRouter.md new file mode 100644 index 0000000..f0ee7f0 --- /dev/null +++ b/search/docs/api/faststream/broker/fastapi/router/StreamRouter.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.fastapi.router.StreamRouter diff --git a/search/docs/api/faststream/broker/handler/AsyncHandler.md b/search/docs/api/faststream/broker/handler/AsyncHandler.md new file mode 100644 index 0000000..cc291a0 --- /dev/null +++ b/search/docs/api/faststream/broker/handler/AsyncHandler.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.handler.AsyncHandler diff --git a/search/docs/api/faststream/broker/handler/BaseHandler.md b/search/docs/api/faststream/broker/handler/BaseHandler.md new file mode 100644 index 0000000..193dfad --- /dev/null +++ b/search/docs/api/faststream/broker/handler/BaseHandler.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.handler.BaseHandler diff --git a/search/docs/api/faststream/broker/message/ABCStreamMessage.md b/search/docs/api/faststream/broker/message/ABCStreamMessage.md new file mode 100644 index 0000000..7b8f52f --- /dev/null +++ b/search/docs/api/faststream/broker/message/ABCStreamMessage.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.message.ABCStreamMessage diff --git a/search/docs/api/faststream/broker/message/StreamMessage.md b/search/docs/api/faststream/broker/message/StreamMessage.md new file mode 100644 index 0000000..f87c3e8 --- /dev/null +++ b/search/docs/api/faststream/broker/message/StreamMessage.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.message.StreamMessage diff --git a/search/docs/api/faststream/broker/message/SyncStreamMessage.md b/search/docs/api/faststream/broker/message/SyncStreamMessage.md new file mode 100644 index 0000000..b1c8c34 --- /dev/null +++ b/search/docs/api/faststream/broker/message/SyncStreamMessage.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.message.SyncStreamMessage diff --git a/search/docs/api/faststream/broker/middlewares/BaseMiddleware.md b/search/docs/api/faststream/broker/middlewares/BaseMiddleware.md new file mode 100644 index 0000000..417d231 --- /dev/null +++ b/search/docs/api/faststream/broker/middlewares/BaseMiddleware.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.middlewares.BaseMiddleware diff --git a/search/docs/api/faststream/broker/middlewares/CriticalLogMiddleware.md b/search/docs/api/faststream/broker/middlewares/CriticalLogMiddleware.md new file mode 100644 index 0000000..fe8a095 --- /dev/null +++ b/search/docs/api/faststream/broker/middlewares/CriticalLogMiddleware.md @@ -0,0 +1,3 @@ + + +::: 
faststream.broker.middlewares.CriticalLogMiddleware diff --git a/search/docs/api/faststream/broker/parsers/decode_message.md b/search/docs/api/faststream/broker/parsers/decode_message.md new file mode 100644 index 0000000..8664120 --- /dev/null +++ b/search/docs/api/faststream/broker/parsers/decode_message.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.parsers.decode_message diff --git a/search/docs/api/faststream/broker/parsers/encode_message.md b/search/docs/api/faststream/broker/parsers/encode_message.md new file mode 100644 index 0000000..0fb74e3 --- /dev/null +++ b/search/docs/api/faststream/broker/parsers/encode_message.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.parsers.encode_message diff --git a/search/docs/api/faststream/broker/parsers/resolve_custom_func.md b/search/docs/api/faststream/broker/parsers/resolve_custom_func.md new file mode 100644 index 0000000..3e8ab0d --- /dev/null +++ b/search/docs/api/faststream/broker/parsers/resolve_custom_func.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.parsers.resolve_custom_func diff --git a/search/docs/api/faststream/broker/publisher/BasePublisher.md b/search/docs/api/faststream/broker/publisher/BasePublisher.md new file mode 100644 index 0000000..8118cc4 --- /dev/null +++ b/search/docs/api/faststream/broker/publisher/BasePublisher.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.publisher.BasePublisher diff --git a/search/docs/api/faststream/broker/push_back_watcher/BaseWatcher.md b/search/docs/api/faststream/broker/push_back_watcher/BaseWatcher.md new file mode 100644 index 0000000..3a74eed --- /dev/null +++ b/search/docs/api/faststream/broker/push_back_watcher/BaseWatcher.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.push_back_watcher.BaseWatcher diff --git a/search/docs/api/faststream/broker/push_back_watcher/CounterWatcher.md b/search/docs/api/faststream/broker/push_back_watcher/CounterWatcher.md new file mode 100644 index 0000000..e8616c5 --- /dev/null +++ b/search/docs/api/faststream/broker/push_back_watcher/CounterWatcher.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.push_back_watcher.CounterWatcher diff --git a/search/docs/api/faststream/broker/push_back_watcher/EndlessWatcher.md b/search/docs/api/faststream/broker/push_back_watcher/EndlessWatcher.md new file mode 100644 index 0000000..2ad32a8 --- /dev/null +++ b/search/docs/api/faststream/broker/push_back_watcher/EndlessWatcher.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.push_back_watcher.EndlessWatcher diff --git a/search/docs/api/faststream/broker/push_back_watcher/OneTryWatcher.md b/search/docs/api/faststream/broker/push_back_watcher/OneTryWatcher.md new file mode 100644 index 0000000..736f601 --- /dev/null +++ b/search/docs/api/faststream/broker/push_back_watcher/OneTryWatcher.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.push_back_watcher.OneTryWatcher diff --git a/search/docs/api/faststream/broker/push_back_watcher/WatcherContext.md b/search/docs/api/faststream/broker/push_back_watcher/WatcherContext.md new file mode 100644 index 0000000..773e3cf --- /dev/null +++ b/search/docs/api/faststream/broker/push_back_watcher/WatcherContext.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.push_back_watcher.WatcherContext diff --git a/search/docs/api/faststream/broker/router/BrokerRoute.md b/search/docs/api/faststream/broker/router/BrokerRoute.md new file mode 100644 index 0000000..c3b2c4a --- /dev/null +++ b/search/docs/api/faststream/broker/router/BrokerRoute.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.router.BrokerRoute diff --git 
a/search/docs/api/faststream/broker/router/BrokerRouter.md b/search/docs/api/faststream/broker/router/BrokerRouter.md new file mode 100644 index 0000000..adecbf2 --- /dev/null +++ b/search/docs/api/faststream/broker/router/BrokerRouter.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.router.BrokerRouter diff --git a/search/docs/api/faststream/broker/schemas/NameRequired.md b/search/docs/api/faststream/broker/schemas/NameRequired.md new file mode 100644 index 0000000..105588f --- /dev/null +++ b/search/docs/api/faststream/broker/schemas/NameRequired.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.schemas.NameRequired diff --git a/search/docs/api/faststream/broker/schemas/RawDecoced.md b/search/docs/api/faststream/broker/schemas/RawDecoced.md new file mode 100644 index 0000000..968a1ee --- /dev/null +++ b/search/docs/api/faststream/broker/schemas/RawDecoced.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.schemas.RawDecoced diff --git a/search/docs/api/faststream/broker/security/BaseSecurity.md b/search/docs/api/faststream/broker/security/BaseSecurity.md new file mode 100644 index 0000000..a607c21 --- /dev/null +++ b/search/docs/api/faststream/broker/security/BaseSecurity.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.security.BaseSecurity diff --git a/search/docs/api/faststream/broker/security/SASLPlaintext.md b/search/docs/api/faststream/broker/security/SASLPlaintext.md new file mode 100644 index 0000000..4d56212 --- /dev/null +++ b/search/docs/api/faststream/broker/security/SASLPlaintext.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.security.SASLPlaintext diff --git a/search/docs/api/faststream/broker/security/SASLScram256.md b/search/docs/api/faststream/broker/security/SASLScram256.md new file mode 100644 index 0000000..29946ef --- /dev/null +++ b/search/docs/api/faststream/broker/security/SASLScram256.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.security.SASLScram256 diff --git a/search/docs/api/faststream/broker/security/SASLScram512.md b/search/docs/api/faststream/broker/security/SASLScram512.md new file mode 100644 index 0000000..400ba46 --- /dev/null +++ b/search/docs/api/faststream/broker/security/SASLScram512.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.security.SASLScram512 diff --git a/search/docs/api/faststream/broker/test/TestApp.md b/search/docs/api/faststream/broker/test/TestApp.md new file mode 100644 index 0000000..b659d00 --- /dev/null +++ b/search/docs/api/faststream/broker/test/TestApp.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.test.TestApp diff --git a/search/docs/api/faststream/broker/test/TestBroker.md b/search/docs/api/faststream/broker/test/TestBroker.md new file mode 100644 index 0000000..0094259 --- /dev/null +++ b/search/docs/api/faststream/broker/test/TestBroker.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.test.TestBroker diff --git a/search/docs/api/faststream/broker/test/call_handler.md b/search/docs/api/faststream/broker/test/call_handler.md new file mode 100644 index 0000000..6a3341b --- /dev/null +++ b/search/docs/api/faststream/broker/test/call_handler.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.test.call_handler diff --git a/search/docs/api/faststream/broker/test/patch_broker_calls.md b/search/docs/api/faststream/broker/test/patch_broker_calls.md new file mode 100644 index 0000000..64606a2 --- /dev/null +++ b/search/docs/api/faststream/broker/test/patch_broker_calls.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.test.patch_broker_calls diff --git a/search/docs/api/faststream/broker/types/AsyncPublisherProtocol.md 
b/search/docs/api/faststream/broker/types/AsyncPublisherProtocol.md new file mode 100644 index 0000000..9b39782 --- /dev/null +++ b/search/docs/api/faststream/broker/types/AsyncPublisherProtocol.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.types.AsyncPublisherProtocol diff --git a/search/docs/api/faststream/broker/utils/change_logger_handlers.md b/search/docs/api/faststream/broker/utils/change_logger_handlers.md new file mode 100644 index 0000000..0522ea1 --- /dev/null +++ b/search/docs/api/faststream/broker/utils/change_logger_handlers.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.utils.change_logger_handlers diff --git a/search/docs/api/faststream/broker/utils/get_watcher.md b/search/docs/api/faststream/broker/utils/get_watcher.md new file mode 100644 index 0000000..7d6f0cc --- /dev/null +++ b/search/docs/api/faststream/broker/utils/get_watcher.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.utils.get_watcher diff --git a/search/docs/api/faststream/broker/utils/set_message_context.md b/search/docs/api/faststream/broker/utils/set_message_context.md new file mode 100644 index 0000000..d8682fd --- /dev/null +++ b/search/docs/api/faststream/broker/utils/set_message_context.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.utils.set_message_context diff --git a/search/docs/api/faststream/broker/wrapper/FakePublisher.md b/search/docs/api/faststream/broker/wrapper/FakePublisher.md new file mode 100644 index 0000000..4f9ec95 --- /dev/null +++ b/search/docs/api/faststream/broker/wrapper/FakePublisher.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.wrapper.FakePublisher diff --git a/search/docs/api/faststream/broker/wrapper/HandlerCallWrapper.md b/search/docs/api/faststream/broker/wrapper/HandlerCallWrapper.md new file mode 100644 index 0000000..fe9c10f --- /dev/null +++ b/search/docs/api/faststream/broker/wrapper/HandlerCallWrapper.md @@ -0,0 +1,3 @@ + + +::: faststream.broker.wrapper.HandlerCallWrapper diff --git a/search/docs/api/faststream/cli/docs/app/gen.md b/search/docs/api/faststream/cli/docs/app/gen.md new file mode 100644 index 0000000..4c512fd --- /dev/null +++ b/search/docs/api/faststream/cli/docs/app/gen.md @@ -0,0 +1,3 @@ + + +::: faststream.cli.docs.app.gen diff --git a/search/docs/api/faststream/cli/docs/app/serve.md b/search/docs/api/faststream/cli/docs/app/serve.md new file mode 100644 index 0000000..aaabc7a --- /dev/null +++ b/search/docs/api/faststream/cli/docs/app/serve.md @@ -0,0 +1,3 @@ + + +::: faststream.cli.docs.app.serve diff --git a/search/docs/api/faststream/cli/main/main.md b/search/docs/api/faststream/cli/main/main.md new file mode 100644 index 0000000..3de3347 --- /dev/null +++ b/search/docs/api/faststream/cli/main/main.md @@ -0,0 +1,3 @@ + + +::: faststream.cli.main.main diff --git a/search/docs/api/faststream/cli/main/run.md b/search/docs/api/faststream/cli/main/run.md new file mode 100644 index 0000000..7c49ec6 --- /dev/null +++ b/search/docs/api/faststream/cli/main/run.md @@ -0,0 +1,3 @@ + + +::: faststream.cli.main.run diff --git a/search/docs/api/faststream/cli/main/version_callback.md b/search/docs/api/faststream/cli/main/version_callback.md new file mode 100644 index 0000000..2889fc6 --- /dev/null +++ b/search/docs/api/faststream/cli/main/version_callback.md @@ -0,0 +1,3 @@ + + +::: faststream.cli.main.version_callback diff --git a/search/docs/api/faststream/cli/supervisors/basereload/BaseReload.md b/search/docs/api/faststream/cli/supervisors/basereload/BaseReload.md new file mode 100644 index 0000000..37848d0 --- /dev/null +++ 
b/search/docs/api/faststream/cli/supervisors/basereload/BaseReload.md @@ -0,0 +1,3 @@ + + +::: faststream.cli.supervisors.basereload.BaseReload diff --git a/search/docs/api/faststream/cli/supervisors/multiprocess/Multiprocess.md b/search/docs/api/faststream/cli/supervisors/multiprocess/Multiprocess.md new file mode 100644 index 0000000..ebd2fc6 --- /dev/null +++ b/search/docs/api/faststream/cli/supervisors/multiprocess/Multiprocess.md @@ -0,0 +1,3 @@ + + +::: faststream.cli.supervisors.multiprocess.Multiprocess diff --git a/search/docs/api/faststream/cli/supervisors/utils/get_subprocess.md b/search/docs/api/faststream/cli/supervisors/utils/get_subprocess.md new file mode 100644 index 0000000..159ddc9 --- /dev/null +++ b/search/docs/api/faststream/cli/supervisors/utils/get_subprocess.md @@ -0,0 +1,3 @@ + + +::: faststream.cli.supervisors.utils.get_subprocess diff --git a/search/docs/api/faststream/cli/supervisors/utils/set_exit.md b/search/docs/api/faststream/cli/supervisors/utils/set_exit.md new file mode 100644 index 0000000..c2bb5fb --- /dev/null +++ b/search/docs/api/faststream/cli/supervisors/utils/set_exit.md @@ -0,0 +1,3 @@ + + +::: faststream.cli.supervisors.utils.set_exit diff --git a/search/docs/api/faststream/cli/supervisors/utils/subprocess_started.md b/search/docs/api/faststream/cli/supervisors/utils/subprocess_started.md new file mode 100644 index 0000000..3cb1f84 --- /dev/null +++ b/search/docs/api/faststream/cli/supervisors/utils/subprocess_started.md @@ -0,0 +1,3 @@ + + +::: faststream.cli.supervisors.utils.subprocess_started diff --git a/search/docs/api/faststream/cli/supervisors/watchfiles/ExtendedFilter.md b/search/docs/api/faststream/cli/supervisors/watchfiles/ExtendedFilter.md new file mode 100644 index 0000000..93d8b7f --- /dev/null +++ b/search/docs/api/faststream/cli/supervisors/watchfiles/ExtendedFilter.md @@ -0,0 +1,3 @@ + + +::: faststream.cli.supervisors.watchfiles.ExtendedFilter diff --git a/search/docs/api/faststream/cli/supervisors/watchfiles/WatchReloader.md b/search/docs/api/faststream/cli/supervisors/watchfiles/WatchReloader.md new file mode 100644 index 0000000..4cca751 --- /dev/null +++ b/search/docs/api/faststream/cli/supervisors/watchfiles/WatchReloader.md @@ -0,0 +1,3 @@ + + +::: faststream.cli.supervisors.watchfiles.WatchReloader diff --git a/search/docs/api/faststream/cli/utils/imports/get_app_path.md b/search/docs/api/faststream/cli/utils/imports/get_app_path.md new file mode 100644 index 0000000..85b8019 --- /dev/null +++ b/search/docs/api/faststream/cli/utils/imports/get_app_path.md @@ -0,0 +1,3 @@ + + +::: faststream.cli.utils.imports.get_app_path diff --git a/search/docs/api/faststream/cli/utils/imports/import_object.md b/search/docs/api/faststream/cli/utils/imports/import_object.md new file mode 100644 index 0000000..2eaa16f --- /dev/null +++ b/search/docs/api/faststream/cli/utils/imports/import_object.md @@ -0,0 +1,3 @@ + + +::: faststream.cli.utils.imports.import_object diff --git a/search/docs/api/faststream/cli/utils/imports/try_import_app.md b/search/docs/api/faststream/cli/utils/imports/try_import_app.md new file mode 100644 index 0000000..97cc011 --- /dev/null +++ b/search/docs/api/faststream/cli/utils/imports/try_import_app.md @@ -0,0 +1,3 @@ + + +::: faststream.cli.utils.imports.try_import_app diff --git a/search/docs/api/faststream/cli/utils/logs/LogLevels.md b/search/docs/api/faststream/cli/utils/logs/LogLevels.md new file mode 100644 index 0000000..8f2cc1d --- /dev/null +++ b/search/docs/api/faststream/cli/utils/logs/LogLevels.md 
@@ -0,0 +1,3 @@ + + +::: faststream.cli.utils.logs.LogLevels diff --git a/search/docs/api/faststream/cli/utils/logs/get_log_level.md b/search/docs/api/faststream/cli/utils/logs/get_log_level.md new file mode 100644 index 0000000..e346cb9 --- /dev/null +++ b/search/docs/api/faststream/cli/utils/logs/get_log_level.md @@ -0,0 +1,3 @@ + + +::: faststream.cli.utils.logs.get_log_level diff --git a/search/docs/api/faststream/cli/utils/logs/set_log_level.md b/search/docs/api/faststream/cli/utils/logs/set_log_level.md new file mode 100644 index 0000000..fcb0b33 --- /dev/null +++ b/search/docs/api/faststream/cli/utils/logs/set_log_level.md @@ -0,0 +1,3 @@ + + +::: faststream.cli.utils.logs.set_log_level diff --git a/search/docs/api/faststream/cli/utils/parser/parse_cli_args.md b/search/docs/api/faststream/cli/utils/parser/parse_cli_args.md new file mode 100644 index 0000000..ae396af --- /dev/null +++ b/search/docs/api/faststream/cli/utils/parser/parse_cli_args.md @@ -0,0 +1,3 @@ + + +::: faststream.cli.utils.parser.parse_cli_args diff --git a/search/docs/api/faststream/cli/utils/parser/remove_prefix.md b/search/docs/api/faststream/cli/utils/parser/remove_prefix.md new file mode 100644 index 0000000..2cc7d44 --- /dev/null +++ b/search/docs/api/faststream/cli/utils/parser/remove_prefix.md @@ -0,0 +1,3 @@ + + +::: faststream.cli.utils.parser.remove_prefix diff --git a/search/docs/api/faststream/constants/ContentTypes.md b/search/docs/api/faststream/constants/ContentTypes.md new file mode 100644 index 0000000..2d857ff --- /dev/null +++ b/search/docs/api/faststream/constants/ContentTypes.md @@ -0,0 +1,3 @@ + + +::: faststream.constants.ContentTypes diff --git a/search/docs/api/faststream/exceptions/AckMessage.md b/search/docs/api/faststream/exceptions/AckMessage.md new file mode 100644 index 0000000..b6193d6 --- /dev/null +++ b/search/docs/api/faststream/exceptions/AckMessage.md @@ -0,0 +1,3 @@ + + +::: faststream.exceptions.AckMessage diff --git a/search/docs/api/faststream/exceptions/HandlerException.md b/search/docs/api/faststream/exceptions/HandlerException.md new file mode 100644 index 0000000..f0fff5d --- /dev/null +++ b/search/docs/api/faststream/exceptions/HandlerException.md @@ -0,0 +1,3 @@ + + +::: faststream.exceptions.HandlerException diff --git a/search/docs/api/faststream/exceptions/NackMessage.md b/search/docs/api/faststream/exceptions/NackMessage.md new file mode 100644 index 0000000..e7ebafa --- /dev/null +++ b/search/docs/api/faststream/exceptions/NackMessage.md @@ -0,0 +1,3 @@ + + +::: faststream.exceptions.NackMessage diff --git a/search/docs/api/faststream/exceptions/RejectMessage.md b/search/docs/api/faststream/exceptions/RejectMessage.md new file mode 100644 index 0000000..9579ad1 --- /dev/null +++ b/search/docs/api/faststream/exceptions/RejectMessage.md @@ -0,0 +1,3 @@ + + +::: faststream.exceptions.RejectMessage diff --git a/search/docs/api/faststream/exceptions/SkipMessage.md b/search/docs/api/faststream/exceptions/SkipMessage.md new file mode 100644 index 0000000..7ffe579 --- /dev/null +++ b/search/docs/api/faststream/exceptions/SkipMessage.md @@ -0,0 +1,3 @@ + + +::: faststream.exceptions.SkipMessage diff --git a/search/docs/api/faststream/exceptions/StopConsume.md b/search/docs/api/faststream/exceptions/StopConsume.md new file mode 100644 index 0000000..e550003 --- /dev/null +++ b/search/docs/api/faststream/exceptions/StopConsume.md @@ -0,0 +1,3 @@ + + +::: faststream.exceptions.StopConsume diff --git a/search/docs/api/faststream/kafka/asyncapi/Handler.md 
b/search/docs/api/faststream/kafka/asyncapi/Handler.md new file mode 100644 index 0000000..5cef4ba --- /dev/null +++ b/search/docs/api/faststream/kafka/asyncapi/Handler.md @@ -0,0 +1,3 @@ + + +::: faststream.kafka.asyncapi.Handler diff --git a/search/docs/api/faststream/kafka/asyncapi/Publisher.md b/search/docs/api/faststream/kafka/asyncapi/Publisher.md new file mode 100644 index 0000000..d91b8fd --- /dev/null +++ b/search/docs/api/faststream/kafka/asyncapi/Publisher.md @@ -0,0 +1,3 @@ + + +::: faststream.kafka.asyncapi.Publisher diff --git a/search/docs/api/faststream/kafka/broker/KafkaBroker.md b/search/docs/api/faststream/kafka/broker/KafkaBroker.md new file mode 100644 index 0000000..64a046e --- /dev/null +++ b/search/docs/api/faststream/kafka/broker/KafkaBroker.md @@ -0,0 +1,3 @@ + + +::: faststream.kafka.broker.KafkaBroker diff --git a/search/docs/api/faststream/kafka/fastapi/KafkaRouter.md b/search/docs/api/faststream/kafka/fastapi/KafkaRouter.md new file mode 100644 index 0000000..2a24f25 --- /dev/null +++ b/search/docs/api/faststream/kafka/fastapi/KafkaRouter.md @@ -0,0 +1,3 @@ + + +::: faststream.kafka.fastapi.KafkaRouter diff --git a/search/docs/api/faststream/kafka/handler/LogicHandler.md b/search/docs/api/faststream/kafka/handler/LogicHandler.md new file mode 100644 index 0000000..140e829 --- /dev/null +++ b/search/docs/api/faststream/kafka/handler/LogicHandler.md @@ -0,0 +1,3 @@ + + +::: faststream.kafka.handler.LogicHandler diff --git a/search/docs/api/faststream/kafka/message/KafkaMessage.md b/search/docs/api/faststream/kafka/message/KafkaMessage.md new file mode 100644 index 0000000..f8d81c2 --- /dev/null +++ b/search/docs/api/faststream/kafka/message/KafkaMessage.md @@ -0,0 +1,3 @@ + + +::: faststream.kafka.message.KafkaMessage diff --git a/search/docs/api/faststream/kafka/parser/AioKafkaParser.md b/search/docs/api/faststream/kafka/parser/AioKafkaParser.md new file mode 100644 index 0000000..dbb8f28 --- /dev/null +++ b/search/docs/api/faststream/kafka/parser/AioKafkaParser.md @@ -0,0 +1,3 @@ + + +::: faststream.kafka.parser.AioKafkaParser diff --git a/search/docs/api/faststream/kafka/producer/AioKafkaFastProducer.md b/search/docs/api/faststream/kafka/producer/AioKafkaFastProducer.md new file mode 100644 index 0000000..f5884f9 --- /dev/null +++ b/search/docs/api/faststream/kafka/producer/AioKafkaFastProducer.md @@ -0,0 +1,3 @@ + + +::: faststream.kafka.producer.AioKafkaFastProducer diff --git a/search/docs/api/faststream/kafka/publisher/LogicPublisher.md b/search/docs/api/faststream/kafka/publisher/LogicPublisher.md new file mode 100644 index 0000000..9e102cf --- /dev/null +++ b/search/docs/api/faststream/kafka/publisher/LogicPublisher.md @@ -0,0 +1,3 @@ + + +::: faststream.kafka.publisher.LogicPublisher diff --git a/search/docs/api/faststream/kafka/router/KafkaRouter.md b/search/docs/api/faststream/kafka/router/KafkaRouter.md new file mode 100644 index 0000000..9a4e764 --- /dev/null +++ b/search/docs/api/faststream/kafka/router/KafkaRouter.md @@ -0,0 +1,3 @@ + + +::: faststream.kafka.router.KafkaRouter diff --git a/search/docs/api/faststream/kafka/security/parse_security.md b/search/docs/api/faststream/kafka/security/parse_security.md new file mode 100644 index 0000000..e6efaf4 --- /dev/null +++ b/search/docs/api/faststream/kafka/security/parse_security.md @@ -0,0 +1,3 @@ + + +::: faststream.kafka.security.parse_security diff --git a/search/docs/api/faststream/kafka/shared/logging/KafkaLoggingMixin.md b/search/docs/api/faststream/kafka/shared/logging/KafkaLoggingMixin.md 
new file mode 100644 index 0000000..0187ef8 --- /dev/null +++ b/search/docs/api/faststream/kafka/shared/logging/KafkaLoggingMixin.md @@ -0,0 +1,3 @@ + + +::: faststream.kafka.shared.logging.KafkaLoggingMixin diff --git a/search/docs/api/faststream/kafka/shared/publisher/ABCPublisher.md b/search/docs/api/faststream/kafka/shared/publisher/ABCPublisher.md new file mode 100644 index 0000000..fd2053e --- /dev/null +++ b/search/docs/api/faststream/kafka/shared/publisher/ABCPublisher.md @@ -0,0 +1,3 @@ + + +::: faststream.kafka.shared.publisher.ABCPublisher diff --git a/search/docs/api/faststream/kafka/shared/router/KafkaRouter.md b/search/docs/api/faststream/kafka/shared/router/KafkaRouter.md new file mode 100644 index 0000000..5fa9d63 --- /dev/null +++ b/search/docs/api/faststream/kafka/shared/router/KafkaRouter.md @@ -0,0 +1,3 @@ + + +::: faststream.kafka.shared.router.KafkaRouter diff --git a/search/docs/api/faststream/kafka/shared/schemas/ConsumerConnectionParams.md b/search/docs/api/faststream/kafka/shared/schemas/ConsumerConnectionParams.md new file mode 100644 index 0000000..2480ec5 --- /dev/null +++ b/search/docs/api/faststream/kafka/shared/schemas/ConsumerConnectionParams.md @@ -0,0 +1,3 @@ + + +::: faststream.kafka.shared.schemas.ConsumerConnectionParams diff --git a/search/docs/api/faststream/kafka/test/FakeProducer.md b/search/docs/api/faststream/kafka/test/FakeProducer.md new file mode 100644 index 0000000..564ecbd --- /dev/null +++ b/search/docs/api/faststream/kafka/test/FakeProducer.md @@ -0,0 +1,3 @@ + + +::: faststream.kafka.test.FakeProducer diff --git a/search/docs/api/faststream/kafka/test/TestKafkaBroker.md b/search/docs/api/faststream/kafka/test/TestKafkaBroker.md new file mode 100644 index 0000000..33fb0a8 --- /dev/null +++ b/search/docs/api/faststream/kafka/test/TestKafkaBroker.md @@ -0,0 +1,3 @@ + + +::: faststream.kafka.test.TestKafkaBroker diff --git a/search/docs/api/faststream/kafka/test/build_message.md b/search/docs/api/faststream/kafka/test/build_message.md new file mode 100644 index 0000000..a1e6804 --- /dev/null +++ b/search/docs/api/faststream/kafka/test/build_message.md @@ -0,0 +1,3 @@ + + +::: faststream.kafka.test.build_message diff --git a/search/docs/api/faststream/log/formatter/ColourizedFormatter.md b/search/docs/api/faststream/log/formatter/ColourizedFormatter.md new file mode 100644 index 0000000..7fad07c --- /dev/null +++ b/search/docs/api/faststream/log/formatter/ColourizedFormatter.md @@ -0,0 +1,3 @@ + + +::: faststream.log.formatter.ColourizedFormatter diff --git a/search/docs/api/faststream/log/formatter/expand_log_field.md b/search/docs/api/faststream/log/formatter/expand_log_field.md new file mode 100644 index 0000000..cde1975 --- /dev/null +++ b/search/docs/api/faststream/log/formatter/expand_log_field.md @@ -0,0 +1,3 @@ + + +::: faststream.log.formatter.expand_log_field diff --git a/search/docs/api/faststream/log/formatter/make_record_with_extra.md b/search/docs/api/faststream/log/formatter/make_record_with_extra.md new file mode 100644 index 0000000..cab315c --- /dev/null +++ b/search/docs/api/faststream/log/formatter/make_record_with_extra.md @@ -0,0 +1,3 @@ + + +::: faststream.log.formatter.make_record_with_extra diff --git a/search/docs/api/faststream/log/logging/configure_formatter.md b/search/docs/api/faststream/log/logging/configure_formatter.md new file mode 100644 index 0000000..ff96783 --- /dev/null +++ b/search/docs/api/faststream/log/logging/configure_formatter.md @@ -0,0 +1,3 @@ + + +::: faststream.log.logging.configure_formatter 
diff --git a/search/docs/api/faststream/nats/asyncapi/Handler.md b/search/docs/api/faststream/nats/asyncapi/Handler.md new file mode 100644 index 0000000..7b94601 --- /dev/null +++ b/search/docs/api/faststream/nats/asyncapi/Handler.md @@ -0,0 +1,3 @@ + + +::: faststream.nats.asyncapi.Handler diff --git a/search/docs/api/faststream/nats/asyncapi/Publisher.md b/search/docs/api/faststream/nats/asyncapi/Publisher.md new file mode 100644 index 0000000..93912fa --- /dev/null +++ b/search/docs/api/faststream/nats/asyncapi/Publisher.md @@ -0,0 +1,3 @@ + + +::: faststream.nats.asyncapi.Publisher diff --git a/search/docs/api/faststream/nats/broker/NatsBroker.md b/search/docs/api/faststream/nats/broker/NatsBroker.md new file mode 100644 index 0000000..10aa12b --- /dev/null +++ b/search/docs/api/faststream/nats/broker/NatsBroker.md @@ -0,0 +1,3 @@ + + +::: faststream.nats.broker.NatsBroker diff --git a/search/docs/api/faststream/nats/fastapi/NatsRouter.md b/search/docs/api/faststream/nats/fastapi/NatsRouter.md new file mode 100644 index 0000000..eaa8520 --- /dev/null +++ b/search/docs/api/faststream/nats/fastapi/NatsRouter.md @@ -0,0 +1,3 @@ + + +::: faststream.nats.fastapi.NatsRouter diff --git a/search/docs/api/faststream/nats/handler/LogicNatsHandler.md b/search/docs/api/faststream/nats/handler/LogicNatsHandler.md new file mode 100644 index 0000000..1a53b76 --- /dev/null +++ b/search/docs/api/faststream/nats/handler/LogicNatsHandler.md @@ -0,0 +1,3 @@ + + +::: faststream.nats.handler.LogicNatsHandler diff --git a/search/docs/api/faststream/nats/helpers/StreamBuilder.md b/search/docs/api/faststream/nats/helpers/StreamBuilder.md new file mode 100644 index 0000000..cee7ef2 --- /dev/null +++ b/search/docs/api/faststream/nats/helpers/StreamBuilder.md @@ -0,0 +1,3 @@ + + +::: faststream.nats.helpers.StreamBuilder diff --git a/search/docs/api/faststream/nats/js_stream/JStream.md b/search/docs/api/faststream/nats/js_stream/JStream.md new file mode 100644 index 0000000..6fd13ad --- /dev/null +++ b/search/docs/api/faststream/nats/js_stream/JStream.md @@ -0,0 +1,3 @@ + + +::: faststream.nats.js_stream.JStream diff --git a/search/docs/api/faststream/nats/message/NatsMessage.md b/search/docs/api/faststream/nats/message/NatsMessage.md new file mode 100644 index 0000000..2b1cf46 --- /dev/null +++ b/search/docs/api/faststream/nats/message/NatsMessage.md @@ -0,0 +1,3 @@ + + +::: faststream.nats.message.NatsMessage diff --git a/search/docs/api/faststream/nats/parser/NatsParser.md b/search/docs/api/faststream/nats/parser/NatsParser.md new file mode 100644 index 0000000..351ade6 --- /dev/null +++ b/search/docs/api/faststream/nats/parser/NatsParser.md @@ -0,0 +1,3 @@ + + +::: faststream.nats.parser.NatsParser diff --git a/search/docs/api/faststream/nats/producer/NatsFastProducer.md b/search/docs/api/faststream/nats/producer/NatsFastProducer.md new file mode 100644 index 0000000..c306c81 --- /dev/null +++ b/search/docs/api/faststream/nats/producer/NatsFastProducer.md @@ -0,0 +1,3 @@ + + +::: faststream.nats.producer.NatsFastProducer diff --git a/search/docs/api/faststream/nats/producer/NatsJSFastProducer.md b/search/docs/api/faststream/nats/producer/NatsJSFastProducer.md new file mode 100644 index 0000000..e8b5402 --- /dev/null +++ b/search/docs/api/faststream/nats/producer/NatsJSFastProducer.md @@ -0,0 +1,3 @@ + + +::: faststream.nats.producer.NatsJSFastProducer diff --git a/search/docs/api/faststream/nats/publisher/LogicPublisher.md b/search/docs/api/faststream/nats/publisher/LogicPublisher.md new file mode 100644 
index 0000000..01869cc --- /dev/null +++ b/search/docs/api/faststream/nats/publisher/LogicPublisher.md @@ -0,0 +1,3 @@ + + +::: faststream.nats.publisher.LogicPublisher diff --git a/search/docs/api/faststream/nats/router/NatsRouter.md b/search/docs/api/faststream/nats/router/NatsRouter.md new file mode 100644 index 0000000..3c06960 --- /dev/null +++ b/search/docs/api/faststream/nats/router/NatsRouter.md @@ -0,0 +1,3 @@ + + +::: faststream.nats.router.NatsRouter diff --git a/search/docs/api/faststream/nats/shared/logging/NatsLoggingMixin.md b/search/docs/api/faststream/nats/shared/logging/NatsLoggingMixin.md new file mode 100644 index 0000000..90b6c3c --- /dev/null +++ b/search/docs/api/faststream/nats/shared/logging/NatsLoggingMixin.md @@ -0,0 +1,3 @@ + + +::: faststream.nats.shared.logging.NatsLoggingMixin diff --git a/search/docs/api/faststream/nats/shared/router/NatsRouter.md b/search/docs/api/faststream/nats/shared/router/NatsRouter.md new file mode 100644 index 0000000..53dc2d1 --- /dev/null +++ b/search/docs/api/faststream/nats/shared/router/NatsRouter.md @@ -0,0 +1,3 @@ + + +::: faststream.nats.shared.router.NatsRouter diff --git a/search/docs/api/faststream/nats/test/FakeProducer.md b/search/docs/api/faststream/nats/test/FakeProducer.md new file mode 100644 index 0000000..17bb040 --- /dev/null +++ b/search/docs/api/faststream/nats/test/FakeProducer.md @@ -0,0 +1,3 @@ + + +::: faststream.nats.test.FakeProducer diff --git a/search/docs/api/faststream/nats/test/PatchedMessage.md b/search/docs/api/faststream/nats/test/PatchedMessage.md new file mode 100644 index 0000000..b2cd58d --- /dev/null +++ b/search/docs/api/faststream/nats/test/PatchedMessage.md @@ -0,0 +1,3 @@ + + +::: faststream.nats.test.PatchedMessage diff --git a/search/docs/api/faststream/nats/test/TestNatsBroker.md b/search/docs/api/faststream/nats/test/TestNatsBroker.md new file mode 100644 index 0000000..bf5dcf3 --- /dev/null +++ b/search/docs/api/faststream/nats/test/TestNatsBroker.md @@ -0,0 +1,3 @@ + + +::: faststream.nats.test.TestNatsBroker diff --git a/search/docs/api/faststream/nats/test/build_message.md b/search/docs/api/faststream/nats/test/build_message.md new file mode 100644 index 0000000..ee8d45a --- /dev/null +++ b/search/docs/api/faststream/nats/test/build_message.md @@ -0,0 +1,3 @@ + + +::: faststream.nats.test.build_message diff --git a/search/docs/api/faststream/rabbit/asyncapi/Handler.md b/search/docs/api/faststream/rabbit/asyncapi/Handler.md new file mode 100644 index 0000000..6af44c6 --- /dev/null +++ b/search/docs/api/faststream/rabbit/asyncapi/Handler.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.asyncapi.Handler diff --git a/search/docs/api/faststream/rabbit/asyncapi/Publisher.md b/search/docs/api/faststream/rabbit/asyncapi/Publisher.md new file mode 100644 index 0000000..ac4f343 --- /dev/null +++ b/search/docs/api/faststream/rabbit/asyncapi/Publisher.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.asyncapi.Publisher diff --git a/search/docs/api/faststream/rabbit/asyncapi/RMQAsyncAPIChannel.md b/search/docs/api/faststream/rabbit/asyncapi/RMQAsyncAPIChannel.md new file mode 100644 index 0000000..1a4923c --- /dev/null +++ b/search/docs/api/faststream/rabbit/asyncapi/RMQAsyncAPIChannel.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.asyncapi.RMQAsyncAPIChannel diff --git a/search/docs/api/faststream/rabbit/broker/RabbitBroker.md b/search/docs/api/faststream/rabbit/broker/RabbitBroker.md new file mode 100644 index 0000000..39f86ba --- /dev/null +++ 
b/search/docs/api/faststream/rabbit/broker/RabbitBroker.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.broker.RabbitBroker diff --git a/search/docs/api/faststream/rabbit/fastapi/RabbitRouter.md b/search/docs/api/faststream/rabbit/fastapi/RabbitRouter.md new file mode 100644 index 0000000..e19594f --- /dev/null +++ b/search/docs/api/faststream/rabbit/fastapi/RabbitRouter.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.fastapi.RabbitRouter diff --git a/search/docs/api/faststream/rabbit/handler/LogicHandler.md b/search/docs/api/faststream/rabbit/handler/LogicHandler.md new file mode 100644 index 0000000..bc9da85 --- /dev/null +++ b/search/docs/api/faststream/rabbit/handler/LogicHandler.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.handler.LogicHandler diff --git a/search/docs/api/faststream/rabbit/helpers/RabbitDeclarer.md b/search/docs/api/faststream/rabbit/helpers/RabbitDeclarer.md new file mode 100644 index 0000000..31e8eaf --- /dev/null +++ b/search/docs/api/faststream/rabbit/helpers/RabbitDeclarer.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.helpers.RabbitDeclarer diff --git a/search/docs/api/faststream/rabbit/message/RabbitMessage.md b/search/docs/api/faststream/rabbit/message/RabbitMessage.md new file mode 100644 index 0000000..8544769 --- /dev/null +++ b/search/docs/api/faststream/rabbit/message/RabbitMessage.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.message.RabbitMessage diff --git a/search/docs/api/faststream/rabbit/parser/AioPikaParser.md b/search/docs/api/faststream/rabbit/parser/AioPikaParser.md new file mode 100644 index 0000000..24b8258 --- /dev/null +++ b/search/docs/api/faststream/rabbit/parser/AioPikaParser.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.parser.AioPikaParser diff --git a/search/docs/api/faststream/rabbit/producer/AioPikaFastProducer.md b/search/docs/api/faststream/rabbit/producer/AioPikaFastProducer.md new file mode 100644 index 0000000..89866c6 --- /dev/null +++ b/search/docs/api/faststream/rabbit/producer/AioPikaFastProducer.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.producer.AioPikaFastProducer diff --git a/search/docs/api/faststream/rabbit/publisher/LogicPublisher.md b/search/docs/api/faststream/rabbit/publisher/LogicPublisher.md new file mode 100644 index 0000000..cfbb1e1 --- /dev/null +++ b/search/docs/api/faststream/rabbit/publisher/LogicPublisher.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.publisher.LogicPublisher diff --git a/search/docs/api/faststream/rabbit/router/RabbitRouter.md b/search/docs/api/faststream/rabbit/router/RabbitRouter.md new file mode 100644 index 0000000..9e82b4d --- /dev/null +++ b/search/docs/api/faststream/rabbit/router/RabbitRouter.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.router.RabbitRouter diff --git a/search/docs/api/faststream/rabbit/security/parse_security.md b/search/docs/api/faststream/rabbit/security/parse_security.md new file mode 100644 index 0000000..9d4ded7 --- /dev/null +++ b/search/docs/api/faststream/rabbit/security/parse_security.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.security.parse_security diff --git a/search/docs/api/faststream/rabbit/shared/constants/ExchangeType.md b/search/docs/api/faststream/rabbit/shared/constants/ExchangeType.md new file mode 100644 index 0000000..a92285d --- /dev/null +++ b/search/docs/api/faststream/rabbit/shared/constants/ExchangeType.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.shared.constants.ExchangeType diff --git a/search/docs/api/faststream/rabbit/shared/logging/RabbitLoggingMixin.md b/search/docs/api/faststream/rabbit/shared/logging/RabbitLoggingMixin.md 
new file mode 100644 index 0000000..e01ac23 --- /dev/null +++ b/search/docs/api/faststream/rabbit/shared/logging/RabbitLoggingMixin.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.shared.logging.RabbitLoggingMixin diff --git a/search/docs/api/faststream/rabbit/shared/publisher/ABCPublisher.md b/search/docs/api/faststream/rabbit/shared/publisher/ABCPublisher.md new file mode 100644 index 0000000..50ef041 --- /dev/null +++ b/search/docs/api/faststream/rabbit/shared/publisher/ABCPublisher.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.shared.publisher.ABCPublisher diff --git a/search/docs/api/faststream/rabbit/shared/router/RabbitRouter.md b/search/docs/api/faststream/rabbit/shared/router/RabbitRouter.md new file mode 100644 index 0000000..2be3db3 --- /dev/null +++ b/search/docs/api/faststream/rabbit/shared/router/RabbitRouter.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.shared.router.RabbitRouter diff --git a/search/docs/api/faststream/rabbit/shared/schemas/BaseRMQInformation.md b/search/docs/api/faststream/rabbit/shared/schemas/BaseRMQInformation.md new file mode 100644 index 0000000..e0f233b --- /dev/null +++ b/search/docs/api/faststream/rabbit/shared/schemas/BaseRMQInformation.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.shared.schemas.BaseRMQInformation diff --git a/search/docs/api/faststream/rabbit/shared/schemas/RabbitExchange.md b/search/docs/api/faststream/rabbit/shared/schemas/RabbitExchange.md new file mode 100644 index 0000000..378d85a --- /dev/null +++ b/search/docs/api/faststream/rabbit/shared/schemas/RabbitExchange.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.shared.schemas.RabbitExchange diff --git a/search/docs/api/faststream/rabbit/shared/schemas/RabbitQueue.md b/search/docs/api/faststream/rabbit/shared/schemas/RabbitQueue.md new file mode 100644 index 0000000..be64b41 --- /dev/null +++ b/search/docs/api/faststream/rabbit/shared/schemas/RabbitQueue.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.shared.schemas.RabbitQueue diff --git a/search/docs/api/faststream/rabbit/shared/schemas/get_routing_hash.md b/search/docs/api/faststream/rabbit/shared/schemas/get_routing_hash.md new file mode 100644 index 0000000..5d56af2 --- /dev/null +++ b/search/docs/api/faststream/rabbit/shared/schemas/get_routing_hash.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.shared.schemas.get_routing_hash diff --git a/search/docs/api/faststream/rabbit/test/FakeProducer.md b/search/docs/api/faststream/rabbit/test/FakeProducer.md new file mode 100644 index 0000000..2c322d5 --- /dev/null +++ b/search/docs/api/faststream/rabbit/test/FakeProducer.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.test.FakeProducer diff --git a/search/docs/api/faststream/rabbit/test/PatchedMessage.md b/search/docs/api/faststream/rabbit/test/PatchedMessage.md new file mode 100644 index 0000000..1d48ae3 --- /dev/null +++ b/search/docs/api/faststream/rabbit/test/PatchedMessage.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.test.PatchedMessage diff --git a/search/docs/api/faststream/rabbit/test/TestRabbitBroker.md b/search/docs/api/faststream/rabbit/test/TestRabbitBroker.md new file mode 100644 index 0000000..df580a8 --- /dev/null +++ b/search/docs/api/faststream/rabbit/test/TestRabbitBroker.md @@ -0,0 +1,3 @@ + + +::: faststream.rabbit.test.TestRabbitBroker diff --git a/search/docs/api/faststream/rabbit/test/build_message.md b/search/docs/api/faststream/rabbit/test/build_message.md new file mode 100644 index 0000000..ce8d58f --- /dev/null +++ b/search/docs/api/faststream/rabbit/test/build_message.md @@ -0,0 +1,3 @@ + + +::: 
faststream.rabbit.test.build_message diff --git a/search/docs/api/faststream/utils/classes/Singleton.md b/search/docs/api/faststream/utils/classes/Singleton.md new file mode 100644 index 0000000..50c6564 --- /dev/null +++ b/search/docs/api/faststream/utils/classes/Singleton.md @@ -0,0 +1,3 @@ + + +::: faststream.utils.classes.Singleton diff --git a/search/docs/api/faststream/utils/context/main/ContextRepo.md b/search/docs/api/faststream/utils/context/main/ContextRepo.md new file mode 100644 index 0000000..a2ff21a --- /dev/null +++ b/search/docs/api/faststream/utils/context/main/ContextRepo.md @@ -0,0 +1,3 @@ + + +::: faststream.utils.context.main.ContextRepo diff --git a/search/docs/api/faststream/utils/context/types/Context.md b/search/docs/api/faststream/utils/context/types/Context.md new file mode 100644 index 0000000..456c6bd --- /dev/null +++ b/search/docs/api/faststream/utils/context/types/Context.md @@ -0,0 +1,3 @@ + + +::: faststream.utils.context.types.Context diff --git a/search/docs/api/faststream/utils/context/types/resolve_context.md b/search/docs/api/faststream/utils/context/types/resolve_context.md new file mode 100644 index 0000000..53855bc --- /dev/null +++ b/search/docs/api/faststream/utils/context/types/resolve_context.md @@ -0,0 +1,3 @@ + + +::: faststream.utils.context.types.resolve_context diff --git a/search/docs/api/faststream/utils/data/filter_by_dict.md b/search/docs/api/faststream/utils/data/filter_by_dict.md new file mode 100644 index 0000000..83ce1b9 --- /dev/null +++ b/search/docs/api/faststream/utils/data/filter_by_dict.md @@ -0,0 +1,3 @@ + + +::: faststream.utils.data.filter_by_dict diff --git a/search/docs/api/faststream/utils/functions/get_function_positional_arguments.md b/search/docs/api/faststream/utils/functions/get_function_positional_arguments.md new file mode 100644 index 0000000..cc9478c --- /dev/null +++ b/search/docs/api/faststream/utils/functions/get_function_positional_arguments.md @@ -0,0 +1,3 @@ + + +::: faststream.utils.functions.get_function_positional_arguments diff --git a/search/docs/api/faststream/utils/functions/timeout_scope.md b/search/docs/api/faststream/utils/functions/timeout_scope.md new file mode 100644 index 0000000..5a55da4 --- /dev/null +++ b/search/docs/api/faststream/utils/functions/timeout_scope.md @@ -0,0 +1,3 @@ + + +::: faststream.utils.functions.timeout_scope diff --git a/search/docs/api/faststream/utils/functions/to_async.md b/search/docs/api/faststream/utils/functions/to_async.md new file mode 100644 index 0000000..98689c4 --- /dev/null +++ b/search/docs/api/faststream/utils/functions/to_async.md @@ -0,0 +1,3 @@ + + +::: faststream.utils.functions.to_async diff --git a/search/docs/api/faststream/utils/no_cast/NoCast.md b/search/docs/api/faststream/utils/no_cast/NoCast.md new file mode 100644 index 0000000..180a5ef --- /dev/null +++ b/search/docs/api/faststream/utils/no_cast/NoCast.md @@ -0,0 +1,3 @@ + + +::: faststream.utils.no_cast.NoCast diff --git a/search/docs/getting-started/asyncapi/custom.md b/search/docs/getting-started/asyncapi/custom.md new file mode 100644 index 0000000..9771dca --- /dev/null +++ b/search/docs/getting-started/asyncapi/custom.md @@ -0,0 +1,188 @@ +# Customizing AsyncAPI Documentation for FastStream + +In this guide, we will explore how to customize AsyncAPI documentation for your FastStream application. Whether you want to add custom app info, broker information, handlers, or fine-tune payload details, we'll walk you through each step. 
+ +## Prerequisites + +Before we dive into customization, ensure you have a basic FastStream application up and running. If you haven't done that yet, let's set up a simple application right now. + +Copy the following code into your `basic.py` file: + +```python linenums="1" +from faststream import FastStream +from faststream.kafka import KafkaBroker, KafkaMessage + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + +@broker.publisher("output_data") +@broker.subscriber("input_data") +async def on_input_data(msg): +    # your processing logic +    pass +``` + +Then serve the documentation: + +``` shell +faststream docs serve basic:app +``` + +![HTML-page](../../../assets/img/AsyncAPI-basic-html-short.png) + +## Setup Custom FastStream App Info + +Let's start by customizing the app information that appears in your AsyncAPI documentation. This is a great way to give your documentation a personal touch. Here's how: + +1. Locate the app configuration in your FastStream application. +1. Update the `title`, `version`, and `description` fields to reflect your application's details. +1. Save the changes. +1. Serve your FastStream app documentation. + +Copy the following code into your `basic.py` file; we have highlighted the additional info passed to the FastStream app: + +```python linenums="1" hl_lines="7-12" +from faststream import FastStream +from faststream.kafka import KafkaBroker, KafkaMessage +from faststream.asyncapi.schema import Contact, ExternalDocs, License, Tag + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker, +    title="My App", +    version="1.0.0", +    description="Test description", +    license=License(name="MIT", url="https://opensource.org/license/mit/"), +    terms_of_service="https://my-terms.com/", +    contact=Contact(name="support", url="https://help.com/"), +) + +@broker.publisher("output_data") +@broker.subscriber("input_data") +async def on_input_data(msg): +    # your processing logic +    pass +``` + +``` shell +faststream docs serve basic:app +``` + +![HTML-page](../../../assets/img/AsyncAPI-custom-info.png) + +Now, your documentation reflects your application's identity and purpose. + +## Setup Custom Broker Information + +The next step is to customize broker information. This helps users understand the messaging system your application uses. Follow these steps: + +1. Locate the broker configuration in your FastStream application. +1. Update the `description` field. +1. Save the changes. +1. Serve your FastStream app. + +Copy the following code into your `basic.py` file; we have highlighted the additional info passed to the FastStream app broker: + +```python linenums="1" hl_lines="5" +from faststream import FastStream +from faststream.kafka import KafkaBroker, KafkaMessage +from faststream.asyncapi.schema import Tag + +broker = KafkaBroker("localhost:9092", description="Kafka broker running locally") +app = FastStream(broker) + +@broker.publisher("output_data") +@broker.subscriber("input_data") +async def on_input_data(msg): +    # your processing logic +    pass +``` + +``` shell +faststream docs serve basic:app +``` + +![HTML-page](../../../assets/img/AsyncAPI-custom-broker.png) + +Your AsyncAPI documentation now provides clear insights into the messaging infrastructure you're using. + +## Setup Custom Handler Information + +Customizing handler information helps users comprehend the purpose and behavior of each message handler. Here's how to do it: + +1. Navigate to your handler definitions in your FastStream application. +1. Add a description to each handler using the `description` field. +1. Save the changes. +1. Serve your FastStream app. 
+
+Copy the following code into your `basic.py` file; we have highlighted the additional info passed to the FastStream app handlers:
+
+```python linenums="1" hl_lines="7-8"
+from faststream import FastStream
+from faststream.kafka import KafkaBroker, KafkaMessage
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+@broker.publisher("output_data", description="My publisher description")
+@broker.subscriber("input_data", description="My subscriber description")
+async def on_input_data(msg):
+    # your processing logic
+    pass
+```
+
+Serve the documentation again:
+
+```shell
+faststream docs serve basic:app
+```
+
+![HTML-page](../../../assets/img/AsyncAPI-custom-handler.png)
+
+Now, your documentation is enriched with meaningful details about each message handler.
+
+## Setup Payload Information via Pydantic Model
+
+To describe your message payload effectively, you can use Pydantic models. Here's how:
+
+1. Define Pydantic models for your message payloads.
+1. Annotate these models with descriptions and examples.
+1. Use these models as argument types or return types in your handlers.
+1. Save the changes.
+1. Serve your FastStream app.
+
+Copy the following code into your `basic.py` file; we have highlighted the creation of the payload info, and you can see it used as both the return type and the `msg` argument type of the `on_input_data` function:
+
+```python linenums="1" hl_lines="7-10"
+from pydantic import BaseModel, Field, NonNegativeFloat
+
+from faststream import FastStream
+from faststream.kafka import KafkaBroker
+
+
+class DataBasic(BaseModel):
+    data: NonNegativeFloat = Field(
+        ..., examples=[0.5], description="Float data example"
+    )
+
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+
+@broker.publisher("output_data")
+@broker.subscriber("input_data")
+async def on_input_data(msg: DataBasic) -> DataBasic:
+    # your processing logic
+    pass
+```
+
+Serve the documentation again:
+
+```shell
+faststream docs serve basic:app
+```
+
+![HTML-page](../../../assets/img/AsyncAPI-payload-info.png)
+
+Your AsyncAPI documentation now showcases well-structured payload information.
+
+## Generate Schema.json, Customize Manually, and Serve It
+
+To take customization to the next level, you can manually modify the generated schema file. Follow these steps:
+
+1. Generate the schema file:
+
+    ```shell
+    faststream docs gen basic:app
+    ```
+
+1. Manually edit the generated `asyncapi.json` file to add custom fields, descriptions, and details.
+1. Save your changes.
+1. Serve the edited schema:
+
+    ```shell
+    faststream docs serve asyncapi.json
+    ```
+
+Now, you have fine-tuned control over your AsyncAPI documentation.
+
+## Conclusion
+
+Customizing AsyncAPI documentation for your FastStream application not only enhances its appearance but also provides valuable insights to users. With these steps, you can create documentation that's not only informative but also uniquely yours.
+
+Happy coding with your customized FastStream AsyncAPI documentation!
diff --git a/search/docs/getting-started/asyncapi/export.md b/search/docs/getting-started/asyncapi/export.md
new file mode 100644
index 0000000..177f854
--- /dev/null
+++ b/search/docs/getting-started/asyncapi/export.md
@@ -0,0 +1,56 @@
+# How to Generate and Serve AsyncAPI Documentation
+
+In this guide, let's explore how to generate and serve [**AsyncAPI**](https://www.asyncapi.com/){.external-link target="_blank"} documentation for our FastStream application.
+
+## Writing the FastStream Application
+
+Here's an example Python application using **FastStream** that consumes data from a
+topic, increments the value, and outputs the data to another topic.
+Save it in a file called `basic.py`.
+
+``` python
+from pydantic import BaseModel, Field, NonNegativeFloat
+
+from faststream import FastStream, Logger
+from faststream.kafka import KafkaBroker
+
+
+class DataBasic(BaseModel):
+    data: NonNegativeFloat = Field(
+        ..., examples=[0.5], description="Float data example"
+    )
+
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+
+@broker.publisher("output_data")
+@broker.subscriber("input_data")
+async def on_input_data(msg: DataBasic, logger: Logger) -> DataBasic:
+    logger.info(msg)
+    return DataBasic(data=msg.data + 1.0)
+```
+
+## Generating the AsyncAPI Specification
+
+Now that we have a FastStream application, we can proceed with generating the AsyncAPI specification using a CLI command.
+
+``` shell
+faststream docs gen basic:app
+```
+
+The above command will generate the AsyncAPI specification and save it in a file called `asyncapi.json`.
+
+If you prefer `yaml` instead of `json`, please run the following command to generate `asyncapi.yaml`.
+
+``` shell
+faststream docs gen --yaml basic:app
+```
+
+!!! note
+    To generate the documentation in **YAML** format, please first install the necessary dependency for working with the **YAML** file format.
+
+    ``` shell
+    pip install PyYAML
+    ```
diff --git a/search/docs/getting-started/asyncapi/hosting.md b/search/docs/getting-started/asyncapi/hosting.md
new file mode 100644
index 0000000..67b8ef3
--- /dev/null
+++ b/search/docs/getting-started/asyncapi/hosting.md
@@ -0,0 +1,38 @@
+# Serving the AsyncAPI Documentation
+
+FastStream provides a command to serve the AsyncAPI documentation.
+
+!!! note
+    This feature requires an Internet connection to obtain the **AsyncAPI HTML** via **CDN**.
+
+``` shell
+faststream docs serve basic:app
+```
+
+In the above command, we are providing the path in the format of `python_module:FastStream`. Alternatively, you can also specify `asyncapi.json` or `asyncapi.yaml` to serve the AsyncAPI documentation.
+
+``` shell
+faststream docs serve asyncapi.json
+# or
+faststream docs serve asyncapi.yaml
+```
+
+After running the command, it should serve the AsyncAPI documentation on port **8000** and display the following logs in the terminal.
+
+``` shell
+INFO:     Started server process [2364992]
+INFO:     Waiting for application startup.
+INFO:     Application startup complete.
+INFO:     Uvicorn running on http://localhost:8000 (Press CTRL+C to quit)
+```
+
+And you should be able to see the following page in your browser:
+
+=== "Short"
+    ![HTML-page](../../../assets/img/AsyncAPI-basic-html-short.png){ loading=lazy }
+
+=== "Expand"
+    ![HTML-page](../../../assets/img/AsyncAPI-basic-html-full.png){ loading=lazy }
+
+!!! tip
+    The command also offers options to serve the documentation on a different host and port.
diff --git a/search/docs/getting-started/cli/index.md b/search/docs/getting-started/cli/index.md
new file mode 100644
index 0000000..4891760
--- /dev/null
+++ b/search/docs/getting-started/cli/index.md
@@ -0,0 +1,128 @@
+# CLI
+
+**FastStream** has its own built-in **CLI** tool for your maximum comfort as a developer.
+
+!!! quote ""
+    Thanks to [*typer*](https://typer.tiangolo.com/){.external-link target="_blank"} and [*watchfiles*](https://watchfiles.helpmanual.io/){.external-link target="_blank"}. Their work is the basis of this tool.
+
+```shell
+faststream --help
+```
+
+```{ .shell .no-copy }
+Usage: faststream [OPTIONS] COMMAND [ARGS]...
+
+  Generate, run and manage FastStream apps to greater development experience
+
+Options:
+  -v, --version                   Show current platform, python and FastStream
+                                  version
+  --install-completion [bash|zsh|fish|powershell|pwsh]
+                                  Install completion for the specified shell.
+  --show-completion [bash|zsh|fish|powershell|pwsh]
+                                  Show completion for the specified shell, to
+                                  copy it or customize the installation.
+  --help                          Show this message and exit.
+
+Commands:
+  docs  AsyncAPI schema commands
+  run   Run [MODULE:APP] FastStream application
+```
+
+## Running the Project
+
+### Multiprocessing Scaling
+
+**FastStream** allows you to scale your application right from the command line by running it in a process pool.
+
+Just set the `--workers` option to scale your application:
+
+```shell
+faststream run serve:app --workers 2
+```
+
+```{ .shell .no-copy }
+INFO - Started parent process [7591]
+INFO - Started child process [7593]
+INFO - Started child process [7594]
+INFO - test | - `Handle` waiting for messages
+INFO - test | - `Handle` waiting for messages
+```
+
+### Hot Reload
+
+Thanks to [*watchfiles*](https://watchfiles.helpmanual.io/){.external-link target="_blank"}, written in *Rust*, you can
+work with your project easily. Edit the code as much as you like - the new version has already been launched and is waiting for your requests!
+
+```shell
+faststream run serve:app --reload
+```
+
+```{ .shell .no-copy }
+INFO - Started reloader process [7902] using WatchFiles
+INFO - FastStream app starting...
+INFO - test | - `Handle` waiting for messages
+INFO - FastStream app started successfully! To exit press CTRL+C
+```
+
+### Environment Management
+
+You can pass any custom flags and launch options to the **FastStream CLI** even without first registering them. Just use them when launching the application - and they will be right in your environment.
+
+Use this option to select environment files, configure logging, or anything else at your discretion.
+
+For example, we will pass the *.env* file to the context of our application:
+
+```shell
+faststream run serve:app --env=.env.dev
+```
+
+```{ .shell .no-copy }
+INFO - FastStream app starting...
+INFO - test | - `Handle` waiting for messages
+INFO - FastStream app started successfully! To exit press CTRL+C
+```
+
+{! includes/getting_started/cli/env.md !}
+
+!!! note
+    Note that the `env` parameter was passed to the `setup` function directly from the command line.
+
+All passed values can be of type `#!python bool`, `#!python str` or `#!python list[str]`.
+
+In this case, the flags will be interpreted as follows:
+
+```{ .shell .no-copy }
+faststream run app:app --flag       # flag = True
+faststream run app:app --no-flag    # flag = False
+faststream run app:app --my-flag    # my_flag = True
+faststream run app:app --key value  # key = "value"
+faststream run app:app --key 1 2    # key = ["1", "2"]
+```
+
+You can use them both individually and together in unlimited quantities.
+
+## AsyncAPI Schema
+
+Also, the **FastStream CLI** allows you to work with the **AsyncAPI** schema in a simple way.
+
+You are able to generate `.json` or `.yaml` files from your application code or host the **HTML** representation directly:
+
+```shell
+faststream docs --help
+```
+
+```{ .shell .no-copy }
+Usage: faststream docs [OPTIONS] COMMAND [ARGS]...
+
+  AsyncAPI schema commands
+
+Options:
+  --help  Show this message and exit.
+
+Commands:
+  gen    Generate project AsyncAPI schema
+  serve  Serve project AsyncAPI schema
+```
+
+To learn more about the commands above, please visit [**AsyncAPI export**](../asyncapi/export.md){.internal-link} and [**AsyncAPI hosting**](../asyncapi/hosting.md){.internal-link}.
diff --git a/search/docs/getting-started/config/index.md b/search/docs/getting-started/config/index.md
new file mode 100644
index 0000000..796dcfa
--- /dev/null
+++ b/search/docs/getting-started/config/index.md
@@ -0,0 +1,167 @@
+# Settings and Environment Variables
+
+In many cases, your application may require external settings or configurations, such as a broker connection or database credentials.
+
+To manage these settings effectively, it's common to provide them through environment variables that can be read by the application.
+
+## Pydantic `Settings`
+
+Fortunately, **Pydantic** provides a useful utility for handling settings coming from environment variables with [Pydantic: Settings management](https://docs.pydantic.dev/latest/usage/pydantic_settings/){.external-link target="_blank"}.
+
+### Install `pydantic-settings`
+
+First, install the `pydantic-settings` package:
+
+```console
+pip install pydantic-settings
+```
+
+!!! info
+    In **Pydantic v1**, this functionality was included with the main package. Now it is distributed as an independent package so that you can choose not to install it if you don't need that functionality.
+
+### Create the `Settings` Object
+
+Import `BaseSettings` from Pydantic and create a subclass, similar to what you would do with a Pydantic model.
+
+Just like with Pydantic models, you declare class attributes with type annotations and can use all the same validation features and tools, including different data types and additional validations with `Field()`.
+
+=== "Pydantic v2"
+    ```python linenums='1' hl_lines="1 4" title="config.py"
+    from pydantic_settings import BaseSettings
+
+
+    class Settings(BaseSettings):
+        url: str = ""
+        queue: str = "test-queue"
+
+
+    settings = Settings()
+    ```
+
+=== "Pydantic v1"
+    !!! info
+        In **Pydantic v1** you would import `BaseSettings` directly from `pydantic` instead of from `pydantic_settings`.
+
+    ```python linenums='1' hl_lines="1 4" title="config.py"
+    from pydantic import BaseSettings
+
+
+    class Settings(BaseSettings):
+        url: str = ""
+        queue: str = "test-queue"
+
+
+    settings = Settings()
+    ```
+
+When you create an instance of that `Settings` class (in this case, in the `settings` object), Pydantic will read the environment variables in a case-insensitive way. For example, an upper-case variable `APP_NAME` will still be read for the attribute `app_name`.
+
+It will also convert and validate the data, so when you use that `settings` object, you will have data of the types you declared (e.g. `url` and `queue` will be `str` values).
+
+### Using the `settings`
+
+Now you can use the new `settings` object in your application:
+
+```python linenums='1' hl_lines="3 9 14" title="serve.py"
+import os
+
+from pydantic_settings import BaseSettings
+
+from faststream import FastStream
+from faststream.rabbit import RabbitBroker
+
+
+class Settings(BaseSettings):
+    url: str
+    queue: str = "test-queue"
+
+
+settings = Settings(_env_file=os.getenv("ENV", ".env"))
+
+broker = RabbitBroker(settings.url)
+app = FastStream(broker)
+
+
+@broker.subscriber(settings.queue)
+async def handler(msg):
+    ...
+```
+
+### Running the Application
+
+You can run the application while passing the configuration parameters as environment variables.
For example, you could set the `URL` environment variable:
+
+```console
+URL="amqp://guest:guest@localhost:5672" faststream run serve:app
+```
+
+!!! tip
+    To set multiple environment variables for a single command, separate them with spaces and put them all before the command.
+
+## Reading a `.env` File
+
+If you have many settings that may change frequently, especially in different environments, it might be useful to store them in a file and then read them as if they were environment variables.
+
+This practice is common enough that it has a name; these environment variables are typically placed in a file named `.env`, commonly referred to as a "dotenv" file.
+
+!!! tip
+    In Unix-like systems like Linux and macOS, a file starting with a dot (`.`) is considered a hidden file.
+
+    But a dotenv file doesn't really have to have that exact filename.
+
+Pydantic supports reading from these types of files using an external library. You can learn more at [Pydantic Settings: Dotenv (.env) support](https://docs.pydantic.dev/latest/usage/pydantic_settings/#dotenv-env-support){.external-link target="_blank"}.
+
+!!! tip
+    To use this feature, you need to install the `python-dotenv` library.
+
+### The `.env` File
+
+You can create a `.env` file with contents like this:
+
+```bash
+URL="amqp://guest:guest@localhost:5672"
+QUEUE="test-queue"
+```
+
+### Reading Settings from `.env`
+
+Then update your `config.py` as follows:
+
+```python linenums='1' hl_lines="1 11"
+import os
+
+from pydantic_settings import BaseSettings
+
+
+class Settings(BaseSettings):
+    url: str
+    queue: str = "test-queue"
+
+
+settings = Settings(_env_file=os.getenv("ENV", ".env"))
+```
+
+This way, you can specify different `.env` files directly from your terminal, which can be extremely helpful for various testing and production scenarios.
+
+!!! note
+    By default, Pydantic will attempt to find a `.env` file. If it's not present, Pydantic will use the default field values.
+
+### Choosing the `.env` File at Startup
+
+Now you can run the application with different `.env` files like so:
+
+```console
+ENV=.local.env faststream run serve:app
+```
+
+Or, for a production environment:
+
+```console
+ENV=.production.env faststream run serve:app
+```
+
+Or even for a test environment:
+
+```console
+ENV=.test.env pytest
+```
diff --git a/search/docs/getting-started/context/custom.md b/search/docs/getting-started/context/custom.md
new file mode 100644
index 0000000..dd99314
--- /dev/null
+++ b/search/docs/getting-started/context/custom.md
@@ -0,0 +1,83 @@
+# Context Fields Declaration
+
+You can also store your own objects in the `Context`.
+
+## Global
+
+To declare an application-level context field, you need to call the `context.set_global` method with a key to indicate where the object will be placed in the context.
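+
+For illustration, here is a minimal sketch of the idea before the full broker-specific examples below (the `secret` key mirrors the field used in those examples; the Kafka broker is an arbitrary choice):
+
+```python
+from faststream import ContextRepo, FastStream
+from faststream.kafka import KafkaBroker
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+
+@app.on_startup
+async def set_secret(context: ContextRepo):
+    # store an application-level object under the "secret" key
+    context.set_global("secret", "my-perfect-secret")
+```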
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="9-10"
+    {!> docs_src/getting_started/context/custom_global_context_kafka.py [ln:1-5,16-18] !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="9-10"
+    {!> docs_src/getting_started/context/custom_global_context_rabbit.py [ln:1-5,16-18] !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="9-10"
+    {!> docs_src/getting_started/context/custom_global_context_nats.py [ln:1-5,16-18] !}
+    ```
+
+Afterward, you can access your `secret` field in the usual way:
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="4"
+    {!> docs_src/getting_started/context/custom_global_context_kafka.py [ln:8-13] !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="4"
+    {!> docs_src/getting_started/context/custom_global_context_rabbit.py [ln:8-13] !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="4"
+    {!> docs_src/getting_started/context/custom_global_context_nats.py [ln:8-13] !}
+    ```
+
+In this case, the field becomes a global context field: it does not depend on the current message handler (unlike `message`).
+
+To remove a field from the context, use the `reset_global` method:
+
+```python
+context.reset_global("my_key")
+```
+
+## Local
+
+To set a local context (available only within the message processing scope), use the `scope` context manager:
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="15 19 21-22"
+    {!> docs_src/getting_started/context/custom_local_context_kafka.py !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="15 19 21-22"
+    {!> docs_src/getting_started/context/custom_local_context_rabbit.py !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="15 19 21-22"
+    {!> docs_src/getting_started/context/custom_local_context_nats.py !}
+    ```
+
+You can also set the context yourself, and it will remain within the current call stack until you clear it.
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="1 14 25"
+    {!> docs_src/getting_started/context/manual_local_context_kafka.py !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="1 14 25"
+    {!> docs_src/getting_started/context/manual_local_context_rabbit.py !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="1 14 25"
+    {!> docs_src/getting_started/context/manual_local_context_nats.py !}
+    ```
diff --git a/search/docs/getting-started/context/existed.md b/search/docs/getting-started/context/existed.md
new file mode 100644
index 0000000..935e7a0
--- /dev/null
+++ b/search/docs/getting-started/context/existed.md
@@ -0,0 +1,82 @@
+# Existing Fields
+
+**Context** already contains some global objects that you can always access:
+
+* **broker** - the current broker
+* **context** - the context itself, in which you can write your own fields
+* **logger** - the logger used for your broker (tags messages with *message_id*)
+* **message** - the raw message (if you need access to it)
+
+At the same time, thanks to `contextvars.ContextVar`, **message** is local to your current consumer scope.
+
+## Access to Context Fields
+
+By default, the context searches for an object based on the argument name.
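+
+As a minimal sketch of this (the tabbed, broker-specific examples below show the full versions), naming an argument `message` is enough for the existing **message** field to be injected:
+
+```python
+from faststream import Context, apply_types
+
+
+@apply_types
+async def base_handler(
+    body,
+    message=Context(),  # resolved from the context by the argument name
+):
+    print(message)
+```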
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="1 12-15"
+    {!> docs_src/getting_started/context/existed_context_kafka.py [ln:1-2,10-11,14-23] !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="1 12-15"
+    {!> docs_src/getting_started/context/existed_context_rabbit.py [ln:1-2,10-11,14-23] !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="1 12-15"
+    {!> docs_src/getting_started/context/existed_context_nats.py [ln:1-2,10-11,14-23] !}
+    ```
+
+## Annotated Aliases
+
+Also, **FastStream** has already created `Annotated` aliases to provide you with comfortable access to existing objects. You can import them directly from `faststream` or your broker-specific modules:
+
+* Shared aliases
+
+```python
+from faststream import Logger, ContextRepo
+```
+
+* *Kafka* aliases
+
+```python
+from faststream.kafka.annotations import (
+    Logger, ContextRepo, KafkaMessage, KafkaBroker, KafkaProducer
+)
+```
+
+* *RabbitMQ* aliases
+
+```python
+from faststream.rabbit.annotations import (
+    Logger, ContextRepo, RabbitMessage, RabbitBroker, RabbitProducer
+)
+```
+
+* *NATS* aliases
+
+```python
+from faststream.nats.annotations import (
+    Logger, ContextRepo, NatsMessage,
+    NatsBroker, NatsProducer, NatsJsProducer,
+    Client, JsClient,
+)
+```
+
+To use them, simply import and use them as subscriber argument annotations.
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="3-8 17-20"
+    {!> docs_src/getting_started/context/existed_context_kafka.py [ln:1-11,26-35] !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="3-8 17-20"
+    {!> docs_src/getting_started/context/existed_context_rabbit.py [ln:1-11,26-35] !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="3-8 17-20"
+    {!> docs_src/getting_started/context/existed_context_nats.py [ln:1-11,26-35] !}
+    ```
diff --git a/search/docs/getting-started/context/extra.md b/search/docs/getting-started/context/extra.md
new file mode 100644
index 0000000..3824e51
--- /dev/null
+++ b/search/docs/getting-started/context/extra.md
@@ -0,0 +1,60 @@
+# Context Extra Options
+
+Additionally, `Context` provides you with some extra capabilities for working with containing objects.
+
+## Default Values
+
+For instance, if you attempt to access a field that doesn't exist in the global context, you will receive a `pydantic.ValidationError` exception.
+
+However, you can set default values if needed.
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="3 5"
+    {!> docs_src/getting_started/context/default_arguments_kafka.py [ln:7-11] !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="3 5"
+    {!> docs_src/getting_started/context/default_arguments_rabbit.py [ln:7-11] !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="3 5"
+    {!> docs_src/getting_started/context/default_arguments_nats.py [ln:7-11] !}
+    ```
+
+## Cast Context Types
+
+By default, context fields are **NOT CAST** to the type specified in their annotation.
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="6 10 12"
+    {!> docs_src/getting_started/context/cast_kafka.py [ln:1-12] !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="6 10 12"
+    {!> docs_src/getting_started/context/cast_rabbit.py [ln:1-12] !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="6 10 12"
+    {!> docs_src/getting_started/context/cast_nats.py [ln:1-12] !}
+    ```
+
+If you require this functionality, you can enable the appropriate flag.
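+
+As a minimal sketch, assuming the flag is the `cast` parameter of `Context` (the tabbed examples below show the full versions):
+
+```python
+from faststream import Context, apply_types
+
+
+@apply_types
+async def handle(
+    # with cast=True, the context value is converted to `int`
+    # (assuming a "field" key was previously stored in the context)
+    field: int = Context(cast=True),
+):
+    assert isinstance(field, int)
+```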
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="3 5"
+    {!> docs_src/getting_started/context/cast_kafka.py [ln:14-18] !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="3 5"
+    {!> docs_src/getting_started/context/cast_rabbit.py [ln:14-18] !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="3 5"
+    {!> docs_src/getting_started/context/cast_nats.py [ln:14-18] !}
+    ```
diff --git a/search/docs/getting-started/context/fields.md b/search/docs/getting-started/context/fields.md
new file mode 100644
index 0000000..e30ff72
--- /dev/null
+++ b/search/docs/getting-started/context/fields.md
@@ -0,0 +1,11 @@
+---
+comment_1: This way you can get access to context object by its name
+comment_2: This way you can get access to context object specific field
+---
+
+# Access by Name
+
+Sometimes, you may need to use a different name for the argument (not the one under which it is stored in the context) or get access to specific parts of the object. To do this, simply specify the name of what you want to access, and the context will provide you with the object.
+
+{% import 'getting_started/context/fields.md' as includes with context %}
+{{ includes }}
diff --git a/search/docs/getting-started/context/index.md b/search/docs/getting-started/context/index.md
new file mode 100644
index 0000000..67e6d02
--- /dev/null
+++ b/search/docs/getting-started/context/index.md
@@ -0,0 +1,66 @@
+# Application Context
+
+**FastStream** has its own Dependency Injection container - **Context**, used to store application runtime objects and variables.
+
+With this container, you can access both application scope and message processing scope objects. This functionality is similar to [`Depends`](../dependencies/index.md){.internal-link} usage.
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="1 11"
+    {!> docs_src/getting_started/context/base_kafka.py !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="1 11"
+    {!> docs_src/getting_started/context/base_rabbit.py !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="1 11"
+    {!> docs_src/getting_started/context/base_nats.py !}
+    ```
+
+But, with the [`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated){.external-link target="_blank"} Python feature usage, it is much closer to `#!python @pytest.fixture`.
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="1 6 15"
+    {!> docs_src/getting_started/context/annotated_kafka.py !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="1 6 15"
+    {!> docs_src/getting_started/context/annotated_rabbit.py !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="1 6 15"
+    {!> docs_src/getting_started/context/annotated_nats.py !}
+    ```
+
+## Usages
+
+By default, the context is available in the same place as `Depends`:
+
+* at lifespan hooks
+* message subscribers
+* nested dependencies
+
+!!! tip
+    Fields obtained from the `Context` are editable, so editing them in a function means editing them everywhere.
+
+## Compatibility with Regular Functions
+
+To use context in other functions, use the `#!python @apply_types` decorator. In this case, the context of the called function will correspond to the context of the event handler from which it was called.
+
+```python linenums="1" hl_lines="6 9-10"
+from faststream import Context, apply_types
+
+
+@broker.subscriber("test")
+async def handler(body):
+    nested_func(body)
+
+
+@apply_types
+def nested_func(body, logger=Context()):
+    logger.info(body)
+```
+
+In the example above, we did not pass the `logger` to `nested_func` when calling it; it was injected from the context.
diff --git a/search/docs/getting-started/contributing/CONTRIBUTING.md b/search/docs/getting-started/contributing/CONTRIBUTING.md
new file mode 100644
index 0000000..0eed253
--- /dev/null
+++ b/search/docs/getting-started/contributing/CONTRIBUTING.md
@@ -0,0 +1,143 @@
+# Development
+
+After cloning the project, you'll need to set up the development environment. Here are the guidelines on how to do this.
+
+## Virtual Environment with `venv`
+
+Create a virtual environment in a directory using Python's `venv` module:
+
+```bash
+python -m venv venv
+```
+
+That will create a `./venv/` directory with Python binaries, allowing you to install packages in an isolated environment.
+
+## Activate the Environment
+
+Activate the new environment with:
+
+```bash
+source ./venv/bin/activate
+```
+
+Ensure you have the latest pip version in your virtual environment:
+
+```bash
+python -m pip install --upgrade pip
+```
+
+## Installing Dependencies
+
+After activating the virtual environment as described above, run:
+
+```bash
+pip install -e ".[dev]"
+```
+
+This will install all the dependencies and your local FastStream in your virtual environment.
+
+### Using Your Local FastStream
+
+If you create a Python file that imports and uses FastStream, and run it with the Python from your local environment, it will use your local FastStream source code.
+
+Whenever you update your local FastStream source code, it will automatically use the latest version when you run your Python file again. This is because it is installed with `-e`.
+
+This way, you don't have to "install" your local version to be able to test every change.
+
+To use your local FastStream CLI, type:
+
+```bash
+python -m faststream ...
+```
+
+## Running Tests
+
+### Pytest
+
+To run tests with your current FastStream application and Python environment, use:
+
+```bash
+pytest tests
+# or
+./scripts/test.sh
+# with coverage output
+./scripts/test-cov.sh
+```
+
+In your project, you'll find some *pytest marks*:
+
+* **slow**
+* **rabbit**
+* **kafka**
+* **nats**
+* **all**
+
+By default, running *pytest* will execute "not slow" tests.
+
+To run all tests, use:
+
+```bash
+pytest -m 'all'
+```
+
+If you don't have a local broker instance running, you can run tests without those dependencies:
+
+```bash
+pytest -m 'not rabbit and not kafka and not nats'
+```
+
+To run tests based on RabbitMQ, Kafka, or other dependencies, the following services need to be started as Docker containers:
+
+```yaml
+version: "3"
+services:
+  # nosemgrep: yaml.docker-compose.security.writable-filesystem-service.writable-filesystem-service
+  rabbitmq:
+    image: rabbitmq:alpine
+    ports:
+      - "5672:5672"
+    # https://semgrep.dev/r?q=yaml.docker-compose.security.no-new-privileges.no-new-privileges
+    security_opt:
+      - no-new-privileges:true
+  # nosemgrep: yaml.docker-compose.security.writable-filesystem-service.writable-filesystem-service
+  kafka:
+    image: bitnami/kafka:3.5.0
+    ports:
+      - "9092:9092"
+    environment:
+      KAFKA_ENABLE_KRAFT: "true"
+      KAFKA_CFG_NODE_ID: "1"
+      KAFKA_CFG_PROCESS_ROLES: "broker,controller"
+      KAFKA_CFG_CONTROLLER_LISTENER_NAMES: "CONTROLLER"
+      KAFKA_CFG_LISTENERS: "PLAINTEXT://:9092,CONTROLLER://:9093"
+      KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: "CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT"
+      KAFKA_CFG_ADVERTISED_LISTENERS: "PLAINTEXT://127.0.0.1:9092"
+      KAFKA_BROKER_ID: "1"
+      KAFKA_CFG_CONTROLLER_QUORUM_VOTERS: "1@kafka:9093"
+      ALLOW_PLAINTEXT_LISTENER: "true"
+    # https://semgrep.dev/r?q=yaml.docker-compose.security.no-new-privileges.no-new-privileges
+    security_opt:
+      - no-new-privileges:true
+  # nosemgrep: yaml.docker-compose.security.writable-filesystem-service.writable-filesystem-service
+  nats:
+    image: nats
+    command: -js
+    ports:
+      - 4222:4222
+      - 8222:8222  # management
+    # https://semgrep.dev/r?q=yaml.docker-compose.security.no-new-privileges.no-new-privileges
+    security_opt:
+      - no-new-privileges:true
+```
+
+You can start the dependencies easily using the provided script by running:
+
+```bash
+./scripts/start_test_env.sh
+```
+
+Once you are done with development and running tests, you can stop the dependencies' Docker containers by running:
+
+```bash
+./scripts/stop_test_env.sh
+```
diff --git a/search/docs/getting-started/contributing/docs.md b/search/docs/getting-started/contributing/docs.md
new file mode 100644
index 0000000..c12180e
--- /dev/null
+++ b/search/docs/getting-started/contributing/docs.md
@@ -0,0 +1,44 @@
+# Documentation
+
+## How to help
+
+You will be of invaluable help if you contribute to the documentation.
+
+Such a contribution can be:
+
+* Pointing out inaccuracies, errors, and typos
+* Suggestions for editing specific sections
+* Making additions
+
+You can report all of this in [discussions](https://github.com/airtai/faststream/discussions){.external-link target="_blank"} on GitHub, open an [issue](https://github.com/airtai/faststream/issues){.external-link target="_blank"}, or write about it in our [discord](https://discord.gg/CJWmYpyFbc){.external-link target="_blank"} group.
+
+!!! note
+    Special thanks to those who are ready to help with **developing the documentation**, as well as translating it into **other languages**.
+
+## How to get started
+
+To develop the documentation, you don't even need to install the entire **FastStream** project as a whole.
+
+It is enough to:
+
+1. Clone the project repository
+2. Create a virtual environment
+    ```bash
+    python -m venv venv
+    ```
+3. Activate it
+    ```bash
+    source venv/bin/activate
+    ```
+4. Install documentation dependencies
+    ```bash
+    pip install ".[devdocs]"
+    ```
+5. Go to the `docs/` directory
+6. Start the local documentation server
+    ```bash
+    mkdocs serve
+    ```
+
+Now all changes in the documentation files will be reflected on your local version of the site.
+After making all the changes, you can open a `PR` with them - and we will gladly accept it!
diff --git a/search/docs/getting-started/dependencies/class.md b/search/docs/getting-started/dependencies/class.md
new file mode 100644
index 0000000..e69de29
diff --git a/search/docs/getting-started/dependencies/global.md b/search/docs/getting-started/dependencies/global.md
new file mode 100644
index 0000000..e8a2d4d
--- /dev/null
+++ b/search/docs/getting-started/dependencies/global.md
@@ -0,0 +1,3 @@
+
+* Broker-level dependencies
+* Subscriber-level dependencies
diff --git a/search/docs/getting-started/dependencies/index.md b/search/docs/getting-started/dependencies/index.md
new file mode 100644
index 0000000..647d07e
--- /dev/null
+++ b/search/docs/getting-started/dependencies/index.md
@@ -0,0 +1,163 @@
+---
+nested: A nested dependency is called here
+---
+
+# Dependencies
+
+**FastStream** uses the secondary library [**FastDepends**](https://lancetnik.github.io/FastDepends/){.external-link target="_blank"} for dependency management.
+This dependency system is literally borrowed from **FastAPI**, so if you know how to work with that framework, you'll be comfortable with dependencies in **FastStream**.
+
+You can visit the [**FastDepends**](https://lancetnik.github.io/FastDepends/){.external-link target="_blank"} documentation for more details, but the key points and **additions** are covered here.
+
+## Type Casting
+
+The key function in the dependency management and type conversion system in **FastStream** is the decorator `#!python @apply_types` (also known as `#!python @inject` in **FastDepends**).
+
+By default, it applies to all event handlers, unless you disabled the same option when creating the broker.
+
+{! includes/getting_started/dependencies/1.md !}
+
+!!! warning
+    Setting the `apply_types=False` flag not only disables type casting but also `Depends` and `Context`.
+
+This flag can be useful if you are using **FastStream** within another framework and you need to use its native dependency system.
+
+## Dependency Injection
+
+To implement dependencies in **FastStream**, a special class called **Depends** is used.
+
+{! includes/getting_started/dependencies/2.md !}
+
+**The first step**: You need to declare a dependency, which can be any `Callable` object.
+
+??? note "Callable"
+    A "Callable" is an object that can be "called". It can be a function, a class, or a class method.
+
+    In other words, if you can write code like `my_object()`, then `my_object` is `Callable`.
+
+{! includes/getting_started/dependencies/3.md !}
+
+**The second step**: Declare which dependencies you need using `Depends`.
+
+{! includes/getting_started/dependencies/4.md !}
+
+**The last step**: Just use the result of executing your dependency!
+
+It's easy, isn't it?
+
+!!! tip "Auto `#!python @apply_types`"
+    In the code above, we didn't use this decorator for our dependencies. However, it still applies
+    to all functions used as dependencies. Please keep this in mind.
+
+## Top-level Dependencies
+
+If you don't need a dependency result, you can use the following code:
+
+```python
+@broker.subscriber("test")
+def method(_ = Depends(...)): ...
+```
+
+But using the special `subscriber` parameter is much more suitable:
+
+```python
+@broker.subscriber("test", dependencies=[Depends(...)])
+def method(): ...
+```
+
+You can also declare broker-level dependencies, which will be applied to all of the broker's handlers:
+
+```python
+broker = RabbitBroker(dependencies=[Depends(...)])
+```
+
+## Nested Dependencies
+
+Dependencies can also contain other dependencies. This works in a very predictable way: just declare
+`Depends` in the dependent function.
+
+{% import 'getting_started/dependencies/5.md' as includes with context %}
+{{ includes }}
+
+!!! tip "Caching"
+    In the example above, the `another_dependency` function will be called only **once**!
+    **FastDepends** caches all dependency execution results within **ONE** `#!python @apply_types` call stack.
+    This means that all nested dependencies will receive the cached result of dependency execution.
+    But, between different calls of the main function, these results will be different.
+
+    To prevent this behavior, just use `#!python Depends(..., cache=False)`. In this case, the dependency will be used for each function
+    in the call stack where it is used.
+
+## Use with Regular Functions
+
+You can use the decorator `#!python @apply_types` not only with `#!python @broker.subscriber(...)`, but also with regular functions, both synchronous and asynchronous.
+
+=== "Sync"
+    ```python hl_lines="3-4" linenums="1"
+    from faststream import Depends, apply_types
+
+    def simple_dependency(a: int, b: int = 3):
+        return a + b
+
+    @apply_types
+    def method(a: int, d: int = Depends(simple_dependency)):
+        return a + d
+
+    def test_sync_dependency():
+        assert method("1") == 5
+    ```
+
+=== "Async"
+    ```python hl_lines="5-6 8-9" linenums="1"
+    import asyncio
+    import pytest
+    from faststream import Depends, apply_types
+
+    async def simple_dependency(a: int, b: int = 3):
+        return a + b
+
+    def another_dependency(a: int):
+        return a
+
+    @apply_types
+    async def method(
+        a: int,
+        b: int = Depends(simple_dependency),
+        c: int = Depends(another_dependency),
+    ):
+        return a + b + c
+
+    @pytest.mark.asyncio
+    async def test_async_dependency():
+        assert 6 == await method("1")
+    ```
+
+    !!! tip "Be careful"
+        In asynchronous code, you can use both synchronous and asynchronous dependencies.
+        But in synchronous code, only synchronous dependencies are available to you.
+
+## Casting Dependency Types
+
+**FastDepends**, used by **FastStream**, also casts the dependency's `return` type. This means that the value returned by the dependency will
+be cast to the type twice: as the dependency's `return` and as the input argument of the main function. This does not incur additional costs if
+these types have the same annotation. Just keep it in mind. Or not... Anyway, I've warned you.
+
+```python linenums="1"
+from faststream import Depends, apply_types
+
+def simple_dependency(a: int, b: int = 3) -> str:
+    return a + b  # 'return' is cast to `str` for the first time
+
+@apply_types
+def method(a: int, d: int = Depends(simple_dependency)):
+    # 'd' is cast to `int` for the second time
+    return a + d
+
+assert method("1") == 5
+```
+
+Also, the result of executing the dependency is cached. If you use this dependency in `N` functions,
+this cached result will be converted to the type `N` times (at the input to the function being used).
+
+To avoid problems with this, use [mypy](https://www.mypy-lang.org){.external-link target="_blank"} or just be careful with the annotation
+of types in your project.
diff --git a/search/docs/getting-started/dependencies/sub.md b/search/docs/getting-started/dependencies/sub.md new file mode 100644 index 0000000..e69de29 diff --git a/search/docs/getting-started/dependencies/testing.md b/search/docs/getting-started/dependencies/testing.md new file mode 100644 index 0000000..3588282 --- /dev/null +++ b/search/docs/getting-started/dependencies/testing.md @@ -0,0 +1,3 @@ + + +https://lancetnik.github.io/FastDepends/tutorial/overrides/ diff --git a/search/docs/getting-started/dependencies/yield.md b/search/docs/getting-started/dependencies/yield.md new file mode 100644 index 0000000..e69de29 diff --git a/search/docs/getting-started/index.md b/search/docs/getting-started/index.md new file mode 100644 index 0000000..fe59cdb --- /dev/null +++ b/search/docs/getting-started/index.md @@ -0,0 +1,39 @@ +--- +hide: + - toc +run_docker: To start a new project, we need a test broker container +--- + +# QUICK START + +Install using `pip`: + +{% import 'getting_started/index/install.md' as includes with context %} +{{ includes }} + +## Basic Usage + +To create a basic application, add the following code to a new file (e.g. `serve.py`): + +{! includes/getting_started/index/base.md !} + +And just run this command: + +```shell +faststream run serve:app +``` + +After running the command, you should see the following output: + +``` shell +INFO - FastStream app starting... +INFO - test | - `BaseHandler` waiting for messages +INFO - FastStream app started successfully! To exit, press CTRL+C +``` + +Enjoy your new development experience! + +??? tip "Don't forget to stop the test broker container" + ```bash + docker container stop test-mq + ``` diff --git a/search/docs/getting-started/integrations/fastapi/index.md b/search/docs/getting-started/integrations/fastapi/index.md new file mode 100644 index 0000000..724d94a --- /dev/null +++ b/search/docs/getting-started/integrations/fastapi/index.md @@ -0,0 +1,73 @@ +# **FastAPI** Plugin + +## Handling messages + +**FastStream** can be used as a part of **FastAPI**. + +Just import a **StreamRouter** you need and declare the message handler in the same way as with a regular **FastStream** application. + +!!! tip + When used in this way, **FastStream** does not use its own dependency system but integrates into **FastAPI**. + That is, you can use `Depends`, `BackgroundTasks` and other original **FastAPI** features as if it were a regular HTTP endpoint, but you can't use `faststream.Context` and `faststream.Depends`. + + Note that the code below uses `fastapi.Depends`, not `faststream.Depends`. + +{! includes/getting_started/integrations/fastapi/1.md !} + +When processing a message from a broker, the entire message body is placed simultaneously in both the `body` and `path` request parameters. You can access them in any way convenient for you. The message header is placed in `headers`. + +Also, this router can be fully used as an `HttpRouter` (of which it is the inheritor). So, you can +use it to declare any `get`, `post`, `put` and other HTTP methods. For example, this is done at **line 19**. + +!!! warning + If your **ASGI** server does not support installing **state** inside **lifespan**, you can disable this behavior as follows: + + ```python + router = StreamRouter(..., setup_state=False) + ``` + + However, after that, you will not be able to access the broker from your application's **state** (but it is still available as the `router.broker`). + +## Accessing the Broker Object + +Inside each router, there is a broker. 
You can easily access it if you need to send a message to MQ:
+
+{! includes/getting_started/integrations/fastapi/2.md !}
+
+Also, you can use the following `Depends` to access the broker if you want to use it in different parts of your program:
+
+{! includes/getting_started/integrations/fastapi/3.md !}
+
+Or you can access the broker from the **FastAPI** application state (if you don't disable it with `#!python setup_state=False`):
+
+```python
+from fastapi import Request
+
+@app.get("/")
+def main(request: Request):
+    broker = request.state.broker
+```
+
+## `@after_startup`
+
+The `FastStream` application has the `#!python @after_startup` hook, which allows you to perform operations with your message broker after the connection is established. This can be extremely convenient for managing your brokers' objects and/or sending messages. This hook is also available for your **FastAPI StreamRouter**.
+
+{! includes/getting_started/integrations/fastapi/4.md !}
+
+## Documentation
+
+When using **FastStream** as a router for **FastAPI**, the framework automatically registers endpoints for hosting **AsyncAPI** documentation into your application with the following default values:
+
+{! includes/getting_started/integrations/fastapi/5.md !}
+
+This way, you will have three routes to interact with your application's **AsyncAPI** schema:
+
+* `/asyncapi` - the same as the [CLI created page](../../../getting-started/asyncapi/hosting.md){.internal-link}
+* `/asyncapi.json` - download the **JSON** schema representation
+* `/asyncapi.yaml` - download the **YAML** schema representation
+
+## Testing
+
+To test your **FastAPI StreamRouter**, you can still use it with the *TestClient*:
+
+{! includes/getting_started/integrations/fastapi/6.md !}
diff --git a/search/docs/getting-started/integrations/frameworks/index.md b/search/docs/getting-started/integrations/frameworks/index.md
new file mode 100644
index 0000000..6f06f21
--- /dev/null
+++ b/search/docs/getting-started/integrations/frameworks/index.md
@@ -0,0 +1,16 @@
+---
+# template variables
+fastapi_plugin: If you want to use **FastStream** in conjunction with **FastAPI**, perhaps you should use a special [plugin](../fastapi/index.md){.internal-link}
+no_hook: However, even if such a hook is not provided, you can do it yourself.
+---
+
+# INTEGRATIONS
+
+**FastStream** brokers are very easy to integrate with any of your applications:
+it is enough to initialize the broker at startup and close it correctly at the end of
+your application.
+
+Most HTTP frameworks have built-in lifecycle hooks for this.
+
+{% import 'getting_started/integrations/http/1.md' as includes with context %}
+{{ includes }}
diff --git a/search/docs/getting-started/lifespan/hooks.md b/search/docs/getting-started/lifespan/hooks.md
new file mode 100644
index 0000000..c1ce42a
--- /dev/null
+++ b/search/docs/getting-started/lifespan/hooks.md
@@ -0,0 +1,119 @@
+# Lifespan Hooks
+
+## Usage example
+
+Let's imagine that your application uses **pydantic** as your settings manager.
+
+!!! note ""
+    I highly recommend using **pydantic** for these purposes, because this dependency is already used by **FastStream**,
+    so you don't have to install an additional package.
+
+Also, let's imagine that you have several `.env`, `.env.development`, `.env.test`, `.env.production` files with your application settings,
+and you want to switch them at startup without any code changes.
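+
+For example, a `.env.test` file might contain nothing more than a broker address and a queue name (the values here are illustrative, mirroring the configuration guide above):
+
+```bash
+URL="amqp://guest:guest@localhost:5672"
+QUEUE="test-queue"
+```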
+
+By [passing optional arguments with the command line](../config/index.md){.internal-link} to your code, **FastStream** allows you to do this easily.
+
+## Lifespan
+
+Let's write some code for our example:
+
+{! includes/getting_started/lifespan/1.md !}
+
+Now this application can be run using the following command to manage the environment:
+
+```bash
+faststream run serve:app --env .env.test
+```
+
+### Details
+
+Now let's look at it in a little more detail.
+
+To begin with, we used a decorator:
+
+{! includes/getting_started/lifespan/2.md !}
+
+to declare a function that should run when our application starts.
+
+The next step is to declare the arguments that our function will receive:
+
+{! includes/getting_started/lifespan/3.md !}
+
+In this case, the `env` field will be passed to the `setup` function from the command-line arguments.
+
+!!! tip
+    The default lifecycle functions are used with the decorator `#!python @apply_types`,
+    therefore, all [context fields](../context/index.md){.internal-link} and [dependencies](../dependencies/index.md){.internal-link} are available in them.
+
+Then, we initialized the settings of our application using the file passed to us from the command line:
+
+{! includes/getting_started/lifespan/4.md !}
+
+And put these settings in a global context:
+
+{! includes/getting_started/lifespan/5.md !}
+
+??? note
+    Now we can access our settings anywhere in the application right from the context:
+
+    ```python
+    from faststream import Context, apply_types
+
+
+    @apply_types
+    async def func(settings=Context()): ...
+    ```
+
+In the last step, we initialized our broker: now, when the application starts, it will be ready to receive messages.
+
+{! includes/getting_started/lifespan/6.md !}
+
+## Another example
+
+Now let's imagine that we have a machine learning model that needs to process messages from some broker.
+
+Initialization of such models usually takes a long time. It would be wise to do this at the start of the application, and not when processing each message.
+
+You can initialize your model somewhere at the top of your module/file. However, in this case, this code will be run even when you merely import
+this module, for example, during testing. It is unlikely that you want to run your model on every test run...
+
+Therefore, it is worth initializing the model in the `#!python @app.on_startup` hook.
+
+Also, we don't want the model to finish its work incorrectly when the application is stopped. To avoid this, we need the `#!python @app.on_shutdown` hook.
+
+{! includes/getting_started/lifespan/7.md !}
+
+## Multiple hooks
+
+If you want to declare multiple lifecycle hooks, they will be used in the order they are registered:
+
+```python linenums="1" hl_lines="6 11"
+from faststream import Context, ContextRepo, FastStream
+
+app = FastStream()
+
+
+@app.on_startup
+async def setup(context: ContextRepo):
+    context.set_global("field", 1)
+
+
+@app.on_startup
+async def setup_later(field: int = Context()):
+    assert field == 1
+```
+
+## Some more details
+
+### Async or not async
+
+In the asynchronous version of the application, both asynchronous and synchronous methods can be used as hooks.
+In the synchronous version, only synchronous methods are available.
+
+### Command line arguments
+
+Command line arguments are available in all `#!python @app.on_startup` hooks. To use them in other parts of the application, put them in the `ContextRepo`.
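+
+A minimal sketch of this pattern (the `mode` flag name is hypothetical; it would arrive from a call such as `faststream run serve:app --mode test`):
+
+```python
+from faststream import ContextRepo, FastStream
+
+app = FastStream()
+
+
+@app.on_startup
+async def setup(context: ContextRepo, mode: str = "devel"):
+    # `mode` is filled from the command line and stored globally,
+    # so the rest of the application can read it from the context
+    context.set_global("mode", mode)
+```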
+
+### Broker initialization
+
+The `#!python @app.on_startup` hooks are called **BEFORE** the broker is launched by the application. The `#!python @app.after_shutdown` hooks are triggered **AFTER** stopping the broker.
+
+If you want to perform some actions **AFTER** initializing the broker: send messages, initialize objects, etc., you should use the `#!python @app.after_startup` hook.
diff --git a/search/docs/getting-started/lifespan/index.md b/search/docs/getting-started/lifespan/index.md
new file mode 100644
index 0000000..f74e924
--- /dev/null
+++ b/search/docs/getting-started/lifespan/index.md
@@ -0,0 +1,11 @@
+# Lifespan Events
+
+Sometimes you need to define the logic that should be executed before launching the application.
+This means that the code will be executed once - even before your application starts receiving messages.
+
+Also, you may need to terminate some processes after stopping the application. In this case, your code will also be executed exactly once:
+but after the completion of the main application.
+
+Since this code is executed before the application starts and after it stops, it covers the entire lifecycle *(lifespan)* of the application.
+
+This can be very useful for initializing your application settings at startup, raising a pool of connections to a database, or running machine learning models.
diff --git a/search/docs/getting-started/lifespan/test.md b/search/docs/getting-started/lifespan/test.md
new file mode 100644
index 0000000..58887e1
--- /dev/null
+++ b/search/docs/getting-started/lifespan/test.md
@@ -0,0 +1,12 @@
+# Events Testing
+
+In most cases, you are testing your subscriber/publisher functions, but sometimes you need to trigger some lifespan hooks in your tests too.
+
+For this reason, **FastStream** has a special **TestApp** patcher working as a regular async context manager.
+
+{! includes/getting_started/lifespan/testing.md !}
+
+!!! tip
+    If you are using a connected broker inside your lifespan hooks, it's advisable to patch the broker first (before applying the application patch).
+
+    Also, because `FastStream` calls `#!python broker.start()` inside, you need to prevent the `TestClient` from starting the broker by passing the `#!python connect_only=True` option; this respects the original lifespan hook ordering. Without it, all `FastStream` hooks would be called after the broker has started, which can break some `@app.on_startup` logic.
diff --git a/search/docs/getting-started/logging.md b/search/docs/getting-started/logging.md
new file mode 100644
index 0000000..efd23eb
--- /dev/null
+++ b/search/docs/getting-started/logging.md
@@ -0,0 +1,208 @@
+# Application and Access Logging
+
+**FastStream** uses two previously configured loggers:
+
+* `faststream` - used by the `FastStream` app
+* `faststream.access` - used by the broker
+
+## Logging Requests
+
+To log requests, it is strongly recommended to use the `access_logger` of your broker, as it is available from the [Context](../getting-started/context/existed.md){.internal-link} of your application.
+
+```python
+from faststream import Logger
+from faststream.rabbit import RabbitBroker
+
+broker = RabbitBroker()
+
+@broker.subscriber("test")
+async def func(logger: Logger):
+    logger.info("message received")
+```
+
+This approach offers several advantages:
+
+* The logger already contains the request context, including the message ID and broker-based parameters.
+* By replacing the `logger` when initializing the broker, you will automatically replace all loggers inside your functions.
+
+## Logging Levels
+
+If you use the **FastStream CLI**, you can change the current logging level of the entire application directly from the command line.
+
+The `--log-level` flag sets the current logging level for both the broker and the `FastStream` app. This allows you to configure the levels of not only the default loggers but also your custom loggers, if you use them inside **FastStream**.
+
+```console
+faststream run serve:app --log-level debug
+```
+
+If you want to completely disable the default logging of `FastStream`, you can set `logger=None`:
+
+```python
+from faststream import FastStream
+from faststream.rabbit import RabbitBroker
+
+broker = RabbitBroker(logger=None)     # Disables broker logs
+app = FastStream(broker, logger=None)  # Disables application logs
+```
+
+!!! warning
+    Be careful: the `logger` that you get from the context will also have the value `None` if you turn off broker logging.
+
+If you don't want to lose access to the `logger` inside your context but want to disable the default logs of **FastStream**, you can lower the level of logs that the broker publishes itself.
+
+```python
+import logging
+from faststream.rabbit import RabbitBroker
+
+# Sets the broker logs to the DEBUG level
+broker = RabbitBroker(log_level=logging.DEBUG)
+```
+
+## Formatting Logs
+
+If you are not satisfied with the current format of your application logs, you can change it directly in your broker's constructor.
+
+```python
+from faststream.rabbit import RabbitBroker
+broker = RabbitBroker(log_fmt="%(asctime)s %(levelname)s - %(message)s")
+```
+
+## Logger Access
+
+If you want to override the default loggers' behavior, you can access them directly via `logging`.
+
+```python
+import logging
+logger = logging.getLogger("faststream")
+access_logger = logging.getLogger("faststream.access")
+```
+
+Or you can import them from **FastStream**.
+
+```python
+from faststream.log import access_logger, logger
+```
+
+## Using Your Own Loggers
+
+Since **FastStream** works with the standard `logging.Logger` object, you can initiate an application and a broker
+using your own logger.
+
+```python
+import logging
+from faststream import FastStream
+from faststream.rabbit import RabbitBroker
+
+logger = logging.getLogger("my_logger")
+
+broker = RabbitBroker(logger=logger)
+app = FastStream(broker, logger=logger)
+```
+
+!!! note
+    Doing this does not change the **CLI** logs behavior (*multiprocessing* and *hot reload* logs). This was done to keep your log storage clear of unnecessary stuff.
+
+    This logger will be used only for `FastStream` and `StreamBroker` service messages and will be passed to your function through the **Context**.
+
+By doing this, you will lose information about the context of the current request. However, you can retrieve it directly from the context anywhere in your code.
+
+```python
+from faststream import context
+log_context: dict[str, str] = context.get_local("log_context")
+```
+
+This way, all broker handlers can get access to your broker logger right from the context:
+
+```python
+from faststream import Logger
+
+@broker.subscriber(...)
+async def handler(
+    msg,
+    logger: Logger,  # <-- YOUR logger here
+):
+    logger.info(msg)
+```
+
+### Structlog Example
+
+[**Structlog**](https://www.structlog.org/en/stable){.external-link target="_blank"} is a production-ready logging solution for Python. It can be easily integrated with any log storage system, making it suitable for use in production projects.
+
+Here is a quick tutorial on integrating **Structlog** with **FastStream**:
+
+Start with the **Structlog** [guide](https://www.structlog.org/en/stable/logging-best-practices.html#pretty-printing-vs-structured-output){.external-link target="_blank"} example:
+
+```python linenums="1" hl_lines="11 14 20"
+import sys
+import structlog
+
+shared_processors = [
+    structlog.processors.add_log_level,
+    structlog.processors.StackInfoRenderer(),
+    structlog.dev.set_exc_info,
+    structlog.processors.TimeStamper(fmt="iso"),
+]
+
+if sys.stderr.isatty():
+    # terminal session
+    processors = shared_processors + [
+        structlog.dev.ConsoleRenderer()
+    ]
+else:
+    # Docker container session
+    processors = shared_processors + [
+        structlog.processors.dict_tracebacks,
+        structlog.processors.JSONRenderer(),
+    ]
+
+structlog.configure(
+    processors=processors,
+    logger_factory=structlog.PrintLoggerFactory(),
+    cache_logger_on_first_use=False,
+)
+
+logger = structlog.get_logger()
+```
+
+We created a logger that prints messages to the console in a user-friendly format during development and uses **JSON**-formatted logs in production.
+
+To integrate this logger with our **FastStream** application, we just need to access it through context information and pass it to our objects:
+
+```python linenums="1" hl_lines="11 15 20 26-27"
+import logging
+
+import structlog
+
+from faststream import FastStream, context
+from faststream.kafka import KafkaBroker
+
+def merge_contextvars(
+    logger: structlog.types.WrappedLogger,
+    method_name: str,
+    event_dict: structlog.types.EventDict,
+) -> structlog.types.EventDict:
+    event_dict["extra"] = event_dict.get(
+        "extra",
+        context.get("log_context", {}),
+    )
+    return event_dict
+
+shared_processors = [
+    merge_contextvars,
+    ...
+]
+
+...
+
+broker = KafkaBroker(logger=logger, log_level=logging.DEBUG)
+app = FastStream(broker, logger=logger)
+```
+
+And the job is done! Now you have perfectly structured logs using **Structlog**.
+
+```bash
+TIMESTAMP [info     ] FastStream app starting... extra={}
+TIMESTAMP [debug    ] `Handler` waiting for messages extra={'topic': 'topic', 'group_id': 'group', 'message_id': ''}
+TIMESTAMP [debug    ] `Handler` waiting for messages extra={'topic': 'topic', 'group_id': 'group2', 'message_id': ''}
+TIMESTAMP [info     ] FastStream app started successfully! To exit, press CTRL+C extra={'topic': '', 'group_id': '', 'message_id': ''}
+```
diff --git a/search/docs/getting-started/middlewares/index.md b/search/docs/getting-started/middlewares/index.md
new file mode 100644
index 0000000..45d4037
--- /dev/null
+++ b/search/docs/getting-started/middlewares/index.md
@@ -0,0 +1,85 @@
+# Middlewares
+
+**Middlewares** are a powerful mechanism that allows you to add additional logic to any stage of the message processing pipeline.
+
+This way, you can greatly extend your **FastStream** application with features such as:
+
+* Integration with any logging/metrics systems
+* Application-level message serialization logic
+* Rich publishing of messages with extra information
+* And many other capabilities
+
+**Middlewares** have several methods to override. You can implement some or all of them and use middlewares at the broker, router, or subscriber level. Thus, middlewares are the most flexible **FastStream** feature.
+
+## Message Receive Wrapper
+
+Unfortunately, this powerful feature has a somewhat complex signature too.
+
+Using middlewares, you can wrap the entire message processing pipeline.
+In this case, you need to specify `on_receive` and `after_processed` methods:
+
+``` python
+from faststream import BaseMiddleware
+
+class MyMiddleware(BaseMiddleware):
+    async def on_receive(self):
+        print(f"Received: {self.message}")
+        return await super().on_receive()
+
+    async def after_processed(self, exc_type, exc_val, exec_tb):
+        return await super().after_processed(exc_type, exc_val, exec_tb)
+```
+
+These methods should be overridden only in broker-level middlewares.
+
+```python
+Broker(middlewares=[MyMiddleware])
+```
+
+In other cases, `on_receive` will be called at every subscriber filter function call.
+
+!!! tip
+    Please always call `#!python super()` methods at the end of your function; this is important for correct error processing.
+
+## Message Consuming Wrapper
+
+Also, using middlewares, you are able to wrap consumer function calls directly.
+
+In this case, you need to specify `on_consume` and `after_consume` methods:
+
+``` python
+from typing import Optional
+
+from faststream import BaseMiddleware
+from faststream.types import DecodedMessage
+
+class MyMiddleware(BaseMiddleware):
+    async def on_consume(self, msg: DecodedMessage) -> DecodedMessage:
+        return await super().on_consume(msg)
+
+    async def after_consume(self, err: Optional[Exception]) -> None:
+        return await super().after_consume(err)
+```
+
+This way, you can patch the incoming message body right before passing it to your consumer subscriber.
+
+Also, if you have multiple filters for one subscriber, these methods will be called only once, when the filtering completes successfully.
+
+## Message Publishing Wrapper
+
+Finally, using middlewares, you are able to patch outgoing messages too. For example, you can compress/encode outgoing messages at the application level.
+
+In this case, you need to specify `on_publish` and `after_publish` methods:
+
+``` python
+from typing import Optional
+
+from faststream import BaseMiddleware
+from faststream.types import SendableMessage
+
+class MyMiddleware(BaseMiddleware):
+    async def on_publish(self, msg: SendableMessage) -> SendableMessage:
+        return await super().on_publish(msg)
+
+    async def after_publish(self, err: Optional[Exception]) -> None:
+        return await super().after_publish(err)
+```
diff --git a/search/docs/getting-started/publishing/broker.md b/search/docs/getting-started/publishing/broker.md
new file mode 100644
index 0000000..0a3f1d0
--- /dev/null
+++ b/search/docs/getting-started/publishing/broker.md
+# Broker Publishing
+
+The easiest way to publish a message is to use a Broker, which allows you to use it as a publisher client in any application.
+
+In the **FastStream** project, this call is not represented in the **AsyncAPI** schema. You can use it to send rarely published messages, such as startup or shutdown events.
+
+=== "Kafka"
+    ```python linenums="1"
+    {!> docs_src/getting_started/publishing/broker_kafka.py !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1"
+    {!> docs_src/getting_started/publishing/broker_rabbit.py !}
+    ```
+
+=== "NATS"
+    ```python linenums="1"
+    {!> docs_src/getting_started/publishing/broker_nats.py !}
+    ```
diff --git a/search/docs/getting-started/publishing/decorator.md b/search/docs/getting-started/publishing/decorator.md
new file mode 100644
index 0000000..3239ec1
--- /dev/null
+++ b/search/docs/getting-started/publishing/decorator.md
+# Publisher Decorator
+
+The second easiest way to publish messages is by using the Publisher Decorator.
+This method has an AsyncAPI representation and is suitable for quickly creating applications. However, it doesn't provide all testing features.
+
+It creates a structured DataPipeline unit with an input and output. The order of Subscriber and Publisher decorators doesn't matter, but they can only be used with functions already decorated by a `subscriber`.
+
+It uses the handler function's return type annotation to cast the function's return value before sending, so be accurate with it:
+
+=== "Kafka"
+    ```python linenums="1"
+    {!> docs_src/getting_started/publishing/decorator_kafka.py !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1"
+    {!> docs_src/getting_started/publishing/decorator_rabbit.py !}
+    ```
+
+=== "NATS"
+    ```python linenums="1"
+    {!> docs_src/getting_started/publishing/decorator_nats.py !}
+    ```
+
+It can be used multiple times with one function to broadcast the function's return:
+
+```python
+@broker.subscriber("in")
+@broker.publisher("first-out")
+@broker.publisher("second-out")
+async def handle(msg) -> str:
+    return "Response"
+```
+
+Additionally, it automatically sends a message with the same `correlation_id` as the incoming message. This way, you get the same `correlation_id` for the entire message pipeline process across all services, allowing you to collect a trace.
diff --git a/search/docs/getting-started/publishing/direct.md b/search/docs/getting-started/publishing/direct.md
new file mode 100644
index 0000000..d478289
--- /dev/null
+++ b/search/docs/getting-started/publishing/direct.md
+# Publisher Direct Usage
+
+The Publisher Direct Usage is a full-featured way to publish messages. It has an AsyncAPI representation and includes testable features. This method creates a reusable Publisher object that can be used directly to publish a message:
+
+=== "Kafka"
+    ```python linenums="1"
+    {!> docs_src/getting_started/publishing/direct_kafka.py !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1"
+    {!> docs_src/getting_started/publishing/direct_rabbit.py !}
+    ```
+
+=== "NATS"
+    ```python linenums="1"
+    {!> docs_src/getting_started/publishing/direct_nats.py !}
+    ```
+
+It is suitable for publishing different messages to different outputs within the same processing function:
+
+```python
+@broker.subscriber("in")
+async def handle(msg):
+    await publisher1.publish("Response-1")
+    await publisher2.publish("Response-2")
+```
diff --git a/search/docs/getting-started/publishing/index.md b/search/docs/getting-started/publishing/index.md
new file mode 100644
index 0000000..76a833a
--- /dev/null
+++ b/search/docs/getting-started/publishing/index.md
+# Publishing Basics
+
+**FastStream** is broker-agnostic and easy to use, even as a client in non-FastStream applications.
+
+It offers several use cases for publishing messages:
+
+* Using `broker.publish`
+* Using a decorator
+* Using a publisher object decorator
+* Using a publisher object directly
+
+**FastStream** allows you to publish any JSON-serializable messages (Python types, Pydantic models, etc.) or raw bytes.
+
+It automatically sets up all required headers, especially the `correlation_id`, which is used to trace message processing pipelines across all services.
+
+To publish a message, simply set up the message content and a routing key:
+
+=== "Kafka"
+    ```python
+    async with KafkaBroker() as br:
+        await br.publish("message", "topic")
+    ```
+
+=== "RabbitMQ"
+    ```python
+    async with RabbitBroker() as br:
+        await br.publish("message", "queue")
+    ```
+
+=== "NATS"
+    ```python
+    async with NatsBroker() as br:
+        await br.publish("message", "queue")
+    ```
diff --git a/search/docs/getting-started/publishing/object.md b/search/docs/getting-started/publishing/object.md
new file mode 100644
index 0000000..0f2f9ae
--- /dev/null
+++ b/search/docs/getting-started/publishing/object.md
+# Publisher Object
+
+The Publisher Object provides a full-featured way to publish messages. It has an **AsyncAPI** representation and includes testable features. This method creates a reusable Publisher object.
+
+It can be used as a function decorator. The order of Subscriber and Publisher decorators doesn't matter, but they can only be used with functions decorated by a `subscriber` decorator.
+
+It also uses the handler function's return type annotation to cast the function's return value before sending, so be accurate with it:
+
+=== "Kafka"
+    ```python linenums="1"
+    {!> docs_src/getting_started/publishing/object_kafka.py !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1"
+    {!> docs_src/getting_started/publishing/object_rabbit.py !}
+    ```
+
+=== "NATS"
+    ```python linenums="1"
+    {!> docs_src/getting_started/publishing/object_nats.py !}
+    ```
+
+You can use it multiple times with one function to broadcast the function's return:
+
+```python
+@publisher1
+@publisher2
+@broker.subscriber("in")
+async def handle(msg) -> str:
+    return "Response"
+```
+
+Additionally, it automatically sends a message with the same `correlation_id` as the incoming message. This way, you get the same `correlation_id` for the entire message pipeline process across all services, allowing you to collect a trace.
diff --git a/search/docs/getting-started/publishing/test.md b/search/docs/getting-started/publishing/test.md
new file mode 100644
index 0000000..aae0f45
--- /dev/null
+++ b/search/docs/getting-started/publishing/test.md
+# Publisher Testing
+
+If you are working with a Publisher object (either decorator or direct), you can check outgoing messages as well.
+There are several testing features available:
+
+* In-memory TestClient
+* Publishing (including error handling)
+* Checking the incoming message body
+* Note about mock clearing after the context exits
+
+## Base Application
+
+=== "Decorator"
+    === "Kafka"
+        ```python linenums="1"
+        {!> docs_src/getting_started/publishing/object_kafka.py[ln:7-12] !}
+        ```
+
+    === "RabbitMQ"
+        ```python linenums="1"
+        {!> docs_src/getting_started/publishing/object_rabbit.py[ln:7-12] !}
+        ```
+
+    === "NATS"
+        ```python linenums="1"
+        {!> docs_src/getting_started/publishing/object_nats.py[ln:7-12] !}
+        ```
+
+=== "Direct"
+    === "Kafka"
+        ```python linenums="1"
+        {!> docs_src/getting_started/publishing/direct_kafka.py[ln:7-11] !}
+        ```
+
+    === "RabbitMQ"
+        ```python linenums="1"
+        {!> docs_src/getting_started/publishing/direct_rabbit.py[ln:7-11] !}
+        ```
+
+    === "NATS"
+        ```python linenums="1"
+        {!> docs_src/getting_started/publishing/direct_nats.py[ln:7-11] !}
+        ```
+
+## Testing
+
+=== "Kafka"
+    ```python linenums="1"
+    {!> docs_src/getting_started/publishing/object_kafka_testing.py [ln:1-3,7-12] !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1"
+    {!> docs_src/getting_started/publishing/object_rabbit_testing.py [ln:1-3,7-12] !}
+    ```
+
+=== "NATS"
+    ```python linenums="1"
+    {!> docs_src/getting_started/publishing/object_nats_testing.py [ln:1-3,7-12] !}
+    ```
+
+* Testing with a real broker
+* Waiting for the consumer to be called
diff --git a/search/docs/getting-started/routers/index.md b/search/docs/getting-started/routers/index.md
new file mode 100644
index 0000000..cba596a
--- /dev/null
+++ b/search/docs/getting-started/routers/index.md
+---
+# template variables
+note_decor: Now you can use the created router to register handlers and publishers as if it were a regular broker
+note_include: Then you can simply include all the handlers declared using the router in your broker
+note_publish: Please note that when publishing a message, you now need to specify the same prefix that you used when creating the router
+---
+
+# Broker Router
+
+Sometimes you want to:
+
+* split an application into includable modules
+* separate business logic from your handler registration
+* apply some [decoder](../serialization/index.md)/[middleware](../middlewares/index.md)/[dependencies](../dependencies/global.md) to a group of subscribers
+
+For these reasons, **FastStream** has a special *Broker Router*.
+
+## Router Usage
+
+First, you need to import the *Broker Router* from the same module where you imported the broker.
+
+!!! note ""
+    When creating a *Broker Router*, you can specify a prefix that will be automatically applied to all subscribers and publishers of this router.
+
+{% import 'getting_started/routers/1.md' as includes with context %}
+{{ includes }}
+
+!!! tip
+    Also, when creating a *Broker Router*, you can specify [middleware](../middlewares/index.md), [dependencies](../dependencies/global.md), [parser](../serialization/parser.md) and [decoder](../serialization/decoder.md) to apply them to all subscribers declared via this router.
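+
+For illustration, here is a minimal sketch of the whole pattern (assuming the **Kafka** broker and an illustrative `prefix_`; the actual tutorial snippets may differ):
+
+```python
+from faststream import FastStream
+from faststream.kafka import KafkaBroker, KafkaRouter
+
+broker = KafkaBroker()
+router = KafkaRouter(prefix="prefix_")  # applied to all router subscribers and publishers
+
+@router.subscriber("test")  # the final topic will be "prefix_test"
+async def handle(msg: str):
+    ...
+
+broker.include_router(router)  # register the router's handlers in the broker
+app = FastStream(broker)
+```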
+
+## Delay Handler Registration
+
+If you want to separate your application's core logic from **FastStream**'s routing logic, you can write some core functions and use them as *Broker Router* `handlers` later:
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="2 8 13 15"
+    {!> docs_src/getting_started/routers/router_delay_kafka.py [ln:1-15] !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="2 8 13 15"
+    {!> docs_src/getting_started/routers/router_delay_rabbit.py [ln:1-15] !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="2 8 13 15"
+    {!> docs_src/getting_started/routers/router_delay_nats.py [ln:1-15] !}
+    ```
+
+!!! warning
+    Be careful: this way you won't be able to test your handlers with a [`mock`](../subscription/test.md) object.
diff --git a/search/docs/getting-started/serialization/decoder.md b/search/docs/getting-started/serialization/decoder.md
new file mode 100644
index 0000000..d3aac74
--- /dev/null
+++ b/search/docs/getting-started/serialization/decoder.md
+# Custom Decoder
+
+At this stage, the body of a **StreamMessage** is transformed into the format it will take when it enters your handler function. This is the stage you will most often need to redefine.
+
+## Signature
+
+The original decoder function has a relatively simple signature (this is a simplified version):
+
+=== "Kafka"
+    ``` python
+    from faststream.types import DecodedMessage
+    from faststream.kafka import KafkaMessage
+
+    def decoder(msg: KafkaMessage) -> DecodedMessage:
+        ...
+    ```
+
+=== "RabbitMQ"
+    ``` python
+    from faststream.types import DecodedMessage
+    from faststream.rabbit import RabbitMessage
+
+    def decoder(msg: RabbitMessage) -> DecodedMessage:
+        ...
+    ```
+
+=== "NATS"
+    ``` python
+    from faststream.types import DecodedMessage
+    from faststream.nats import NatsMessage
+
+    def decoder(msg: NatsMessage) -> DecodedMessage:
+        ...
+    ```
+
+Alternatively, you can reuse the original decoder function with the following signature:
+
+=== "Kafka"
+    ``` python
+    from typing import Awaitable, Callable
+    from faststream.types import DecodedMessage
+    from faststream.kafka import KafkaMessage
+
+    async def decoder(
+        msg: KafkaMessage,
+        original_decoder: Callable[[KafkaMessage], Awaitable[DecodedMessage]],
+    ) -> DecodedMessage:
+        return await original_decoder(msg)
+    ```
+
+=== "RabbitMQ"
+    ``` python
+    from typing import Awaitable, Callable
+    from faststream.types import DecodedMessage
+    from faststream.rabbit import RabbitMessage
+
+    async def decoder(
+        msg: RabbitMessage,
+        original_decoder: Callable[[RabbitMessage], Awaitable[DecodedMessage]],
+    ) -> DecodedMessage:
+        return await original_decoder(msg)
+    ```
+
+=== "NATS"
+    ``` python
+    from typing import Awaitable, Callable
+    from faststream.types import DecodedMessage
+    from faststream.nats import NatsMessage
+
+    async def decoder(
+        msg: NatsMessage,
+        original_decoder: Callable[[NatsMessage], Awaitable[DecodedMessage]],
+    ) -> DecodedMessage:
+        return await original_decoder(msg)
+    ```
+
+!!! note
+    The original decoder is always an asynchronous function, so your custom decoder should also be asynchronous.
+
+Afterward, you can set this custom decoder at the broker or subscriber level.
+
+## Example
+
+You can find examples of *Protobuf* and *Msgpack* serialization in the [next article](./examples.md){.internal-link}.
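+
+Before that, here is a minimal wiring sketch showing both levels (assuming the RabbitMQ broker; the `decoder` parameter works the same way for the other brokers):
+
+``` python
+from faststream.rabbit import RabbitBroker, RabbitMessage
+from faststream.types import DecodedMessage
+
+async def decoder(msg: RabbitMessage) -> DecodedMessage:
+    return msg.body  # pass the raw bytes through untouched
+
+# Broker level: applied to all subscribers
+broker = RabbitBroker(decoder=decoder)
+
+# Subscriber level: applied to (and overriding the broker one for) this handler only
+@broker.subscriber("test", decoder=decoder)
+async def handle(body):
+    ...
+```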
diff --git a/search/docs/getting-started/serialization/examples.md b/search/docs/getting-started/serialization/examples.md
new file mode 100644
index 0000000..9283306
--- /dev/null
+++ b/search/docs/getting-started/serialization/examples.md
+# Serialization Examples
+
+## Protobuf
+
+In this section, we will explore an example using *Protobuf*. However, this approach is also applicable to other serialization methods.
+
+???- note "Protobuf"
+    *Protobuf* is an alternative message serialization method commonly used in *gRPC*. Its main advantage is that it results in much smaller message sizes[^1] compared to *JSON*, but it requires a message schema (`.proto` files) on both the client and server sides.
+
+To begin, install the necessary dependencies:
+
+```console
+pip install grpcio-tools
+```
+
+Next, let's define the schema for our message:
+
+```proto title="message.proto"
+syntax = "proto3";
+
+message Person {
+    string name = 1;
+    float age = 2;
+}
+```
+
+Now, generate a *Python* class to work with messages in the *Protobuf* format:
+
+```console
+python -m grpc_tools.protoc --python_out=. --pyi_out=. -I . message.proto
+```
+
+This generates two files: `message_pb2.py` and `message_pb2.pyi`. We can use the generated class to serialize our messages:
+
+``` python linenums="1" hl_lines="1 10-13 16 23"
+from message_pb2 import Person
+
+from faststream import FastStream, Logger, NoCast
+from faststream.rabbit import RabbitBroker, RabbitMessage
+
+broker = RabbitBroker()
+app = FastStream(broker)
+
+
+async def decode_message(msg: RabbitMessage) -> Person:
+    decoded = Person()
+    decoded.ParseFromString(msg.body)
+    return decoded
+
+
+@broker.subscriber("test", decoder=decode_message)
+async def consume(body: NoCast[Person], logger: Logger):
+    logger.info(body)
+
+
+@app.after_startup
+async def publish():
+    body = Person(name="John", age=25).SerializeToString()
+    await broker.publish(body, "test")
+```
+
+Note that we used the `NoCast` annotation to exclude the message from the `pydantic` representation of our handler:
+
+``` python
+async def consume(body: NoCast[Person], logger: Logger):
+```
+
+## Msgpack
+
+*Msgpack* is another alternative binary data format. It also results in smaller message sizes[^2] compared to *JSON*, although slightly larger than *Protobuf*. Its key advantage, however, is that it doesn't require a message schema, making it easy to use in most cases.
+
+To get started, install the necessary dependencies:
+
+```console
+pip install msgpack
+```
+
+Since there is no need for a schema, you can easily write a *Msgpack* decoder:
+
+``` python linenums="1" hl_lines="1 10-11 14 21"
+import msgpack
+
+from faststream import FastStream, Logger
+from faststream.rabbit import RabbitBroker, RabbitMessage
+
+broker = RabbitBroker()
+app = FastStream(broker)
+
+
+async def decode_message(msg: RabbitMessage):
+    return msgpack.loads(msg.body)
+
+
+@broker.subscriber("test", decoder=decode_message)
+async def consume(name: str, age: int, logger: Logger):
+    logger.info(f"{name}: {age}")
+
+
+@app.after_startup
+async def publish():
+    body = msgpack.dumps({"name": "John", "age": 25}, use_bin_type=True)
+    await broker.publish(body, "test")
+```
+
+Using *Msgpack* is much simpler than using *Protobuf* schemas. Therefore, if you don't have strict message size limitations, you can use *Msgpack* serialization in most cases.
+
+## Tips
+
+### Data Compression
+
+If you are dealing with very large messages, consider compressing them as well.
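+
+For example, a compression layer can be as small as the following sketch (using Python's built-in `gzip` on top of *Msgpack*; the function names are illustrative):
+
+``` python
+import gzip
+
+import msgpack
+
+def encode(data: dict) -> bytes:
+    # serialize first, then compress the payload before publishing
+    return gzip.compress(msgpack.dumps(data))
+
+def decode(body: bytes) -> dict:
+    # decompress first, then deserialize
+    return msgpack.loads(gzip.decompress(body))
+```
+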
+You can explore libraries such as [**lz4**](https://github.com/python-lz4/python-lz4){.external-link target="_blank"} or [**zstd**](https://github.com/sergey-dryabzhinsky/python-zstd){.external-link target="_blank"} for compression algorithms.
+
+Compression can significantly reduce message size, especially if there are repeated blocks. However, in the case of small message bodies, data compression may increase the message size. Therefore, you should assess the compression impact based on your specific application requirements.
+
+### Broker-Level Serialization
+
+You can still set a custom `decoder` at the Broker or Router level. However, if you want to automatically encode publishing messages as well, you should explore [Middleware](../middlewares/index.md){.internal-link} for the serialization implementation.
+
+[^1]:
+    For example, a message like `#!json { "name": "John", "age": 25 }` in *JSON* takes **27** bytes, while in *Protobuf*, it takes only **11** bytes. With lists and more complex structures, the savings can be even more significant (up to 20x).
+
+[^2]:
+    A message with *Msgpack* serialization, such as `#!json { "name": "John", "age": 25 }`, takes **16** bytes.
diff --git a/search/docs/getting-started/serialization/index.md b/search/docs/getting-started/serialization/index.md
new file mode 100644
index 0000000..d2ddd23
--- /dev/null
+++ b/search/docs/getting-started/serialization/index.md
+# Custom Serialization
+
+By default, **FastStream** uses the *JSON* format to send and receive messages. However, if you need to handle messages in other formats or with additional serialization steps, such as *gzip*, *lz4*, *Avro*, *Protobuf* or *Msgpack*, you can easily modify the serialization logic.
+
+## Serialization Steps
+
+Before the message reaches your subscriber, **FastStream** applies two functions to it sequentially: `parse_message` and `decode_message`. You can modify one or both stages depending on your needs.
+
+### Message Parsing
+
+At this stage, **FastStream** serializes an incoming message from the broker's framework into a general format called **StreamMessage**. During this stage, the message body remains in the form of raw bytes.
+
+!!! warning ""
+    This stage is closely related to the features of the broker used, and in most cases, redefining it is not necessary.
+
+The parser declared at the `broker` level will be applied to all subscribers. The parser declared at the `subscriber` level is applied only to that specific subscriber and overrides the `broker` parser if specified.
+
+### Message Decoding
+
+At this stage, the body of the **StreamMessage** is transformed into a format suitable for processing within your subscriber function. This is the stage you may need to redefine more often.
diff --git a/search/docs/getting-started/serialization/parser.md b/search/docs/getting-started/serialization/parser.md
new file mode 100644
index 0000000..9819d7e
--- /dev/null
+++ b/search/docs/getting-started/serialization/parser.md
+# Custom Parser
+
+At this stage, **FastStream** serializes an incoming message from the broker's framework into a general format called **StreamMessage**. During this stage, the message body remains in the form of raw bytes.
+
+**StreamMessage** is a general representation of a message within **FastStream**. It contains all the information required for message processing within **FastStream**.
+It is even used to represent message batches, so the primary reason to customize it is to redefine the metadata associated with **FastStream** messages.
+
+For example, you can specify your own header with the `message_id` semantic. This allows you to inform **FastStream** about this custom header through parser customization.
+
+## Signature
+
+To create a custom message parser, you should write a regular Python function (synchronous or asynchronous) with the following signature:
+
+=== "Kafka"
+    ``` python
+    from aiokafka import ConsumerRecord
+    from faststream.kafka import KafkaMessage
+
+    def parser(msg: ConsumerRecord) -> KafkaMessage:
+        ...
+    ```
+
+=== "RabbitMQ"
+    ``` python
+    from aio_pika import IncomingMessage
+    from faststream.rabbit import RabbitMessage
+
+    def parser(msg: IncomingMessage) -> RabbitMessage:
+        ...
+    ```
+
+=== "NATS"
+    ``` python
+    from nats.aio.msg import Msg
+    from faststream.nats import NatsMessage
+
+    def parser(msg: Msg) -> NatsMessage:
+        ...
+    ```
+
+Alternatively, you can reuse the original parser function with the following signature:
+
+=== "Kafka"
+    ``` python
+    from typing import Awaitable, Callable
+    from aiokafka import ConsumerRecord
+    from faststream.kafka import KafkaMessage
+
+    async def parser(
+        msg: ConsumerRecord,
+        original_parser: Callable[[ConsumerRecord], Awaitable[KafkaMessage]],
+    ) -> KafkaMessage:
+        return await original_parser(msg)
+    ```
+
+=== "RabbitMQ"
+    ``` python
+    from typing import Awaitable, Callable
+    from aio_pika import IncomingMessage
+    from faststream.rabbit import RabbitMessage
+
+    async def parser(
+        msg: IncomingMessage,
+        original_parser: Callable[[IncomingMessage], Awaitable[RabbitMessage]],
+    ) -> RabbitMessage:
+        return await original_parser(msg)
+    ```
+
+=== "NATS"
+    ``` python
+    from typing import Awaitable, Callable
+    from nats.aio.msg import Msg
+    from faststream.nats import NatsMessage
+
+    async def parser(
+        msg: Msg,
+        original_parser: Callable[[Msg], Awaitable[NatsMessage]],
+    ) -> NatsMessage:
+        return await original_parser(msg)
+    ```
+
+The argument naming doesn't matter; the original parser will always be passed as the second argument.
+
+!!! note
+    The original parser is always an asynchronous function, so your custom parser should also be asynchronous.
+
+Afterward, you can set this custom parser at the broker or subscriber level.
+
+## Example
+
+As an example, let's redefine `message_id` to a custom header:
+
+=== "Kafka"
+    ``` python linenums="1" hl_lines="9-15 18 28"
+    {!> docs_src/getting_started/serialization/parser_kafka.py !}
+    ```
+
+=== "RabbitMQ"
+    ``` python linenums="1" hl_lines="9-15 18 28"
+    {!> docs_src/getting_started/serialization/parser_rabbit.py !}
+    ```
+
+=== "NATS"
+    ``` python linenums="1" hl_lines="9-15 18 28"
+    {!> docs_src/getting_started/serialization/parser_nats.py !}
+    ```
diff --git a/search/docs/getting-started/subscription/annotation.md b/search/docs/getting-started/subscription/annotation.md
new file mode 100644
index 0000000..2ac5b8a
--- /dev/null
+++ b/search/docs/getting-started/subscription/annotation.md
+# Annotation Serialization
+
+## Basic Usage
+
+As you already know, **FastStream** serializes your incoming message body according to the function type annotations using [**Pydantic**](https://docs.pydantic.dev){.external-link target="_blank"}.
+
+So, here are some valid use cases:
+
+```python
+@broker.subscriber("test")
+async def handle(msg: str):
+    ...
+
+@broker.subscriber("test")
+async def handle(msg: bytes):
+    ...
+
+@broker.subscriber("test")
+async def handle(msg: int):
+    ...
+```
+
+The same works with other Python primitive types as well (`#!python float`, `#!python bool`, `#!python datetime`, etc.)
+
+!!! note
+    If the incoming message cannot be serialized by the described schema, **FastStream** raises a `pydantic.ValidationError` with a correct log message.
+
+Also, thanks to **Pydantic** (again), **FastStream** is able to serialize (and validate) more complex types like `pydantic.HttpUrl`, `pydantic.PositiveInt`, etc.
+
+## JSON Basic Serialization
+
+But how can we serialize a more complex message, like `#!json { "name": "John", "user_id": 1 }`?
+
+Sure, we can serialize it as a simple `#!python dict`:
+
+```python
+from typing import Any
+
+@broker.subscriber("test")
+async def handle(msg: dict[str, Any]):
+    ...
+```
+
+But that doesn't look like proper message validation, does it?
+
+For this reason, **FastStream** supports per-argument message serialization: you can declare multiple arguments with various types, and your message will be unpacked into them:
+
+=== "Kafka"
+    ```python
+    {!> docs_src/getting_started/subscription/annotation_kafka.py [ln:8-11] !}
+    ```
+
+=== "RabbitMQ"
+    ```python
+    {!> docs_src/getting_started/subscription/annotation_rabbit.py [ln:8-11] !}
+    ```
+
+=== "NATS"
+    ```python
+    {!> docs_src/getting_started/subscription/annotation_nats.py [ln:8-11] !}
+    ```
diff --git a/search/docs/getting-started/subscription/filtering.md b/search/docs/getting-started/subscription/filtering.md
new file mode 100644
index 0000000..661facc
--- /dev/null
+++ b/search/docs/getting-started/subscription/filtering.md
+# Application-level Filtering
+
+**FastStream** also allows you to choose the way a message is processed based on its headers, body type, or something else. The `filter` feature enables you to consume various messages with different schemas within a single event stream.
+
+!!! tip
+    A message must be consumed exactly ONCE (overlapping filters are not allowed).
+
+As an example, let's create a subscriber for both `JSON` and non-`JSON` messages:
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="10 17"
+    {!> docs_src/getting_started/subscription/filter_kafka.py [ln:1-19] !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="10 17"
+    {!> docs_src/getting_started/subscription/filter_rabbit.py [ln:1-19] !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="10 17"
+    {!> docs_src/getting_started/subscription/filter_nats.py [ln:1-19] !}
+    ```
+
+!!! note
+    A subscriber without a filter is a default subscriber. It consumes messages that have not been consumed yet.
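+
+To make the pattern concrete, here is a minimal sketch (assuming the **Kafka** broker; the actual tutorial snippet files may differ in details):
+
+```python
+from faststream import FastStream
+from faststream.kafka import KafkaBroker
+
+broker = KafkaBroker()
+app = FastStream(broker)
+
+@broker.subscriber(
+    "test-topic",
+    filter=lambda msg: msg.content_type == "application/json",
+)
+async def handle(name: str, user_id: int):
+    ...  # consumes JSON messages only
+
+@broker.subscriber("test-topic")  # the default subscriber
+async def default_handler(msg: str):
+    ...  # consumes everything else
+```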
+
+Now, the following message will be delivered to the `handle` function:
+
+=== "Kafka"
+    ```python hl_lines="2"
+    {!> docs_src/getting_started/subscription/filter_kafka.py [ln:24-27] !}
+    ```
+
+=== "RabbitMQ"
+    ```python hl_lines="2"
+    {!> docs_src/getting_started/subscription/filter_rabbit.py [ln:24-27] !}
+    ```
+
+=== "NATS"
+    ```python hl_lines="2"
+    {!> docs_src/getting_started/subscription/filter_nats.py [ln:24-27] !}
+    ```
+
+And this one will be delivered to the `default_handler`:
+
+=== "Kafka"
+    ```python hl_lines="2"
+    {!> docs_src/getting_started/subscription/filter_kafka.py [ln:29-32] !}
+    ```
+
+=== "RabbitMQ"
+    ```python hl_lines="2"
+    {!> docs_src/getting_started/subscription/filter_rabbit.py [ln:29-32] !}
+    ```
+
+=== "NATS"
+    ```python hl_lines="2"
+    {!> docs_src/getting_started/subscription/filter_nats.py [ln:29-32] !}
+    ```
diff --git a/search/docs/getting-started/subscription/index.md b/search/docs/getting-started/subscription/index.md
new file mode 100644
index 0000000..a155fff
--- /dev/null
+++ b/search/docs/getting-started/subscription/index.md
+# Subscription Basics
+
+**FastStream** provides a Message Broker agnostic way to subscribe to event streams.
+
+You don't even need to know about the topics/queues/subjects or any other inner objects of the broker you use.
+The basic syntax is the same for all brokers:
+
+=== "Kafka"
+    ```python
+    from faststream.kafka import KafkaBroker
+
+    broker = KafkaBroker()
+
+    @broker.subscriber("test")  # topic name
+    async def handle_msg(msg_body):
+        ...
+    ```
+
+=== "RabbitMQ"
+    ```python
+    from faststream.rabbit import RabbitBroker
+
+    broker = RabbitBroker()
+
+    @broker.subscriber("test")  # queue name
+    async def handle_msg(msg_body):
+        ...
+    ```
+
+=== "NATS"
+    ```python
+    from faststream.nats import NatsBroker
+
+    broker = NatsBroker()
+
+    @broker.subscriber("test")  # subject name
+    async def handle_msg(msg_body):
+        ...
+    ```
+
+!!! tip
+    If you want to use Message Broker specific features, please visit the corresponding broker documentation section.
+    The **Tutorial** section describes only the general features.
+
+Synchronous functions are supported as well:
+
+=== "Kafka"
+    ```python
+    from faststream.kafka import KafkaBroker
+
+    broker = KafkaBroker()
+
+    @broker.subscriber("test")  # topic name
+    def handle_msg(msg_body):
+        ...
+    ```
+
+=== "RabbitMQ"
+    ```python
+    from faststream.rabbit import RabbitBroker
+
+    broker = RabbitBroker()
+
+    @broker.subscriber("test")  # queue name
+    def handle_msg(msg_body):
+        ...
+    ```
+
+=== "NATS"
+    ```python
+    from faststream.nats import NatsBroker
+
+    broker = NatsBroker()
+
+    @broker.subscriber("test")  # subject name
+    def handle_msg(msg_body):
+        ...
+    ```
+
+## Message Body Serialization
+
+Generally, **FastStream** uses your function type annotation to serialize the incoming message body with [**Pydantic**](https://docs.pydantic.dev){.external-link target="_blank"}. This is similar to how [**FastAPI**](https://fastapi.tiangolo.com){.external-link target="_blank"} works (if you are familiar with it).
+
+```python
+@broker.subscriber("test")
+async def handle_str(msg_body: str):
+    ...
+```
+
+You can also access some extra features through the function arguments, such as [Depends](../dependencies/index.md){.internal-link} and [Context](../context/existed.md){.internal-link} if required.
+
+However, you can easily disable Pydantic validation by creating a broker with the following option: `#!python Broker(apply_types=False)` (this also disables the Context and Depends features).
+
+This way, **FastStream** still consumes the `#!python json.loads` result, but without **Pydantic** validation and casting.
+
+=== "Kafka"
+    ```python
+    from faststream.kafka import KafkaBroker
+
+    broker = KafkaBroker(apply_types=False)
+
+    @broker.subscriber("test")
+    async def handle_msg(msg_body: str):  # just an annotation, has no real effect
+        ...
+    ```
+
+=== "RabbitMQ"
+    ```python
+    from faststream.rabbit import RabbitBroker
+
+    broker = RabbitBroker(apply_types=False)
+
+    @broker.subscriber("test")
+    async def handle_msg(msg_body: str):  # just an annotation, has no real effect
+        ...
+    ```
+
+=== "NATS"
+    ```python
+    from faststream.nats import NatsBroker
+
+    broker = NatsBroker(apply_types=False)
+
+    @broker.subscriber("test")
+    async def handle_msg(msg_body: str):  # just an annotation, has no real effect
+        ...
+    ```
+
+## Multiple Subscriptions
+
+You can also subscribe to multiple event streams at the same time with one function. Just wrap it with multiple `#!python @broker.subscriber(...)` decorators (they have no effect on each other).
+
+```python
+@broker.subscriber("first_sub")
+@broker.subscriber("second_sub")
+async def handler(msg):
+    ...
+```
diff --git a/search/docs/getting-started/subscription/pydantic.md b/search/docs/getting-started/subscription/pydantic.md
new file mode 100644
index 0000000..24ee8fd
--- /dev/null
+++ b/search/docs/getting-started/subscription/pydantic.md
+# Pydantic Serialization
+
+## pydantic.Field
+
+**FastStream** also uses your handlers' annotations to collect information about the application schema and to generate the [**AsyncAPI**](https://www.asyncapi.com){.external-link target="_blank"} schema.
+
+You can enrich this information with extra details using `pydantic.Field` (such as title, description, and examples). Additionally, [**Fields**](https://docs.pydantic.dev/latest/usage/fields/){.external-link target="_blank"} usage allows you to add extra validations to your message schema.
+
+Just use `pydantic.Field` as a function default argument:
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="12-17"
+    {!> docs_src/getting_started/subscription/pydantic_fields_kafka.py !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="12-17"
+    {!> docs_src/getting_started/subscription/pydantic_fields_rabbit.py !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="12-17"
+    {!> docs_src/getting_started/subscription/pydantic_fields_nats.py !}
+    ```
+
+## pydantic.BaseModel
+
+To make your message schema reusable between different subscribers and publishers, you can declare it as a `pydantic.BaseModel` and use it as a single message annotation:
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="1 10 20"
+    {!> docs_src/getting_started/subscription/pydantic_model_kafka.py !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="1 10 20"
+    {!> docs_src/getting_started/subscription/pydantic_model_rabbit.py !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="1 10 20"
+    {!> docs_src/getting_started/subscription/pydantic_model_nats.py !}
+    ```
diff --git a/search/docs/getting-started/subscription/test.md b/search/docs/getting-started/subscription/test.md
new file mode 100644
index 0000000..55f7425
--- /dev/null
+++ b/search/docs/getting-started/subscription/test.md
+# Subscriber Testing
+
+Testability is a crucial part of any application, and **FastStream** provides you with the tools to test your code easily.
+
+## Original Application
+
+Let's take a look at the original application to test:
+
+=== "Kafka"
+    ```python linenums="1" title="annotation_kafka.py"
+    {!> docs_src/getting_started/subscription/annotation_kafka.py !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" title="annotation_rabbit.py"
+    {!> docs_src/getting_started/subscription/annotation_rabbit.py !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" title="annotation_nats.py"
+    {!> docs_src/getting_started/subscription/annotation_nats.py !}
+    ```
+
+It consumes **JSON** messages like `#!json { "name": "username", "user_id": 1 }`.
+
+Of course, you can test your consume function like a regular one:
+
+```python
+@pytest.mark.asyncio
+async def test_handler():
+    await handle("John", 1)
+```
+
+But if you want to test your function closer to your real runtime, you should use the special **FastStream** test client.
+
+## In-Memory Testing
+
+Deploying a whole service with a Message Broker is a bit too much just for testing purposes, especially in your CI environment. Not to mention the possible loss of messages due to network failures when working with real brokers.
+
+For this reason, **FastStream** has a special `TestClient` to make your broker work in `InMemory` mode.
+
+Just use it like a regular async context manager: all published messages will be routed in-memory (without any external dependencies) and consumed by the correct handler.
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="4 11-12"
+    {!> docs_src/getting_started/subscription/testing_kafka.py [ln:1-12] !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="4 11-12"
+    {!> docs_src/getting_started/subscription/testing_rabbit.py [ln:1-12] !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="4 11-12"
+    {!> docs_src/getting_started/subscription/testing_nats.py [ln:1-12] !}
+    ```
+
+### Catching Exceptions
+
+This way, you can catch any exceptions that occur inside your handler:
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="4"
+    {!> docs_src/getting_started/subscription/testing_kafka.py [ln:18-23] !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="4"
+    {!> docs_src/getting_started/subscription/testing_rabbit.py [ln:18-23] !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="4"
+    {!> docs_src/getting_started/subscription/testing_nats.py [ln:18-23] !}
+    ```
+
+### Validating Input
+
+Also, your handler has a mock object to validate your input or call counts:
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="6"
+    {!> docs_src/getting_started/subscription/testing_kafka.py [ln:9-14] !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="6"
+    {!> docs_src/getting_started/subscription/testing_rabbit.py [ln:9-14] !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="6"
+    {!> docs_src/getting_started/subscription/testing_nats.py [ln:9-14] !}
+    ```
+
+!!! note
+    The handler mock receives the decoded **JSON** message body. This way, you can validate the incoming message view, not the Python arguments.
+
+    Thus, our example checks not `#!python mock.assert_called_with(name="John", user_id=1)`, but `#!python mock.assert_called_with({ "name": "John", "user_id": 1 })`.
+
+You should be careful with this feature: all mock objects will be cleared when the context manager exits.
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="6 8"
+    {!> docs_src/getting_started/subscription/testing_kafka.py [ln:9-16] !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="6 8"
+    {!> docs_src/getting_started/subscription/testing_rabbit.py [ln:9-16] !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="6 8"
+    {!> docs_src/getting_started/subscription/testing_nats.py [ln:9-16] !}
+    ```
+
+## Real Broker Testing
+
+If you want to test your application in a real environment, you don't have to rewrite all your tests: just pass the optional `with_real` parameter to your `TestClient` context manager. This way, `TestClient` supports all the testing features but uses an unpatched broker to send and consume messages.
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="4 11 13 20 23"
+    {!> docs_src/getting_started/subscription/real_testing_kafka.py !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="4 11 13 20 23"
+    {!> docs_src/getting_started/subscription/real_testing_rabbit.py !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="4 11 13 20 23"
+    {!> docs_src/getting_started/subscription/real_testing_nats.py !}
+    ```
+
+!!! tip
+    When you're using a patched broker to test your consumers, the publish method calls the consumer synchronously, so you don't need to wait until your message is consumed. In the real broker's case, however, it doesn't work this way.
+
+    For this reason, you have to wait for message consumption manually with the special `#!python handler.wait_call(timeout)` method. Also, inner handler exceptions will be raised in this function, not in `#!python broker.publish(...)`.
+
+### A Little Tip
+
+It can be very helpful to set the `with_real` flag using an environment variable. This way, you will be able to choose the testing mode right from the command line:
+
+```bash
+WITH_REAL=True/False pytest tests/
+```
+
+To learn more about managing your application configuration, visit [this](../config/index.md){.internal-link} page.
diff --git a/search/docs/getting-started/template/index.md b/search/docs/getting-started/template/index.md
new file mode 100644
index 0000000..8f794b1
--- /dev/null
+++ b/search/docs/getting-started/template/index.md
+# FastStream Template
+
+[FastStream Template](https://github.com/airtai/faststream-template/tree/main){.external-link target="_blank"} is a versatile repository that provides a solid foundation for your Python projects. It comes with a basic application, testing infrastructure, linting scripts, and various development tools to kickstart your development process. Whether you're building a new application from scratch or want to enhance an existing one, this template will save you time and help you maintain high code quality.
+
+## Features
+
+* **Basic Application**: [FastStream Template](https://github.com/airtai/faststream-template/tree/main){.external-link target="_blank"} includes a basic Python application as a starting point for your project. You can easily replace it with your own code.
+
+* **Testing Framework**: We've set up [`pytest`](https://pytest.org/){.external-link target="_blank"} for running unit tests. Write your tests in the `tests/` directory and use the provided workflow for automated testing.
+
+* **Linting**: Keep your code clean and consistent with linting tools. The repository includes linting scripts and configurations for [`mypy`](https://mypy.readthedocs.io/en/stable/){.external-link target="_blank"}, [`black`](https://github.com/psf/black){.external-link target="_blank"}, [`ruff`](https://github.com/astral-sh/ruff){.external-link target="_blank"} and [`bandit`](https://bandit.readthedocs.io/en/latest/){.external-link target="_blank"}.
+
+* **Docker Support**: The included Dockerfile allows you to containerize your [`FastStream`](https://github.com/airtai/faststream){.external-link target="_blank"} application. Build and run your application in a containerized environment with ease.
+
+* **Dependency Management**: All application requirements and development dependencies are specified in the `pyproject.toml` file. This includes not only your project's dependencies but also configurations for various tools like [`pytest`](https://pytest.org/){.external-link target="_blank"}, [`mypy`](https://mypy.readthedocs.io/en/stable/){.external-link target="_blank"}, [`black`](https://github.com/psf/black){.external-link target="_blank"}, [`ruff`](https://github.com/astral-sh/ruff){.external-link target="_blank"}, and [`bandit`](https://bandit.readthedocs.io/en/latest/){.external-link target="_blank"}.
+
+* **Continuous Integration (CI)**: [FastStream Template](https://github.com/airtai/faststream-template/tree/main){.external-link target="_blank"} comes with three [GitHub Actions](https://github.com/features/actions){.external-link target="_blank"} workflows under the `.github/workflows` directory:
+
+    1. **Static Analysis and Testing**: This workflow consists of two jobs. The first job runs static analysis tools ([`mypy`](https://mypy.readthedocs.io/en/stable/){.external-link target="_blank"} and [`bandit`](https://bandit.readthedocs.io/en/latest/){.external-link target="_blank"}) to check your code for potential issues. If successful, the second job runs [`pytest`](https://pytest.org/){.external-link target="_blank"} to execute your test suite.
+
+    2. **Docker Build and Push**: This workflow automates the process of building a [`Docker`](https://www.docker.com/){.external-link target="_blank"} image for your [`FastStream`](https://github.com/airtai/faststream){.external-link target="_blank"} application and pushing it to the [GitHub Container Registry](https://ghcr.io){.external-link target="_blank"}.
+
+    3. **AsyncAPI Documentation**: The third workflow builds [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation for your [`FastStream`](https://github.com/airtai/faststream){.external-link target="_blank"} application and deploys it to [GitHub Pages](https://pages.github.com/){.external-link target="_blank"}. This is useful for documenting your API and making it accessible to others.
+
+## Getting Started
+
+To set up your development environment, follow these steps:
+
+1. In the [FastStream Template](https://github.com/airtai/faststream-template/tree/main){.external-link target="_blank"} repository, click on **Use this template** -> **Create a new repository**:
+    ![use-this-template](https://github.com/airtai/faststream/assets/7011056/6e4e913c-8cd5-4e14-95dc-3bc13b6989be)
+    In the next screen, fill out details such as repository name, description, etc.:
+    ![create-repo](https://github.com/airtai/faststream/assets/7011056/77734648-6356-4388-b8c4-279a38fb5f65)
+
+2. Clone this repository to your local machine:
+    ```bash
+    git clone https://github.com/<username>/<repository-name>.git
+    cd <repository-name>
+    ```
+    > **_NOTE:_** Replace `<username>` with your GitHub username and `<repository-name>` with the name of your repository.
+
+3. Install all development requirements using pip:
+    ```bash
+    pip install -e ".[dev]"
+    ```
+
+## Development
+
+The application code is located in the `app/` directory. You can add new features or fix bugs in this directory. However, remember that code changes must be accompanied by corresponding updates to the tests located in the `tests/` directory.
+
+## Running Tests
+
+Once you have updated the tests, you can execute them using [`pytest`](https://pytest.org/){.external-link target="_blank"}:
+
+```bash
+pytest
+```
+
+## Running FastStream Application Locally
+
+To run the [`FastStream`](https://github.com/airtai/faststream){.external-link target="_blank"} application locally, follow these steps:
+
+1. Start the Kafka Docker container locally using the provided script:
+    ```bash
+    ./scripts/start_kafka_broker_locally.sh
+    ```
+
+2. Start the [`FastStream`](https://github.com/airtai/faststream){.external-link target="_blank"} application with the following command:
+    ```bash
+    faststream run app.application:app --workers 1
+    ```
+
+3. You can now send messages to the Kafka topic and test the application. Optionally, if you want to view messages in a topic, you can subscribe to it using the provided script:
+    ```bash
+    ./scripts/subscribe_to_kafka_broker_locally.sh
+    ```
+
+4. To stop the [`FastStream`](https://github.com/airtai/faststream){.external-link target="_blank"} application, press `Ctrl+C`.
+
+5. Finally, stop the Kafka Docker container by running the script:
+    ```bash
+    ./scripts/stop_kafka_broker_locally.sh
+    ```
+
+## Building and Testing Docker Image Locally
+
+If you'd like to build and test the [`Docker`](https://www.docker.com/){.external-link target="_blank"} image locally, follow these steps:
+
+1. Run the provided script to build the [`Docker`](https://www.docker.com/){.external-link target="_blank"} image locally. Use the following command:
+    ```bash
+    ./scripts/build_docker.sh
+    ```
+    This script will build the [`Docker`](https://www.docker.com/){.external-link target="_blank"} image locally with the same name as the one built in `CI`.
+
+2. Before starting the [`Docker`](https://www.docker.com/){.external-link target="_blank"} container, ensure that a Kafka [`Docker`](https://www.docker.com/){.external-link target="_blank"} container is running locally. You can start it using the provided script:
+    ```bash
+    ./scripts/start_kafka_broker_locally.sh
+    ```
+
+3. Once Kafka is up and running, you can start the local [`Docker`](https://www.docker.com/){.external-link target="_blank"} container using the following command:
+    ```bash
+    docker run --rm --name faststream-app --net=host ghcr.io/<username>/<repository-name>:latest
+    ```
+    `--rm`: This flag removes the container once it stops running, ensuring that it doesn't clutter your system with unused containers.
+    `--name faststream-app`: Assigns a name to the running container, in this case, "faststream-app".
+    `--net=host`: This flag allows the [`Docker`](https://www.docker.com/){.external-link target="_blank"} container to share the host's network namespace.
+
+4. To stop the local [`Docker`](https://www.docker.com/){.external-link target="_blank"} container, simply press `Ctrl+C` in your terminal.
+
+5. Finally, stop the Kafka [`Docker`](https://www.docker.com/){.external-link target="_blank"} container by running the provided script:
+    ```bash
+    ./scripts/stop_kafka_broker_locally.sh
+    ```
+
+> **_NOTE:_** Replace `<username>` with your GitHub username and `<repository-name>` with the name of your repository in the above commands.
+
+## Code Linting
+
+After making changes to the code, it's essential to ensure it adheres to coding standards. We provide a script to help you with code formatting and linting. Run the following script to automatically fix linting issues:
+
+```bash
+./scripts/lint.sh
+```
+
+## Static Analysis
+
+Static analysis tools [`mypy`](https://mypy.readthedocs.io/en/stable/){.external-link target="_blank"} and [`bandit`](https://bandit.readthedocs.io/en/latest/){.external-link target="_blank"} can help identify potential issues in your code. To run static analysis, use the following script:
+
+```bash
+./scripts/static-analysis.sh
+```
+
+If there are any static analysis errors, resolve them in your code and rerun the script until it passes successfully.
+
+## Viewing AsyncAPI Documentation
+
+The [`FastStream`](https://github.com/airtai/faststream){.external-link target="_blank"} framework supports [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation. To ensure that your changes are reflected in the [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation, follow these steps:
+
+1. Run the following command to view the [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation:
+    ```bash
+    faststream docs serve app.application:app
+    ```
+    This command builds the [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} specification file, generates [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation based on the specification, and serves it at `localhost:8000`.
+
+2. Open your web browser and navigate to `localhost:8000` to view the [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation reflecting your changes.
+
+3. To stop the [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation server, press `Ctrl+C`.
+
+## Contributing
+
+Once you have successfully completed all the above steps, you are ready to contribute your changes:
+
+1. Add and commit your changes:
+    ```bash
+    git add .
+    git commit -m "Your commit message"
+    ```
+
+2. Push your changes to GitHub:
+    ```bash
+    git push origin your-branch
+    ```
+
+3. Create a pull request on GitHub.
+
+## Continuous Integration (CI)
+
+This repository is equipped with GitHub Actions that automate static analysis and pytest in the CI pipeline. Even if you forget to perform any of the required steps, CI will catch any issues before merging your changes.
+
+This repository has three workflows, each triggered when code is pushed:
+
+1. **Tests Workflow**: This workflow is named "Tests" and consists of two jobs. The first job runs static analysis tools [`mypy`](https://mypy.readthedocs.io/en/stable/){.external-link target="_blank"} and [`bandit`](https://bandit.readthedocs.io/en/latest/){.external-link target="_blank"} to identify potential issues in the codebase. The second job runs tests using [`pytest`](https://pytest.org/){.external-link target="_blank"} to ensure the functionality of the application. Both jobs run simultaneously to expedite the `CI` process.
+
+2. **Build Docker Image Workflow**: This workflow is named "Build Docker Image" and has one job. In this job, a [`Docker`](https://www.docker.com/){.external-link target="_blank"} image is built based on the provided Dockerfile. The built image is then pushed to the [**GitHub Container Registry**](https://ghcr.io){.external-link target="_blank"}, making it available for deployment or other purposes.
+
+3. **Deploy FastStream AsyncAPI Docs Workflow**: The final workflow is named "Deploy FastStream AsyncAPI Docs" and also consists of a single job. In this job, the [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation is built from the specification, and the resulting documentation is deployed to [**GitHub Pages**](https://pages.github.com/){.external-link target="_blank"}. This allows for easy access and sharing of the [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation with the project's stakeholders.
+
+## Viewing AsyncAPI Documentation Hosted at GitHub Pages
+
+After the **Deploy FastStream AsyncAPI Docs** workflow in `CI` has been successfully completed, the [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation is automatically deployed to [**GitHub Pages**](https://pages.github.com/){.external-link target="_blank"}. This provides a convenient way to access and share the documentation with project stakeholders.
+
+To view the deployed [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation, open your web browser and navigate to the following URL:
+
+```txt
+https://<username>.github.io/<repository-name>/
+```
+
+> **_NOTE:_** Replace `<username>` with your GitHub username and `<repository-name>` with the name of your repository.
+
+You will be directed to the [**GitHub Pages**](https://pages.github.com/){.external-link target="_blank"} site where your [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation is hosted. This hosted documentation allows you to easily share your [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} specifications with others and provides a centralized location for reviewing the [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation.
+
+## Deploying Docker Container
+
+Once the **Build Docker Image** workflow in `CI` has successfully completed, the built [`Docker`](https://www.docker.com/){.external-link target="_blank"} image is pushed to the [**GitHub Container Registry**](https://ghcr.io){.external-link target="_blank"}. You can then deploy this image on your server by following these steps:
+
+1. Pull the [`Docker`](https://www.docker.com/){.external-link target="_blank"} image from the [**GitHub Container Registry**](https://ghcr.io){.external-link target="_blank"} to your server using the following command:
+    ```bash
+    docker pull ghcr.io/<username>/<repository-name>:latest
+    ```
+    > **_NOTE:_** Replace `<username>` with your GitHub username and `<repository-name>` with the name of your repository.
+
+2. After successfully pulling the image, start the [`Docker`](https://www.docker.com/){.external-link target="_blank"} container using the following command:
+    ```bash
+    docker run --rm --name faststream-app --env-file /path/to/env-file ghcr.io/<username>/<repository-name>:latest
+    ```
+    `--rm`: This flag removes the container once it stops running, ensuring that it doesn't clutter your system with unused containers.
+    `--name faststream-app`: Assigns a name to the running container, in this case, "faststream-app".
+    `--env-file /path/to/env-file`: Specifies the path to an environment file (commonly a `.env` file) that contains environment variables required by your [`FastStream`](https://github.com/airtai/faststream){.external-link target="_blank"} application. Storing secrets and configuration in an environment file is a secure best practice for handling sensitive information such as the Kafka host, port, and authentication details.
+
+By following these steps, you can easily deploy your [`FastStream`](https://github.com/airtai/faststream){.external-link target="_blank"} application as a [`Docker`](https://www.docker.com/){.external-link target="_blank"} container on your server. Remember to customize the `env-file` and other environment variables as needed to suit your specific application requirements.
diff --git a/search/docs/index.md b/search/docs/index.md
new file mode 100644
index 0000000..7969e3e
--- /dev/null
+++ b/search/docs/index.md
+---
+hide:
+  - navigation
+  - footer
+---
+
+# FastStream
+
+Effortless event stream integration for your services
+
+---
+

+ + Test Passing + + + + Coverage + + + + Downloads + + + + Package version + + + + Supported Python versions + + +
+ + + CodeQL + + + + Dependency Review + + + + License + + + + Code of Conduct + + + + Discord + +

+
+---
+
+## Features
+
+[**FastStream**](https://faststream.airt.ai/) simplifies the process of writing producers and consumers for message queues, handling all the
+parsing, networking and documentation generation automatically.
+
+Making streaming microservices has never been easier. Designed with junior developers in mind, **FastStream** simplifies your work while keeping the door open for more advanced use cases. Here's a look at the core features that make **FastStream** a go-to framework for modern, data-centric microservices.
+
+- **Multiple Brokers**: **FastStream** provides a unified API to work across multiple message brokers (**Kafka**, **RabbitMQ**, **NATS** support)
+
+- [**Pydantic Validation**](#writing-app-code): Leverage [**Pydantic's**](https://docs.pydantic.dev/){.external-link target="_blank"} validation capabilities to serialize and validate incoming messages
+
+- [**Automatic Docs**](#project-documentation): Stay ahead with automatic [**AsyncAPI**](https://www.asyncapi.com/){.external-link target="_blank"} documentation
+
+- **Intuitive**: Fully typed editor support makes your development experience smooth, catching errors before they reach runtime
+
+- [**Powerful Dependency Injection System**](#dependencies): Manage your service dependencies efficiently with **FastStream**'s built-in DI system
+
+- [**Testable**](#testing-the-service): Supports in-memory tests, making your CI/CD pipeline faster and more reliable
+
+- **Extendable**: Use extensions for lifespans, custom serialization and middlewares
+
+- [**Integrations**](#any-framework): **FastStream** is fully compatible with any HTTP framework you want ([**FastAPI**](#fastapi-plugin) especially)
+
+- [**Built for Automatic Code Generation**](#code-generator): **FastStream** is optimized for automatic code generation using advanced models like GPT and Llama
+
+That's **FastStream** in a nutshell: easy, efficient, and powerful. Whether you're just starting with streaming microservices or looking to scale, **FastStream** has got you covered.
+
+---
+
+## History
+
+**FastStream** is a new package based on the ideas and experiences gained from [**FastKafka**](https://github.com/airtai/fastkafka){.external-link target="_blank"} and [**Propan**](https://github.com/lancetnik/propan){.external-link target="_blank"}. By joining our forces, we picked up the best from both packages and created a unified way to write services capable of processing streamed data regardless of the underlying protocol. We'll continue to maintain both packages, but new development will be in this project. If you are starting a new service, this package is the recommended way to do it.
+
+---
+
+## Install
+
+**FastStream** works on **Linux**, **macOS**, **Windows** and most **Unix**-style operating systems.
+You can install it with `pip` as usual:
+
+=== "Kafka"
+    ```sh
+    pip install faststream[kafka]
+    ```
+
+=== "RabbitMQ"
+    ```sh
+    pip install faststream[rabbit]
+    ```
+
+=== "NATS"
+    ```sh
+    pip install faststream[nats]
+    ```
+
+!!! tip ""
+    By default, **FastStream** uses **PydanticV2**, written in **Rust**, but you can downgrade it manually if your platform has no **Rust** support; **FastStream** will work correctly with **PydanticV1** as well.
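+
+    For example, if you need to stay on **PydanticV1**, an explicit pin like the following should work (shown with the **Kafka** extra; this exact pin is our illustration, not an official requirement):
+
+    ```sh
+    pip install faststream[kafka] "pydantic<2"
+    ```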
+
+---
+
+## Writing app code
+
+**FastStream** brokers provide convenient function decorators `#!python @broker.subscriber`
+and `#!python @broker.publisher` to allow you to delegate the actual process of:
+
+- consuming and producing data to event queues, and
+
+- decoding and encoding JSON-encoded messages
+
+These decorators make it easy to specify the processing logic for your consumers and producers, allowing you to focus on the core business logic of your application without worrying about the underlying integration.
+
+Also, **FastStream** uses [**Pydantic**](https://docs.pydantic.dev/){.external-link target="_blank"} to parse input
+JSON-encoded data into Python objects, making it easy to work with structured data in your applications, so you can serialize your input messages just by using type annotations.
+
+Here is an example Python app using **FastStream** that consumes data from an incoming data stream and outputs the data to another one:
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="9"
+    {!> docs_src/index/basic_kafka.py!}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="9"
+    {!> docs_src/index/basic_rabbit.py!}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="9"
+    {!> docs_src/index/basic_nats.py!}
+    ```
+
+Also, **Pydantic**'s [`BaseModel`](https://docs.pydantic.dev/usage/models/){.external-link target="_blank"} class allows you
+to define messages using a declarative syntax, making it easy to specify the fields and types of your messages.
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="1 8 14"
+    {!> docs_src/index/pydantic_kafka.py !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="1 8 14"
+    {!> docs_src/index/pydantic_rabbit.py !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="1 8 14"
+    {!> docs_src/index/pydantic_nats.py !}
+    ```
+
+---
+
+## Testing the service
+
+The service can be tested using the `TestBroker` context managers, which, by default, put the broker into "testing mode".
+
+The tester will redirect your `subscriber` and `publisher` decorated functions to in-memory brokers, allowing you to quickly test your app without the need for a running broker and all its dependencies.
+
+Using pytest, the test for our service would look like this:
+
+=== "Kafka"
+    ```python linenums="1" hl_lines="5 10 18-19"
+    # Code above omitted 👆
+
+    {!> docs_src/index/test_kafka.py [ln:3-21] !}
+    ```
+
+=== "RabbitMQ"
+    ```python linenums="1" hl_lines="5 10 18-19"
+    # Code above omitted 👆
+
+    {!> docs_src/index/test_rabbit.py [ln:3-21] !}
+    ```
+
+=== "NATS"
+    ```python linenums="1" hl_lines="5 10 18-19"
+    # Code above omitted 👆
+
+    {!> docs_src/index/test_nats.py [ln:3-21] !}
+    ```
+
+## Running the application
+
+The application can be started using the built-in **FastStream** CLI command.
+
+To run the service, use the **FastStream CLI** command and pass the module (in this case, the file where the app implementation is located) and the app symbol to the command.
+
+``` shell
+faststream run basic:app
+```
+
+After running the command, you should see the following output:
+
+``` shell
+INFO - FastStream app starting...
+INFO - input_data | - `HandleMsg` waiting for messages
+INFO - FastStream app started successfully! To exit press CTRL+C
+```
+
+Also, **FastStream** provides you with a great hot reload feature to improve your development experience:
+
+``` shell
+faststream run basic:app --reload
+```
+
+And a multiprocessing horizontal scaling feature as well:
+
+``` shell
+faststream run basic:app --workers 3
+```
+
+You can learn more about **CLI** features [here](./getting-started/cli/index.md){.internal-link}.
+
+---
+
+## Project Documentation
+
+**FastStream** automatically generates documentation for your project according to the [**AsyncAPI**](https://www.asyncapi.com/){.external-link target="_blank"} specification. You can work with both the generated artifacts and place a web view of your documentation on resources available to related teams.
+
+The availability of such documentation significantly simplifies the integration of services: you can immediately see what channels and message formats the application works with. And most importantly, it won't cost anything - **FastStream** has already created the docs for you!
+
+![HTML-page](../assets/img/AsyncAPI-basic-html-short.png)
+
+---
+
+## Dependencies
+
+**FastStream** (thanks to [**FastDepends**](https://lancetnik.github.io/FastDepends/){.external-link target="_blank"}) has a dependency management system similar to `pytest fixtures` and `FastAPI Depends` at the same time. Function arguments declare the dependencies you need, and a special decorator delivers them from the global Context object.
+
+```python linenums="1" hl_lines="9-10"
+from faststream import Depends, Logger
+async def base_dep(user_id: int) -> bool:
+    return True
+
+@broker.subscriber("in-test")
+async def base_handler(user: str,
+                       logger: Logger,
+                       dep: bool = Depends(base_dep)):
+    assert dep is True
+    logger.info(user)
+```
+
+---
+
+## HTTP Frameworks integrations
+
+### Any Framework
+
+You can use **FastStream** `MQBrokers` without a `FastStream` application.
+Just *start* and *stop* them according to your application's lifespan.
+
+{! includes/index/integrations.md !}
+
+### **FastAPI** Plugin
+
+Also, **FastStream** can be used as part of **FastAPI**.
+
+Just import the **StreamRouter** you need and declare the message handler with the same `#!python @router.subscriber(...)` and `#!python @router.publisher(...)` decorators.
+
+!!! tip
+    When used this way, **FastStream** does not utilize its own dependency and serialization system but integrates seamlessly into **FastAPI**.
+    This means you can use `Depends`, `BackgroundTasks` and other **FastAPI** tools as if it were a regular HTTP endpoint.
+
+{! includes/getting_started/integrations/fastapi/1.md !}
+
+!!! note
+    More integration features can be found [here](./getting-started/integrations/fastapi/index.md){.internal-link}
+
+---
+
+## Code generator
+
+As you can see, **FastStream** is an incredibly user-friendly framework. However, we've taken it a step further and made it even more user-friendly! Introducing [**faststream-gen**](https://faststream-gen.airt.ai){.external-link target="_blank"}, a Python library that harnesses the power of generative AI to effortlessly generate **FastStream** applications. Simply describe your application requirements, and [**faststream-gen**](https://faststream-gen.airt.ai){.external-link target="_blank"} will generate a production-grade **FastStream** project that is ready to deploy in no time.
+
+Save the application description inside `description.txt`:
+
+```
+Create a FastStream application using localhost broker for testing and use the
+default port number.
+
+It should consume messages from the 'input_data' topic, where each message is a
+JSON encoded object containing a single attribute: 'data'.
+
+While consuming from the topic, increment the value of the data attribute by 1.
+
+Finally, send a message to the 'output_data' topic.
+```
+
+and run the following command to create a new **FastStream** project:
+
+``` shell
+faststream_gen -i description.txt
+```
+
+``` shell
+✨ Generating a new FastStream application!
+ ✔ Application description validated.
+ ✔ FastStream app skeleton code generated. (takes around 15 to 45 seconds)...
+ ✔ The app and the tests are generated. (takes around 30 to 90 seconds)...
+ ✔ New FastStream project created.
+ ✔ Integration tests were successfully completed.
+ Tokens used: 10768
+ Total Cost (USD): $0.03284
+✨ All files were successfully generated!
+```
+
+### Tutorial
+
+We also invite you to explore our tutorial, where we will guide you through the process of utilizing the [**faststream-gen**](https://faststream-gen.airt.ai){.external-link target="_blank"} Python library to effortlessly create **FastStream** applications:
+
+- [Cryptocurrency analysis with FastStream](https://faststream-gen.airt.ai/Tutorial/Cryptocurrency_Tutorial/){.external-link target="_blank"}
+
+---
+
+## Stay in touch
+
+Please show your support and stay in touch by:
+
+- giving our [GitHub repository](https://github.com/airtai/faststream/){.external-link target="_blank"} a star, and
+
+- joining our [Discord server](https://discord.gg/CJWmYpyFbc){.external-link target="_blank"}
+
+Your support helps us to stay in touch with you and encourages us to
+continue developing and improving the framework. Thank you for your
+support!
+
+---
+
+## Contributors
+
+Thanks to all of these amazing people who made the project better!
+
diff --git a/search/docs/kafka/Publisher/batch_publisher.md b/search/docs/kafka/Publisher/batch_publisher.md
new file mode 100644
index 0000000..5697c91
--- /dev/null
+++ b/search/docs/kafka/Publisher/batch_publisher.md
@@ -0,0 +1,96 @@
+# Publishing in Batches
+
+## General overview
+
+If you need to send your data in batches, the `@broker.publisher(...)` decorator offers a convenient way to achieve this. To enable batch production, you need to perform two crucial steps:
+
+Step 1: When creating your publisher, set the `batch` argument to `True`. This configuration tells the publisher that you intend to send messages in batches.
+
+Step 2: In your producer function, return a tuple containing the messages you want to send as a batch. This action triggers the producer to gather the messages and transmit them as a batch to a Kafka broker.
+
+Let's delve into a detailed example illustrating how to produce messages in batches to the **output_data** topic while consuming from the **input_data_1** topic.
+
+## Code example
+
+First, let's take a look at the whole app and then dive deep into the steps for producing in batches. Here is the application code:
+
+```python linenums="1"
+from typing import Tuple
+
+from pydantic import BaseModel, Field, NonNegativeFloat
+
+from faststream import FastStream, Logger
+from faststream.kafka import KafkaBroker
+
+
+class Data(BaseModel):
+    data: NonNegativeFloat = Field(
+        ..., examples=[0.5], description="Float data example"
+    )
+
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+
+decrease_and_increase = broker.publisher("output_data", batch=True)
+
+
+@decrease_and_increase
+@broker.subscriber("input_data_1")
+async def on_input_data_1(msg: Data, logger: Logger) -> Tuple[Data, Data]:
+    logger.info(msg)
+    return Data(data=(msg.data * 0.5)), Data(data=(msg.data * 2.0))
+
+
+@broker.subscriber("input_data_2")
+async def on_input_data_2(msg: Data, logger: Logger) -> None:
+    logger.info(msg)
+    await decrease_and_increase.publish(
+        Data(data=(msg.data * 0.5)), Data(data=(msg.data * 2.0))
+    )
+```
+
+Below, we have highlighted the key lines of code that demonstrate the steps involved in creating and using a batch publisher:
+
+Step 1: Creation of the Publisher
+
+```python linenums="1"
+decrease_and_increase = broker.publisher("output_data", batch=True)
+```
+
+Step 2: Publishing an Actual Batch of Messages
+
+You can publish a batch by directly calling the publisher with the batch of messages you want to publish, as shown here:
+
+```python linenums="1"
+    await decrease_and_increase.publish(
+        Data(data=(msg.data * 0.5)), Data(data=(msg.data * 2.0))
+    )
+```
+
+Or you can decorate your processing function and return a batch of messages, as shown here:
+
+```python linenums="1"
+@decrease_and_increase
+@broker.subscriber("input_data_1")
+async def on_input_data_1(msg: Data, logger: Logger) -> Tuple[Data, Data]:
+    logger.info(msg)
+    return Data(data=(msg.data * 0.5)), Data(data=(msg.data * 2.0))
+```
+
+The application in the example implements both of these approaches; feel free to use whichever option fits your needs better.
+
+## Why publish in batches?
+
+In this example, we've explored how to leverage the `@broker.publisher(...)` decorator to efficiently publish messages in batches using FastStream and Kafka. By following the two key steps outlined in the previous sections, you can significantly enhance the performance and reliability of your Kafka-based applications.
+
+Publishing messages in batches offers several advantages when working with Kafka:
+
+1. **Improved Throughput**: Batch publishing allows you to send multiple messages in a single transmission, reducing the overhead associated with individual message delivery. This leads to improved throughput and lower latency in your Kafka applications.
+
+2. **Reduced Network and Broker Load**: Sending messages in batches reduces the number of network calls and broker interactions. This optimization minimizes the load on the Kafka brokers and network resources, making your Kafka cluster more efficient.
+
+3. **Atomicity**: Batches ensure that a group of related messages is processed together or not at all. This atomicity can be crucial in scenarios where message processing needs to maintain data consistency and integrity.
+
+4. **Enhanced Scalability**: With batch publishing, you can efficiently scale your Kafka applications to handle high message volumes. By sending messages in larger chunks, you can make the most of Kafka's parallelism and partitioning capabilities.
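+
+If you want to verify this behavior without a running broker, a quick in-memory test should work. The following is a minimal sketch, assuming `broker`, `Data`, and `on_input_data_1` are imported from the application code above; `TestKafkaBroker` is FastStream's in-memory test helper, and the handler's `.mock` attribute is the testing pattern the framework exposes:
+
+```python linenums="1"
+import pytest
+
+from faststream.kafka import TestKafkaBroker
+
+
+@pytest.mark.asyncio
+async def test_batch_publishing():
+    # The context manager swaps the real Kafka connection for an in-memory one.
+    async with TestKafkaBroker(broker) as br:
+        await br.publish(Data(data=2.0), "input_data_1")
+        # The subscriber should have been invoked exactly once.
+        on_input_data_1.mock.assert_called_once()
+```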
diff --git a/search/docs/kafka/Publisher/index.md b/search/docs/kafka/Publisher/index.md
new file mode 100644
index 0000000..5e34d73
--- /dev/null
+++ b/search/docs/kafka/Publisher/index.md
@@ -0,0 +1,126 @@
+# Publishing
+
+The **FastStream** KafkaBroker supports all regular [publishing use cases](../../getting-started/publishing/index.md){.internal-link}, and you can use them without any changes.
+
+However, if you wish to further customize the publishing logic, you should take a closer look at specific KafkaBroker parameters.
+
+## Basic Kafka Publishing
+
+The `KafkaBroker` uses the unified `publish` method (from a `producer` object) to send messages.
+
+In this case, you can use Python primitives and `pydantic.BaseModel` to define the content of the message you want to publish to the Kafka broker.
+
+You can specify the topic to send the message to by its name.
+
+1. Create your KafkaBroker instance:
+
+```python linenums="1"
+broker = KafkaBroker("localhost:9092")
+```
+
+2. Publish a message using the `publish` method:
+
+```python linenums="1"
+    msg = Data(data=0.5)
+
+    await broker.publish(
+        model_to_json(msg),
+        "input_data",
+        headers={"content-type": "application/json"},
+    )
+```
+
+This is the most basic way of using the KafkaBroker to publish a message.
+
+## Creating a publisher object
+
+The simplest way to use a KafkaBroker for publishing has a significant limitation: your publishers won't be documented in the AsyncAPI documentation. This might be acceptable for sending occasional one-off messages. However, if you're building a comprehensive service, it's recommended to create publisher objects. These objects can then be parsed and documented in your service's AsyncAPI documentation. Let's go ahead and create those publisher objects!
+
+1. Create your KafkaBroker instance:
+
+```python linenums="1"
+broker = KafkaBroker("localhost:9092")
+```
+
+2. Create a publisher instance:
+
+```python linenums="1"
+prepared_publisher = broker.publisher("input_data")
+```
+
+3. Publish a message using the `publish` method of the prepared publisher:
+
+```python linenums="1"
+    msg = Data(data=0.5)
+
+    await prepared_publisher.publish(
+        model_to_json(msg),
+        headers={"content-type": "application/json"},
+    )
+```
+
+Now, when you wrap your broker into a FastStream object, the publisher will be exported to the AsyncAPI documentation.
+
+## Decorating your publishing functions
+
+To publish messages effectively in the Kafka context, consider utilizing the Publisher Decorator. This approach offers an AsyncAPI representation and is ideal for rapidly developing applications.
+
+The Publisher Decorator creates a structured DataPipeline unit with both input and output components. The sequence in which you apply the Subscriber and Publisher decorators does not affect their functionality. However, note that a Publisher decorator can only be applied to a function that is also decorated by a Subscriber.
+
+This method relies on the return type annotation of the handler function to properly interpret the function's return value before sending it. Hence, it's important to ensure accuracy in defining the return type.
+
+Let's start by examining the entire application that utilizes the Publisher Decorator and then proceed to walk through it step by step.
+
+```python linenums="1"
+from pydantic import BaseModel, Field, NonNegativeFloat
+
+from faststream import FastStream
+from faststream.kafka import KafkaBroker
+
+
+class Data(BaseModel):
+    data: NonNegativeFloat = Field(
+        ..., examples=[0.5], description="Float data example"
+    )
+
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+
+to_output_data = broker.publisher("output_data")
+
+
+@to_output_data
+@broker.subscriber("input_data")
+async def on_input_data(msg: Data) -> Data:
+    return Data(data=msg.data + 1.0)
+```
+
+1. **Initialize the KafkaBroker instance:** Start by initializing a KafkaBroker instance with the necessary configuration, including the Kafka broker address.
+
+```python linenums="1"
+broker = KafkaBroker("localhost:9092")
+```
+
+2. **Prepare your publisher object to use later as a decorator:**
+
+```python linenums="1"
+to_output_data = broker.publisher("output_data")
+```
+
+3. **Create your processing logic:** Write a function that will consume the incoming messages in the defined format and produce a response to the defined topic.
+
+```python linenums="1"
+async def on_input_data(msg: Data) -> Data:
+    return Data(data=msg.data + 1.0)
+```
+
+4. **Decorate your processing function:** To connect your processing function to the desired Kafka topics, you need to decorate it with the `#!python @broker.subscriber` and `#!python @broker.publisher` decorators. Now, after you start your application, your processing function will be called whenever a new message is available in the subscribed topic, and its return value will be produced to the topic defined in the publisher decorator.
+
+```python linenums="1"
+@to_output_data
+@broker.subscriber("input_data")
+async def on_input_data(msg: Data) -> Data:
+    return Data(data=msg.data + 1.0)
+```
diff --git a/search/docs/kafka/Publisher/using_a_key.md b/search/docs/kafka/Publisher/using_a_key.md
new file mode 100644
index 0000000..73faef6
--- /dev/null
+++ b/search/docs/kafka/Publisher/using_a_key.md
@@ -0,0 +1,61 @@
+# Using a Partition Key
+
+Partition keys are a crucial concept in Apache Kafka, enabling you to determine the appropriate partition for a message. This ensures that related messages are kept together in the same partition, which can be invaluable for maintaining order or grouping related messages for efficient processing. Additionally, Kafka utilizes partitioning to distribute load across multiple brokers and scale horizontally, while replicating data across brokers provides fault tolerance.
+
+You can specify your partition keys when utilizing the `@KafkaBroker.publisher(...)` decorator in FastStream. This guide will walk you through the process of using partition keys effectively.
+
+## Publishing with a Partition Key
+
+To publish a message to a Kafka topic using a partition key, follow these steps:
+
+### Step 1: Define the Publisher
+
+In your FastStream application, define the publisher using the `@KafkaBroker.publisher(...)` decorator. This decorator allows you to configure various aspects of message publishing, including the partition key.
+
+```python linenums="1"
+to_output_data = broker.publisher("output_data")
+```
+
+### Step 2: Pass the Key
+
+When you're ready to publish a message with a specific key, simply include the `key` parameter in the `publish` function call. This key parameter is used to determine the appropriate partition for the message.
+
+```python linenums="1"
+    await to_output_data.publish(Data(data=msg.data + 1.0), key=b"key")
+```
+
+## Example Application
+
+Let's examine a complete application example that consumes messages from the **input_data** topic and publishes them with a specified key to the **output_data** topic. This example will illustrate how to incorporate partition keys into your Kafka-based applications:
+
+```python linenums="1"
+from pydantic import BaseModel, Field, NonNegativeFloat
+
+from faststream import Context, FastStream, Logger
+from faststream.kafka import KafkaBroker
+
+
+class Data(BaseModel):
+    data: NonNegativeFloat = Field(
+        ..., examples=[0.5], description="Float data example"
+    )
+
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+
+to_output_data = broker.publisher("output_data")
+
+
+@broker.subscriber("input_data")
+async def on_input_data(
+    msg: Data, logger: Logger, key: bytes = Context("message.raw_message.key")
+) -> None:
+    logger.info(f"on_input_data({msg=})")
+    await to_output_data.publish(Data(data=msg.data + 1.0), key=b"key")
+```
+
+As you can see, the primary difference from standard publishing is the inclusion of the `key` parameter in the `publish` call. This key parameter is essential for controlling how Kafka partitions and processes your messages.
+
+In summary, using partition keys in Apache Kafka is a fundamental practice for optimizing message distribution, maintaining order, and achieving efficient processing. It is a key technique for ensuring that your Kafka-based applications scale gracefully and handle large volumes of data effectively.
diff --git a/search/docs/kafka/Subscriber/batch_subscriber.md b/search/docs/kafka/Subscriber/batch_subscriber.md
new file mode 100644
index 0000000..3a207fd
--- /dev/null
+++ b/search/docs/kafka/Subscriber/batch_subscriber.md
@@ -0,0 +1,58 @@
+# Batch Subscriber
+
+If you want to consume data in batches, the `@broker.subscriber(...)` decorator makes it possible. By defining your consumed `msg` object as a list of messages and setting the `batch` parameter to `True`, the subscriber will call your consuming function with a batch of messages consumed from a single partition. Let's walk through how to achieve this.
+
+## Using the Subscriber with Batching
+
+To consume messages in batches, follow these steps:
+
+### Step 1: Define Your Subscriber
+
+In your FastStream application, define the subscriber using the `@broker.subscriber(...)` decorator. Ensure that you configure the `msg` object as a list and set the `batch` parameter to `True`. This configuration tells the subscriber to handle message consumption in batches.
+
+```python linenums="1"
+@broker.subscriber("test_batch", batch=True)
+```
+
+### Step 2: Implement Your Consuming Function
+
+Create a consuming function that accepts the list of messages. The `@broker.subscriber(...)` decorator will take care of collecting and grouping messages into batches based on the partition.
+
+```python linenums="1"
+@broker.subscriber("test_batch", batch=True)
+async def handle_batch(msg: List[HelloWorld], logger: Logger):
+    logger.info(msg)
+```
+
+## Example of Consuming in Batches
+
+Let's illustrate how to consume messages in batches from the **test_batch** topic with a practical example:
+
+```python linenums="1"
+from typing import List
+
+from pydantic import BaseModel, Field
+
+from faststream import FastStream, Logger
+from faststream.kafka import KafkaBroker
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+
+class HelloWorld(BaseModel):
+    msg: str = Field(
+        ...,
+        examples=["Hello"],
+        description="Demo hello world message",
+    )
+
+
+@broker.subscriber("test_batch", batch=True)
+async def handle_batch(msg: List[HelloWorld], logger: Logger):
+    logger.info(msg)
+```
+
+In this example, the subscriber is configured to process messages in batches, and the consuming function is designed to handle these batches efficiently.
+
+Consuming messages in batches is a valuable technique when you need to optimize the processing of high volumes of data in your Kafka-based applications. It allows for more efficient resource utilization and can enhance the overall performance of your data pipelines.
diff --git a/search/docs/kafka/Subscriber/index.md b/search/docs/kafka/Subscriber/index.md
new file mode 100644
index 0000000..bbeaf90
--- /dev/null
+++ b/search/docs/kafka/Subscriber/index.md
@@ -0,0 +1,78 @@
+# Basic Subscriber
+
+To start consuming from a Kafka topic, just decorate your consuming function with a `#!python @broker.subscriber(...)` decorator, passing a string as a topic key.
+
+In the following example, we will create a simple FastStream app that will consume `HelloWorld` messages from a **hello_world** topic.
+
+The full app code looks like this:
+
+```python linenums="1"
+from pydantic import BaseModel, Field
+
+from faststream import FastStream, Logger
+from faststream.kafka import KafkaBroker
+
+
+class HelloWorld(BaseModel):
+    msg: str = Field(
+        ...,
+        examples=["Hello"],
+        description="Demo hello world message",
+    )
+
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+
+@broker.subscriber("hello_world")
+async def on_hello_world(msg: HelloWorld, logger: Logger):
+    logger.info(msg)
+```
+
+## Import FastStream and KafkaBroker
+
+To use the `#!python @broker.subscriber(...)` decorator, we first need to import the base FastStream app and the KafkaBroker to create our broker.
+
+```python linenums="1"
+from faststream import FastStream, Logger
+from faststream.kafka import KafkaBroker
+```
+
+## Define the HelloWorld Message Structure
+
+Next, you need to define the structure of the messages you want to consume from the topic using Pydantic. For the guide, we'll stick to something basic, but you are free to define any complex message structure you wish in your project.
+
+```python linenums="1"
+class HelloWorld(BaseModel):
+    msg: str = Field(
+        ...,
+        examples=["Hello"],
+        description="Demo hello world message",
+    )
+```
+
+## Create a KafkaBroker
+
+Next, we will create a `KafkaBroker` object and wrap it into the `FastStream` object so that we can start our app using the CLI later.
+
+```python linenums="1"
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+```
+
+## Create a Function that will Consume Messages from a Kafka hello_world Topic
+
+Let's create a consumer function that will consume `HelloWorld` messages from the **hello_world** topic and log them.
+
+```python linenums="1"
+@broker.subscriber("hello_world")
+async def on_hello_world(msg: HelloWorld, logger: Logger):
+    logger.info(msg)
+```
+
+The function decorated with the `#!python @broker.subscriber(...)` decorator will be called when a message is produced to Kafka.
+
+The message will then be injected into the typed `msg` argument of the function, and its type will be used to parse the message.
+
+In this example, when a message is sent to the **hello_world** topic, it will be parsed into a `HelloWorld` class, and the `on_hello_world` function will be called with the parsed class as the `msg` argument value.
diff --git a/search/docs/kafka/ack.md b/search/docs/kafka/ack.md
new file mode 100644
index 0000000..46ad0b7
--- /dev/null
+++ b/search/docs/kafka/ack.md
@@ -0,0 +1,88 @@
+# Consuming Acknowledgements
+
+As you may know, a *Kafka* consumer should commit the topic offset when consuming a message.
+
+The default behaviour, also implemented as such in **FastStream**, automatically commits (*acks*) the topic offset on message consumption. This is the *at most once* consuming strategy.
+
+However, if you wish to use the *at least once* strategy, you should commit the offset *AFTER* the message is processed correctly. To accomplish that, set a consumer group and disable the `auto_commit` option like this:
+
+```python
+@broker.subscriber(
+    "test", group_id="group", auto_commit=False
+)
+async def base_handler(body: str):
+    ...
+```
+
+This way, upon successful return of the processing function, the message processed will be acknowledged. In the case of an exception being raised, the message will not be acknowledged.
+
+However, there are situations where you might want to use a different acknowledgement logic.
+
+## Manual Acknowledgement
+
+If you want to acknowledge a message manually, you can get direct access to the message object via the [Context](../getting-started/context/existed.md){.internal-link} and acknowledge the message by calling the `ack` method:
+
+```python
+from faststream.kafka.annotations import KafkaMessage
+
+
+@broker.subscriber(
+    "test", group_id="group", auto_commit=False
+)
+async def base_handler(body: str, msg: KafkaMessage):
+    await msg.ack()
+    # or
+    await msg.nack()
+```
+
+!!! tip
+    You can use the `nack` method to prevent the offset commit, so the message can be consumed by another consumer within the same group.
+
+**FastStream** will see that the message was already acknowledged and will do nothing at the end of the process.
+
+## Interrupt Process
+
+If you wish to interrupt the processing of a message at any call stack level and acknowledge the message, you can achieve that by raising `faststream.exceptions.AckMessage`.
+
+``` python linenums="1" hl_lines="2 18"
+from faststream import FastStream
+from faststream.exceptions import AckMessage
+from faststream.kafka import KafkaBroker
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+
+@broker.subscriber(
+    "test-topic", group_id="test-group", auto_commit=False
+)
+async def handle(body):
+    smth_processing(body)
+
+
+def smth_processing(body):
+    if True:
+        raise AckMessage()
+
+
+@app.after_startup
+async def test_publishing():
+    await broker.publish("Hello!", "test-topic")
+```
+
+This way, **FastStream** interrupts the current message processing and acknowledges it immediately. Similarly, you can raise `NackMessage` to prevent the message offset from being committed, as the sketch below illustrates.
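+
+The following is a minimal sketch of that `NackMessage` flow; the `is_valid` check and its logic are our own illustration, not part of the official docs:
+
+```python linenums="1"
+from faststream import FastStream
+from faststream.exceptions import NackMessage
+from faststream.kafka import KafkaBroker
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+
+def is_valid(body) -> bool:
+    # Hypothetical validation logic - replace with your own checks.
+    return False
+
+
+@broker.subscriber(
+    "test-topic", group_id="test-group", auto_commit=False
+)
+async def handle(body):
+    if not is_valid(body):
+        # Leave the offset uncommitted so the message can be consumed again.
+        raise NackMessage()
+```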
diff --git a/search/docs/kafka/index.md b/search/docs/kafka/index.md
new file mode 100644
index 0000000..a879655
--- /dev/null
+++ b/search/docs/kafka/index.md
@@ -0,0 +1,61 @@
+# Kafka Routing
+
+## Kafka Overview
+
+### What is Kafka?
+
+[Kafka](https://kafka.apache.org/){.external-link target="_blank"} is an open-source distributed streaming platform developed by the Apache Software Foundation. It is designed to handle high-throughput, fault-tolerant, real-time data streaming. Kafka is widely used for building real-time data pipelines and streaming applications.
+
+### Key Kafka Concepts
+
+#### 1. Publish-Subscribe Model
+
+Kafka is built around the publish-subscribe messaging model. In this model, data is published to topics, and multiple consumers can subscribe to these topics to receive the data. This decouples the producers of data from the consumers, allowing for flexibility and scalability.
+
+#### 2. Topics
+
+A **topic** in Kafka is a logical channel or category to which messages are published by producers and from which messages are consumed by consumers. Topics are used to organize and categorize data streams. Each topic can have multiple **partitions**, which enable Kafka to distribute data and provide parallelism for both producers and consumers.
+
+## Kafka Topics
+
+### Understanding Kafka Topics
+
+Topics are fundamental to Kafka and serve as the central point of data distribution. Here are some key points about topics:
+
+- Topics allow you to logically group and categorize messages.
+- Each message sent to Kafka is associated with a specific topic.
+- Topics can have one or more partitions to enable parallel processing and scaling.
+- Consumers subscribe to topics to receive messages.
+
+### FastStream KafkaBroker
+
+The FastStream KafkaBroker is a key component of the FastStream framework that enables seamless integration with Apache Kafka. With the KafkaBroker, developers can easily connect to Kafka brokers, produce messages to Kafka topics, and consume messages from Kafka topics within their FastStream applications.
+
+### Establishing a Connection
+
+To connect to Kafka using the FastStream KafkaBroker module, follow these steps:
+
+1. **Initialize the KafkaBroker instance:** Start by initializing a KafkaBroker instance with the necessary configuration, including the Kafka broker address.
+
+2. **Create your processing logic:** Write a function that will consume the incoming messages in the defined format and produce a response to the defined topic.
+
+3. **Decorate your processing function:** To connect your processing function to the desired Kafka topics, you need to decorate it with the `#!python @broker.subscriber` and `#!python @broker.publisher` decorators. Now, after you start your application, your processing function will be called whenever a new message is available in the subscribed topic, and its return value will be produced to the topic defined in the publisher decorator.
+
+Here's a simplified code example demonstrating how to establish a connection to Kafka using FastStream's KafkaBroker module:
+
+```python linenums="1"
+from faststream import FastStream
+from faststream.kafka import KafkaBroker
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+@broker.subscriber("in-topic")
+@broker.publisher("out-topic")
+async def handle_msg(user: str, user_id: int) -> str:
+    return f"User: {user_id} - {user} registered"
+```
+
+This minimal example illustrates how FastStream simplifies the process of connecting to Kafka and performing basic message processing from the **in-topic** to the **out-topic**. Depending on your specific use case and requirements, you can further customize your Kafka integration with FastStream to build robust and efficient streaming applications.
+
+For more advanced configuration options and detailed usage instructions, please refer to the FastStream Kafka documentation and the [official Kafka documentation](https://kafka.apache.org/){.external-link target="_blank"}.
diff --git a/search/docs/kafka/message.md b/search/docs/kafka/message.md
new file mode 100644
index 0000000..babf867
--- /dev/null
+++ b/search/docs/kafka/message.md
@@ -0,0 +1,53 @@
+# Access to Message Information
+
+As you may know, **FastStream** serializes a message body and provides you access to it through function arguments. However, there are times when you need to access additional message attributes such as offsets, headers, or other metadata.
+
+## Message Access
+
+You can easily access this information by referring to the message object in the [Context](../getting-started/context/existed.md)!
+
+This object serves as a unified **FastStream** wrapper around the native broker library message (for example, `aiokafka.ConsumerRecord` in the case of *Kafka*). It contains most of the required information, including:
+
+* `#!python body: bytes`
+* `#!python checksum: int`
+* `#!python headers: Sequence[Tuple[str, bytes]]`
+* `#!python key: Optional[aiokafka.structs.KT]`
+* `#!python offset: int`
+* `#!python partition: int`
+* `#!python serialized_key_size: int`
+* `#!python serialized_value_size: int`
+* `#!python timestamp: int`
+* `#!python timestamp_type: int`
+* `#!python topic: str`
+* `#!python value: Optional[aiokafka.structs.VT]`
+
+For example, if you would like to access the headers of an incoming message, you would do so like this:
+
+```python hl_lines="1 6"
+from faststream.kafka.annotations import KafkaMessage
+
+@broker.subscriber("test")
+async def base_handler(
+    body: str,
+    msg: KafkaMessage,
+):
+    print(msg.headers)
+```
+
+## Message Fields Access
+
+In most cases, you don't need all the message fields; you only need to know some of them.
+You can use the [Context Fields access](../getting-started/context/fields.md) feature for this.
+
+For example, you can get access to the `headers` like this:
+
+```python hl_lines="6"
+from faststream import Context
+
+@broker.subscriber("test")
+async def base_handler(
+    body: str,
+    headers: str = Context("message.headers"),
+):
+    print(headers)
+```
diff --git a/search/docs/nats/examples/direct.md b/search/docs/nats/examples/direct.md
new file mode 100644
index 0000000..856f66f
--- /dev/null
+++ b/search/docs/nats/examples/direct.md
@@ -0,0 +1,96 @@
+# Direct
+
+The **Direct** Subject is the basic way to route messages in *NATS*. Its essence is very simple:
+a `subject` sends messages to all consumers subscribed to it.
+
+## Scaling
+
+If one `subject` is being listened to by several consumers with the same `queue group`, the message will go to a random consumer each time.
+
+Thus, *NATS* can independently balance the load on queue consumers. You can increase the processing speed of the message flow from the queue by simply launching additional instances of the consumer service. You don't need to make changes to the current infrastructure configuration: *NATS* will take care of how to distribute messages between your services.
+
+## Example
+
+The **Direct** Subject is the type used in **FastStream** by default: you can simply declare it as follows
+
+```python
+@broker.subscriber("test_subject")
+async def handler():
+    ...
+```
+
+Full example:
+
+```python linenums="1"
+from faststream import FastStream, Logger
+from faststream.nats import NatsBroker
+
+broker = NatsBroker()
+app = FastStream(broker)
+
+@broker.subscriber("test-subj-1", "workers")
+async def base_handler1(logger: Logger):
+    logger.info("base_handler1")
+
+@broker.subscriber("test-subj-1", "workers")
+async def base_handler2(logger: Logger):
+    logger.info("base_handler2")
+
+@broker.subscriber("test-subj-2", "workers")
+async def base_handler3(logger: Logger):
+    logger.info("base_handler3")
+
+@app.after_startup
+async def send_messages():
+    await broker.publish("", "test-subj-1")  # handlers: 1 or 2
+    await broker.publish("", "test-subj-1")  # handlers: 1 or 2
+    await broker.publish("", "test-subj-2")  # handlers: 3
+```
+
+### Consumer Announcement
+
+To begin with, we have declared several consumers for two `subjects`: `test-subj-1` and `test-subj-2`:
+
+```python linenums="7" hl_lines="1 5 9"
+@broker.subscriber("test-subj-1", "workers")
+async def base_handler1(logger: Logger):
+    logger.info("base_handler1")
+
+@broker.subscriber("test-subj-1", "workers")
+async def base_handler2(logger: Logger):
+    logger.info("base_handler2")
+
+@broker.subscriber("test-subj-2", "workers")
+async def base_handler3(logger: Logger):
+    logger.info("base_handler3")
+```
+
+!!! note
+    Note that all consumers are subscribed using the same `queue_group`. Within the same service, this does not make sense, since messages will come to these handlers in turn.
+    Here, we emulate the work of several consumers and load balancing between them.
+
+### Message Distribution
+
+Now the distribution of messages between these consumers will look like this:
+
+```python
+    await broker.publish("", "test-subj-1")  # handlers: 1 or 2
+```
+
+Message `1` will be sent to `handler1` or `handler2` because they are listening to the same `subject` within the same `queue group`.
+
+---
+
+```python
+    await broker.publish("", "test-subj-1")  # handlers: 1 or 2
+```
+
+Message `2` will be sent similarly to message `1`.
+
+---
+
+```python
+    await broker.publish("", "test-subj-2")  # handlers: 3
+```
+
+Message `3` will be sent to `handler3` because it is the only one listening to `test-subj-2`.
diff --git a/search/docs/nats/examples/pattern.md b/search/docs/nats/examples/pattern.md
new file mode 100644
index 0000000..4802aca
--- /dev/null
+++ b/search/docs/nats/examples/pattern.md
@@ -0,0 +1,87 @@
+# Pattern
+
+[**Pattern**](https://docs.nats.io/nats-concepts/subjects#wildcards){.external-link target="_blank"} Subject is a powerful *NATS* routing engine. This type of `subject` routes messages to consumers based on the *pattern* specified when they connect to the `subject` and a message key.
+
+## Scaling
+
+If one `subject` is being listened to by several consumers with the same `queue group`, the message will go to a random consumer each time.
+
+Thus, *NATS* can independently balance the load on queue consumers. You can increase the processing speed of the message flow from the queue by simply launching additional instances of the consumer service. You don't need to make changes to the current infrastructure configuration: *NATS* will take care of how to distribute messages between your services.
+
+## Example
+
+```python linenums="1"
+from faststream import FastStream, Logger
+from faststream.nats import NatsBroker
+
+broker = NatsBroker()
+app = FastStream(broker)
+
+@broker.subscriber("*.info", "workers")
+async def base_handler1(logger: Logger):
+    logger.info("base_handler1")
+
+@broker.subscriber("*.info", "workers")
+async def base_handler2(logger: Logger):
+    logger.info("base_handler2")
+
+@broker.subscriber("*.error", "workers")
+async def base_handler3(logger: Logger):
+    logger.info("base_handler3")
+
+@app.after_startup
+async def send_messages():
+    await broker.publish("", "logs.info")   # handlers: 1 or 2
+    await broker.publish("", "logs.info")   # handlers: 1 or 2
+    await broker.publish("", "logs.error")  # handlers: 3
+```
+
+### Consumer Announcement
+
+To begin with, we have declared several consumers for two `subjects`: `*.info` and `*.error`:
+
+```python linenums="7" hl_lines="1 5 9"
+@broker.subscriber("*.info", "workers")
+async def base_handler1(logger: Logger):
+    logger.info("base_handler1")
+
+@broker.subscriber("*.info", "workers")
+async def base_handler2(logger: Logger):
+    logger.info("base_handler2")
+
+@broker.subscriber("*.error", "workers")
+async def base_handler3(logger: Logger):
+    logger.info("base_handler3")
+```
+
+At the same time, in the `subject` of our consumers, we specify the *pattern* that will be processed by these consumers.
+
+!!! note
+    Note that all consumers are subscribed using the same `queue_group`. Within the same service, this does not make sense, since messages will come to these handlers in turn.
+    Here, we emulate the work of several consumers and load balancing between them.
+
+### Message Distribution
+
+Now the distribution of messages between these consumers will look like this:
+
+```python
+    await broker.publish("", "logs.info")  # handlers: 1 or 2
+```
+
+Message `1` will be sent to `handler1` or `handler2` because they listen to the same `subject` template within the same `queue group`.
+
+---
+
+```python
+    await broker.publish("", "logs.info")  # handlers: 1 or 2
+```
+
+Message `2` will be sent similarly to message `1`.
+
+---
+
+```python
+    await broker.publish("", "logs.error")  # handlers: 3
+```
+
+Message `3` will be sent to `handler3` because it is the only one listening to the pattern `*.error`.
diff --git a/search/docs/nats/index.md b/search/docs/nats/index.md
new file mode 100644
index 0000000..5fb8de0
--- /dev/null
+++ b/search/docs/nats/index.md
@@ -0,0 +1,35 @@
+# NATS
+
+!!! note ""
+    **FastStream** *NATS* support is implemented on top of [**nats-py**](https://github.com/nats-io/nats.py){.external-link target="_blank"}. You can always get access to its objects if you need to use some low-level methods not represented in **FastStream**.
+
+## Advantages and Disadvantages
+
+*NATS* is an easy-to-use, high-performance message broker written in *Golang*.
If your application does not require complex routing logic, can cope with high loads, scales well, and does not require large hardware costs, *NATS* will be an excellent choice for you.
+
+Also, *NATS* features zero-cost creation of new entities (to be honest, all `subjects` are just routing fields), so it can be used as an **RPC**-over-**MQ** tool.
+
+!!! note
+    More information about *NATS* can be found on the [official website](https://nats.io){.external-link target="_blank"}.
+
+However, *NATS* has disadvantages that you should be aware of:
+
+* Messages are not persistent. If a message is published while your consumer is disconnected, it will be lost.
+* There are no complex routing mechanisms.
+* There are no mechanisms for confirming receipt and processing of messages from the consumer.
+
+## NATS JetStream
+
+These shortcomings are corrected by using the persistent layer, [**JetStream**](https://docs.nats.io/nats-concepts/jetstream){.external-link target="_blank"}. If you need strict guarantees for the delivery and processing of messages, at a small cost in speed and consumed resources, you can use **NatsJS**.
+
+Also, **NatsJS** supports some high-level features like *Key-Value* and *Object* storages (with subscription to changes on them) and provides you with rich abilities to build your logic on top of it.
+
+## Routing Rules
+
+*NATS* does not have the ability to configure complex routing rules. The only entity in *NATS* is the `subject`, which can be subscribed to either directly by name or by a wildcard pattern.
+
+Both examples are discussed [a little further](./examples/direct.md){.internal-link}.
+
+In order to support the ability to scale consumers horizontally, *NATS* supports the `queue group` functionality:
+a message sent to a `subject` will be processed by a random consumer from the `queue group` subscribed to this `subject`.
+This approach allows you to increase the processing speed of a `subject` by *N* times when starting *N* consumers with one group.
diff --git a/search/docs/nats/jetstream/ack.md b/search/docs/nats/jetstream/ack.md
new file mode 100644
index 0000000..f74b0d0
--- /dev/null
+++ b/search/docs/nats/jetstream/ack.md
@@ -0,0 +1,78 @@
+# Consuming Acknowledgements
+
+As you may know, *NATS* employs a rather extensive [Acknowledgement](https://docs.nats.io/using-nats/developer/develop_jetstream#acknowledging-messages){.external-link target="_blank"} policy.
+
+In most cases, **FastStream** automatically acknowledges (*acks*) messages on your behalf. When your function executes correctly, including sending all responses, a message will be acknowledged (and rejected in case of an exception).
+
+However, there are situations where you might want to use a different acknowledgement logic.
+
+## Retries
+
+If you prefer to use a *nack* instead of a *reject* when there's an error in message processing, you can specify the `retry` flag in the `#!python @broker.subscriber(...)` method, which is responsible for error handling logic.
+
+By default, this flag is set to `False`, indicating that if an error occurs during message processing, the message can still be retrieved from the queue:
+
+```python
+@broker.subscriber("test", retry=False)  # don't handle exceptions
+async def base_handler(body: str):
+    ...
+```
+
+If this flag is set to `True`, the message will be *nack*ed and placed back in the queue each time an error occurs.
In this scenario, the message can be processed by another consumer (if there are several of them) or by the same one:
+
+```python
+@broker.subscriber("test", retry=True)  # try again indefinitely
+async def base_handler(body: str):
+    ...
+```
+
+!!! tip
+    For more complex error handling cases, you can use [tenacity](https://tenacity.readthedocs.io/en/latest/){.external-link target="_blank"}
+
+## Manual Acknowledgement
+
+If you want to acknowledge a message manually, you can get direct access to the message object via the [Context](../../getting-started/context/existed.md){.internal-link} and call the corresponding method:
+
+```python
+from faststream.nats.annotations import NatsMessage
+
+@broker.subscriber("test")
+async def base_handler(body: str, msg: NatsMessage):
+    await msg.ack()
+    # or
+    await msg.nack()
+    # or
+    await msg.reject()
+```
+
+**FastStream** will see that the message was already acknowledged and will do nothing at the end of the process.
+
+## Interrupt Process
+
+If you want to interrupt message processing at any call stack level, you can raise `faststream.exceptions.AckMessage`:
+
+``` python linenums="1" hl_lines="2 16"
+from faststream import FastStream
+from faststream.exceptions import AckMessage
+from faststream.nats import NatsBroker
+
+broker = NatsBroker("nats://localhost:4222")
+app = FastStream(broker)
+
+
+@broker.subscriber("test-subject", stream="test-stream")
+async def handle(body):
+    smth_processing(body)
+
+
+def smth_processing(body):
+    if True:
+        raise AckMessage()
+
+
+@app.after_startup
+async def test_publishing():
+    await broker.publish("Hello!", "test-subject")
+```
+
+This way, **FastStream** interrupts the current message processing and acknowledges it immediately. Also, you can raise `NackMessage` and `RejectMessage` too.
diff --git a/search/docs/nats/jetstream/index.md b/search/docs/nats/jetstream/index.md
new file mode 100644
index 0000000..ddec9ff
--- /dev/null
+++ b/search/docs/nats/jetstream/index.md
@@ -0,0 +1,53 @@
+# NATS JetStream
+
+The default *NATS* usage is suitable for scenarios where:
+
+* The `publisher` and `consumer` are always online.
+* The system can tolerate message loss.
+
+If you need stricter guarantees, like:
+
+* The availability of a message-processing confirmation mechanism (`ack`/`nack`).
+* Message persistence (messages will accumulate in the queue while the `consumer` is offline).
+
+You should use the **NATS JetStream** extension.
+
+In fact, the **JetStream** extension is the same as *NATS*, with the addition of a persistent layer above the file system. Therefore, all interfaces for publishing and consuming messages are similar to regular *NATS* usage.
+
+However, the **JetStream** layer has many configuration possibilities, from the policy for deleting old messages to a limit on the maximum number of stored messages. You can find out more about all **JetStream** features in the official [documentation](https://docs.nats.io/using-nats/developer/develop_jetstream){.external-link target="_blank"}.
+
+!!! tip ""
+    If you have worked with other message brokers, then you should know that the logic of **JS** is closer to **Kafka** than to **RabbitMQ**: messages, after confirmation, are not deleted from the queue but remain there until the queue is full, at which point it starts deleting old messages (or in accordance with other logic that you can configure yourself).
+
+    When connecting a `consumer` (and, especially, when reconnecting), you must determine for yourself according to which logic it will consume messages: from the beginning of the subject, starting with a specific message, starting from a specific time, only new messages, etc. Don't be surprised if, when a connection is restored, your `consumer` starts to process all previously received messages again - it means you haven't defined the rule.
+
+Also, **NATS JetStream** has built-in `key-value` (similar to **Redis**) and `object` (similar to **Minio**) storages, which, in addition to the *put/get* interface, have the ability to subscribe to events, which can be extremely useful in various scenarios.
+
+**FastStream** does not provide access to this functionality directly, but it is covered by the underlying [nats-py](https://github.com/nats-io/nats.py){.external-link target="_blank"} library. You can access the **JS** object from the application context:
+
+```python linenums="1" hl_lines="2 7 11-12 21"
+from faststream import FastStream, Logger
+from faststream.nats import JStream, NatsBroker
+
+broker = NatsBroker()
+app = FastStream(broker)
+
+stream = JStream(name="stream")
+
+@broker.subscriber(
+    "js-subject",
+    stream=stream,
+    deliver_policy="new",
+)
+async def handler(msg: str, logger: Logger):
+    logger.info(msg)
+
+@app.after_startup
+async def test_send():
+    await broker.publish("Hi!", "js-subject")
+    # publish with stream verification
+    await broker.publish("Hi!", "js-subject", stream="stream")
+```
+
+!!! tip
+    When you use a `JStream` object, **FastStream** tries to create/update the stream with the object's settings. To prevent this behavior and *just get an already created stream*, please use the `#!python JStream(..., declare=False)` option.
diff --git a/search/docs/nats/jetstream/key-value.md b/search/docs/nats/jetstream/key-value.md
new file mode 100644
index 0000000..3de9762
--- /dev/null
+++ b/search/docs/nats/jetstream/key-value.md
@@ -0,0 +1,107 @@
+# Key-Value Storage
+
+## Overview
+
+[*Key-Value*](https://docs.nats.io/nats-concepts/jetstream/key-value-store){.external-link target="_blank"} storage is just a high-level interface on top of *NatsJS*.
+
+It is a regular *JetStream*, where the *KV key* is a subject.
+
+`Put`/`Update` an object to *KV* by *key* - it's like publishing a new message to the corresponding subject in the stream.
+
+Thus, the `Get` command returns not only the current *key* value but the latest one along with its offset. Additionally, you can ask for a specific value based on its offset in the *KV* stream.
+
+This interface gives you rich options: use it like a regular *KV* storage (ignoring the offset), subscribe to *KV key* changes, or ask for an old *KV value* revision. So you can use this feature in your application in really different ways. You can find some examples on the *NATS* developers' official [YouTube channel](https://youtube.com/@NATS_io?si=DWHvNFjsLruxg5OZ){.external-link target="_blank"}
+
+## FastStream Details
+
+**FastStream** has no native interfaces to this *NatsJS* functionality (yet), but it allows you to get access to the inner `JetStream` object to create it manually.
+
+First of all, you need to create a *Key-Value* storage object and pass it into the context:
+
+```python linenums="1" hl_lines="14-15"
+from faststream import Context, FastStream, Logger
+from faststream.nats import NatsBroker
+from faststream.nats.annotations import ContextRepo
+
+
+broker = NatsBroker()
+app = FastStream(broker)
+
+
+@app.on_startup
+async def setup_broker(context: ContextRepo):
+    await broker.connect()
+
+    kv = await broker.stream.create_key_value(bucket="bucket")
+    context.set_global("kv", kv)
+```
+
+!!! tip
+    We placed this code in the `#!python @app.on_startup` hook because `#!python @app.after_startup` will be triggered **AFTER** your handlers start consuming messages. So, if you need to have access to any custom context objects, you should set them up in the `#!python @app.on_startup` hook.
+
+    Also, we call the `#!python await broker.connect()` method manually to establish the connection and be able to create the storage.
+
+---
+
+Next, we are ready to use this object right in our handlers.
+
+Let's create an annotated object to shorten context object access:
+
+```python linenums="1" hl_lines="5"
+from nats.js.kv import KeyValue as KV
+from typing_extensions import Annotated
+from faststream import Context
+
+KeyValue = Annotated[KV, Context("kv")]
+```
+
+And just use it in a handler:
+
+```python linenums="1" hl_lines="4 7-8"
+from faststream import Logger
+
+@broker.subscriber("subject")
+async def handler(msg: str, kv: KeyValue, logger: Logger):
+    logger.info(msg)
+
+    kv_data = await kv.get("key")
+    assert kv_data.value == b"Hello!"
+```
+
+Finally, let's test our code behavior by putting something into the KV storage and sending a message:
+
+```python linenums="1" hl_lines="3-4"
+@app.after_startup
+async def test_send(kv: KeyValue):
+    await kv.put("key", b"Hello!")
+    await broker.publish("Hi!", "subject")
+```
+
+??? example "Full listing"
+    ```python linenums="1"
+    from nats.js.kv import KeyValue as KV
+    from typing_extensions import Annotated
+
+    from faststream import Context, FastStream, Logger
+    from faststream.nats import NatsBroker
+    from faststream.nats.annotations import ContextRepo
+
+    KeyValue = Annotated[KV, Context("kv")]
+
+    broker = NatsBroker()
+    app = FastStream(broker)
+
+
+    @broker.subscriber("subject")
+    async def handler(msg: str, kv: KeyValue, logger: Logger):
+        logger.info(msg)
+        kv_data = await kv.get("key")
+        assert kv_data.value == b"Hello!"
+
+
+    @app.on_startup
+    async def setup_broker(context: ContextRepo):
+        await broker.connect()
+
+        kv = await broker.stream.create_key_value(bucket="bucket")
+        context.set_global("kv", kv)
+
+
+    @app.after_startup
+    async def test_send(kv: KeyValue):
+        await kv.put("key", b"Hello!")
+        await broker.publish("Hi!", "subject")
+    ```
diff --git a/search/docs/nats/jetstream/object.md b/search/docs/nats/jetstream/object.md
new file mode 100644
index 0000000..7e24a7a
--- /dev/null
+++ b/search/docs/nats/jetstream/object.md
@@ -0,0 +1,111 @@
+# Object Storage
+
+Object storage is almost identical to the [*Key-Value*](./key-value.md) storage concept, so you can reuse that guide.
+
+## Overview
+
+[*Object Storage*](https://docs.nats.io/nats-concepts/jetstream/obj_store){.external-link target="_blank"} is just a high-level interface on top of *NatsJS*.
+
+It is a regular *JetStream*, where the *Object key* is a subject.
+
+The main difference between *KV* and *Object* storages is that in the *Object* storage, you can store files greater than **1MB** (a limitation of *KV*).
+It has no limit on the maximum object size and stores objects in chunks (each message is an object chunk), so you can literally *stream* huge objects through *NATS*.
+
+## FastStream Details
+
+**FastStream** has no native interfaces to this *NatsJS* functionality (yet), but it allows you to access the inner `JetStream` object to create one manually.
+
+First of all, you need to create an *Object* storage object and pass it into the context:
+
+```python linenums="1" hl_lines="13-14"
+from faststream import FastStream
+from faststream.nats import NatsBroker
+from faststream.nats.annotations import ContextRepo
+
+broker = NatsBroker()
+app = FastStream(broker)
+
+
+@app.on_startup
+async def setup_broker(context: ContextRepo):
+    await broker.connect()
+
+    os = await broker.stream.create_object_store("bucket")
+    context.set_global("OS", os)
+```
+
+!!! tip
+    We placed this code in the `#!python @app.on_startup` hook because `#!python @app.after_startup` will be triggered **AFTER** your handlers start consuming messages. So, if you need access to any custom context objects, you should set them up in the `#!python @app.on_startup` hook.
+
+    Also, we call the `#!python await broker.connect()` method manually to establish the connection and be able to create a storage.
+
+---
+
+Next, we are ready to use this object right in our handlers.
+
+Let's create an `Annotated` object to shorten `Context` object access:
+
+```python linenums="1" hl_lines="6"
+from nats.js.object_store import ObjectStore as OS
+from typing_extensions import Annotated
+
+from faststream import Context
+
+ObjectStorage = Annotated[OS, Context("OS")]
+```
+
+And just use it in a handler:
+
+```python linenums="1" hl_lines="6 8-9"
+from io import BytesIO
+
+from faststream import Logger
+
+@broker.subscriber("subject")
+async def handler(msg: str, os: ObjectStorage, logger: Logger):
+    logger.info(msg)
+    obj = await os.get("file")
+    assert obj.data == b"File mock"
+```
+
+Finally, let's test our code behavior by putting something into the *Object storage* and sending a message:
+
+```python linenums="1" hl_lines="3-4"
+@app.after_startup
+async def test_send(os: ObjectStorage):
+    await os.put("file", BytesIO(b"File mock"))
+    await broker.publish("Hi!", "subject")
+```
+
+!!! tip
+    [`BytesIO`](https://docs.python.org/3/library/io.html#binary-i-o){.external-link target="_blank"} is a *readable* object used to emulate a file opened for reading.
+
+??? example "Full listing"
+    ```python linenums="1"
+    from io import BytesIO
+
+    from nats.js.object_store import ObjectStore as OS
+    from typing_extensions import Annotated
+
+    from faststream import Context, FastStream, Logger
+    from faststream.nats import NatsBroker
+    from faststream.nats.annotations import ContextRepo
+
+    ObjectStorage = Annotated[OS, Context("OS")]
+
+    broker = NatsBroker()
+    app = FastStream(broker)
+
+
+    @broker.subscriber("subject")
+    async def handler(msg: str, os: ObjectStorage, logger: Logger):
+        logger.info(msg)
+        obj = await os.get("file")
+        assert obj.data == b"File mock"
+
+
+    @app.on_startup
+    async def setup_broker(context: ContextRepo):
+        await broker.connect()
+
+        os = await broker.stream.create_object_store("bucket")
+        context.set_global("OS", os)
+
+
+    @app.after_startup
+    async def test_send(os: ObjectStorage):
+        await os.put("file", BytesIO(b"File mock"))
+        await broker.publish("Hi!", "subject")
+    ```
diff --git a/search/docs/nats/message.md b/search/docs/nats/message.md
new file mode 100644
index 0000000..36fd983
--- /dev/null
+++ b/search/docs/nats/message.md
@@ -0,0 +1,104 @@
+# Access to Message Information
+
+As you know, **FastStream** serializes a message body and provides you access to it through function arguments. But sometimes you want access to the message_id, headers, or other meta-information.
+
+## Message Access
+
+You can get it in a simple way: just access the message object in the [Context](../getting-started/context/existed.md){.internal-link}!
+
+It contains the required information such as:
+
+* `#!python body: bytes`
+* `#!python decoded_body: Any`
+* `#!python content_type: str`
+* `#!python reply_to: str`
+* `#!python headers: dict[str, Any]`
+* `#!python message_id: str`
+* `#!python correlation_id: str`
+
+It is a **FastStream** wrapper around the native broker library message (`nats.aio.msg.Msg` in the *NATS* case), which you can access via `raw_message`.
+
+```python hl_lines="1 6"
+from faststream.nats.annotations import NatsMessage
+
+@broker.subscriber("test")
+async def base_handler(
+    body: str,
+    msg: NatsMessage,
+):
+    print(msg.correlation_id)
+```
+
+Also, if you can't find the information you require, you can get access directly to the wrapped `nats.aio.msg.Msg`, which contains complete message information.
+
+```python hl_lines="6"
+from nats.aio.msg import Msg
+from faststream.nats.annotations import NatsMessage
+
+@broker.subscriber("test")
+async def base_handler(body: str, msg: NatsMessage):
+    raw: Msg = msg.raw_message
+    print(raw)
+```
+
+## Message Fields Access
+
+But in most cases, you don't need all message fields; you only need some of them. You can use the [Context Fields access](../getting-started/context/fields.md){.internal-link} feature for this reason.
+
+For example, you can get access to the `correlation_id` like this:
+
+```python hl_lines="6"
+from faststream import Context
+
+@broker.subscriber("test")
+async def base_handler(
+    body: str,
+    cor_id: str = Context("message.correlation_id"),
+):
+    print(cor_id)
+```
+
+Or even directly from the raw message:
+
+```python hl_lines="6"
+from faststream import Context
+
+@broker.subscriber("test")
+async def base_handler(
+    body: str,
+    cor_id: str = Context("message.raw_message.correlation_id"),
+):
+    print(cor_id)
+```
+
+But this code is too long to reuse everywhere.
+In this case, you can use the Python [`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated){.external-link target="_blank"} feature:
+
+=== "python 3.9+"
+    ```python hl_lines="4 9"
+    from typing import Annotated
+    from faststream import Context
+
+    CorrelationId = Annotated[str, Context("message.correlation_id")]
+
+    @broker.subscriber("test")
+    async def base_handler(
+        body: str,
+        cor_id: CorrelationId,
+    ):
+        print(cor_id)
+    ```
+
+=== "python 3.6+"
+    ```python hl_lines="4 9"
+    from typing_extensions import Annotated
+    from faststream import Context
+
+    CorrelationId = Annotated[str, Context("message.correlation_id")]
+
+    @broker.subscriber("test")
+    async def base_handler(
+        body: str,
+        cor_id: CorrelationId,
+    ):
+        print(cor_id)
+    ```
diff --git a/search/docs/nats/publishing/index.md b/search/docs/nats/publishing/index.md
new file mode 100644
index 0000000..898bf2b
--- /dev/null
+++ b/search/docs/nats/publishing/index.md
@@ -0,0 +1,40 @@
+# Publishing
+
+**FastStream** `NatsBroker` supports all regular [publishing use cases](../../getting-started/publishing/index.md){.internal-link}. You can use them without any changes.
+
+However, if you wish to further customize the publishing logic, you should take a deeper look at the specific `NatsBroker` parameters.
+
+## NATS Publishing
+
+`NatsBroker` also uses the unified `publish` method (from a `publisher` object) to send messages.
+
+``` python
+import asyncio
+from faststream.nats import NatsBroker
+
+async def pub():
+    async with NatsBroker() as broker:
+        await broker.publish(
+            "Hi!",
+            subject="test",
+        )
+
+asyncio.run(pub())
+```
+
+## Basic Arguments
+
+The `publish` method accepts the following arguments:
+
+* `#!python message = ""` - message to send.
+* `#!python subject: str` - *subject* where the message will be sent.
+
+## Message Parameters
+
+* `#!python headers: dict[str, str] | None = None` - headers of the message being sent (used by consumers).
+* `#!python correlation_id: str | None = None` - message id, which helps to match the original message with its reply (generated automatically).
+
+## NatsJS Parameters
+
+* `#!python stream: str | None = None` - validate that the subject is part of this stream.
+* `#!python timeout: float | None = None` - how long to wait for the *NATS* server response.
diff --git a/search/docs/nats/rpc.md b/search/docs/nats/rpc.md
new file mode 100644
index 0000000..33a9fe8
--- /dev/null
+++ b/search/docs/nats/rpc.md
@@ -0,0 +1,45 @@
+# RPC over NATS
+
+Because **NATS** has zero cost for creating new subjects, we can easily set up a new subject consumer just for one response message. This way, your request message will be published to one subject, and the response message will be consumed from another (temporary) one, which allows you to use the regular **FastStream RPC** syntax in the **NATS** case too.
+
+!!! tip
+    **FastStream RPC** over **NATS** works in both the *NATS-Core* and *NATS-JS* cases, but in the *NATS-JS* case, you have to specify the expected `stream` as a publish argument.
+
+## Blocking Request
+
+**FastStream** provides you with the ability to send a blocking RPC request over *NATS* in a very simple way.
+
+Just send a message like a regular one and get a response synchronously.
+
+It is very close to the common **requests** syntax:
+
+``` python hl_lines="1 4"
+msg = await broker.publish(
+    "Hi!",
+    subject="test",
+    rpc=True,
+)
+```
+
+Also, you have two extra options to control this behavior:
+
+* `#!python rpc_timeout: Optional[float] = 30.0` - controls how long you are waiting for a response.
+* `#!python raise_timeout: bool = False` - by default, a timed-out request returns `None`, but if you need to raise a `TimeoutException` directly, you can specify this option.
+
+## Reply-To
+
+Also, if you want to create a permanent request-reply data flow, you should probably create a permanent subject to consume responses.
+
+If you have one, you can specify it with the `reply_to` argument. This way, **FastStream** will send a response to this subject automatically.
+
+``` python hl_lines="1 8"
+@broker.subscriber("response-subject")
+async def consume_responses(msg):
+    ...
+
+msg = await broker.publish(
+    "Hi!",
+    subject="test",
+    reply_to="response-subject",
+)
+```
diff --git a/search/docs/rabbit/ack.md b/search/docs/rabbit/ack.md
new file mode 100644
index 0000000..c37b8c3
--- /dev/null
+++ b/search/docs/rabbit/ack.md
@@ -0,0 +1,90 @@
+# Consuming Acknowledgements
+
+As you may know, *RabbitMQ* employs a rather extensive [Acknowledgement](https://www.rabbitmq.com/confirms.html){.external-link target="_blank"} policy.
+
+In most cases, **FastStream** automatically acknowledges (*acks*) messages on your behalf. When your function executes correctly, including sending all responses, the message will be acknowledged (and rejected in case of an exception).
+
+However, there are situations where you might want to use a different acknowledgement logic.
+
+## Retries
+
+If you prefer to use a *nack* instead of a *reject* when there's an error in message processing, you can specify the `retry` flag in the `#!python @broker.subscriber(...)` method, which is responsible for the error handling logic.
+
+By default, this flag is set to `False`, indicating that the message will still be removed from the queue even if an error occurs during processing:
+
+```python
+@broker.subscriber("test", retry=False)  # don't handle exceptions
+async def base_handler(body: str):
+    ...
+```
+
+If this flag is set to `True`, the message will be *nack*ed and placed back in the queue each time an error occurs. In this scenario, the message can be processed by another consumer (if there are several of them) or by the same one:
+
+```python
+@broker.subscriber("test", retry=True)  # try again indefinitely
+async def base_handler(body: str):
+    ...
+```
+
+If the `retry` flag is set to an `int`, the message will be placed back in the queue, and the number of retries will be limited to this number:
+
+```python
+@broker.subscriber("test", retry=3)  # make up to 3 attempts
+async def base_handler(body: str):
+    ...
+```
+
+!!! bug
+    At the moment, attempts are counted only by the current consumer. If the message goes to another consumer, it will have its own counter.
+    Subsequently, this logic will be reworked.
+
+!!! tip
+    For more complex error handling cases, you can use [tenacity](https://tenacity.readthedocs.io/en/latest/){.external-link target="_blank"}.
+
+## Manual Acknowledgement
+
+If you want to acknowledge a message manually, you can get direct access to the message object via the [Context](../getting-started/context/existed.md){.internal-link} and call the appropriate method.
+
+```python
+from faststream.rabbit.annotations import RabbitMessage
+
+@broker.subscriber("test")
+async def base_handler(body: str, msg: RabbitMessage):
+    await msg.ack()
+    # or
+    await msg.nack()
+    # or
+    await msg.reject()
+```
+
+**FastStream** will see that the message has already been acknowledged and will do nothing at the end of processing.
+
+## Interrupt Process
+
+If you want to interrupt message processing at any point in the call stack, you can raise `faststream.exceptions.AckMessage`:
+
+``` python linenums="1" hl_lines="2 16"
+from faststream import FastStream
+from faststream.exceptions import AckMessage
+from faststream.rabbit import RabbitBroker
+
+broker = RabbitBroker("amqp://guest:guest@localhost:5672/")
+app = FastStream(broker)
+
+
+@broker.subscriber("test-queue")
+async def handle(body):
+    smth_processing(body)
+
+
+def smth_processing(body):
+    if True:
+        raise AckMessage()
+
+
+@app.after_startup
+async def test_publishing():
+    await broker.publish("Hello!", "test-queue")
+```
+
+This way, **FastStream** interrupts the current message processing and acknowledges it immediately. Also, you can raise `NackMessage` and `RejectMessage` too.
diff --git a/search/docs/rabbit/declare.md b/search/docs/rabbit/declare.md
new file mode 100644
index 0000000..469e08a
--- /dev/null
+++ b/search/docs/rabbit/declare.md
@@ -0,0 +1,40 @@
+# RabbitMQ Queue/Exchange Declaration
+
+**FastStream** declares and validates all *RabbitMQ* exchanges and queues used by your *publishers* and *subscribers*, but sometimes you need to declare them manually.
+
+**RabbitBroker** provides a way to achieve this easily.
+
+``` python linenums="1" hl_lines="15-20 22-27"
+from faststream import FastStream
+from faststream.rabbit import (
+    ExchangeType,
+    RabbitBroker,
+    RabbitExchange,
+    RabbitQueue,
+)
+
+broker = RabbitBroker()
+app = FastStream(broker)
+
+
+@app.after_startup
+async def declare_smth():
+    await broker.declare_exchange(
+        RabbitExchange(
+            name="some-exchange",
+            type=ExchangeType.FANOUT,
+        )
+    )
+
+    await broker.declare_queue(
+        RabbitQueue(
+            name="some-queue",
+            durable=True,
+        )
+    )
+```
+
+These methods require just one argument (`RabbitQueue`/`RabbitExchange`) containing information about the required *RabbitMQ* objects. They declare/validate the *RabbitMQ* objects and return low-level, robust **aio-pika** objects to interact with.
+
+!!! tip
+    Also, these methods are idempotent, so you can call them with the same arguments multiple times, but the objects will be created only once; the next time, the method will return the already stored object. This way, you can get access to any queue/exchange created automatically.
diff --git a/search/docs/rabbit/examples/direct.md b/search/docs/rabbit/examples/direct.md
new file mode 100644
index 0000000..c2a1fb0
--- /dev/null
+++ b/search/docs/rabbit/examples/direct.md
@@ -0,0 +1,129 @@
+# Direct Exchange
+
+The **Direct** Exchange is the basic way to route messages in *RabbitMQ*. Its core is very simple: the `exchange` sends messages to the queues whose binding `routing_key` matches the `routing_key` of the message being sent.
+
+!!! note
+    The **Default** Exchange, to which all queues in *RabbitMQ* are subscribed, has the **Direct** type by default.
+
+## Scaling
+
+If several consumers are listening to the same queue, each message will be delivered to one of them (round-robin). This behavior is common for all types of `exchange` because it relates to the queue itself. The type of `exchange` only affects which queues a message gets into.
+
+Thus, *RabbitMQ* can independently balance the load on queue consumers. You can increase the processing speed of the message flow from the queue by launching additional instances of a consumer service. You don't need to make changes to the current infrastructure configuration: *RabbitMQ* will take care of distributing messages between your services.
+
+## Example
+
+!!! tip
+    The **Direct** Exchange is the type used in **FastStream** by default. You can simply declare it as follows:
+
+    ```python
+    @broker.subscriber("test_queue", "test_exchange")
+    async def handler():
+        ...
+    ```
+
+The argument `auto_delete=True` in this and subsequent examples is used only to clear the *RabbitMQ* state after the example runs.
+
+```python linenums="1"
+from faststream import FastStream, Logger
+from faststream.rabbit import RabbitBroker, RabbitExchange, RabbitQueue
+
+broker = RabbitBroker()
+app = FastStream(broker)
+
+exch = RabbitExchange("exchange", auto_delete=True)
+
+queue_1 = RabbitQueue("test-q-1", auto_delete=True)
+queue_2 = RabbitQueue("test-q-2", auto_delete=True)
+
+
+@broker.subscriber(queue_1, exch)
+async def base_handler1(logger: Logger):
+    logger.info("base_handler1")
+
+
+@broker.subscriber(queue_1, exch)  # another service
+async def base_handler2(logger: Logger):
+    logger.info("base_handler2")
+
+
+@broker.subscriber(queue_2, exch)
+async def base_handler3(logger: Logger):
+    logger.info("base_handler3")
+
+
+@app.after_startup
+async def send_messages():
+    await broker.publish(queue="test-q-1", exchange=exch)  # handlers: 1
+    await broker.publish(queue="test-q-1", exchange=exch)  # handlers: 2
+    await broker.publish(queue="test-q-1", exchange=exch)  # handlers: 1
+    await broker.publish(queue="test-q-2", exchange=exch)  # handlers: 3
+```
+
+### Consumer Announcement
+
+First, we declare our **Direct** exchange and several queues that will listen to it:
+
+```python linenums="7"
+exch = RabbitExchange("exchange", auto_delete=True)
+
+queue_1 = RabbitQueue("test-q-1", auto_delete=True)
+queue_2 = RabbitQueue("test-q-2", auto_delete=True)
+```
+
+Then we subscribe several consumers to the `exchange` we created, using the declared queues:
+
+```python linenums="13" hl_lines="1 6 11"
+@broker.subscriber(queue_1, exch)
+async def base_handler1(logger: Logger):
+    logger.info("base_handler1")
+
+
+@broker.subscriber(queue_1, exch)  # another service
+async def base_handler2(logger: Logger):
+    logger.info("base_handler2")
+
+
+@broker.subscriber(queue_2, exch)
+async def base_handler3(logger: Logger):
+    logger.info("base_handler3")
+```
+
+!!! note
+    `handler1` and `handler2` are subscribed to the same `exchange` using the same queue:
+    within a single service, this does not make sense, since messages will come to these handlers in turn.
+    Here we emulate the work of several consumers and the load balancing between them.
+
+### Message Distribution
+
+Now, the distribution of messages between these consumers will look like this:
+
+```python linenums="30"
+    await broker.publish(queue="test-q-1", exchange=exch)  # handlers: 1
+```
+
+Message `1` will be sent to `handler1` because it listens to the `exchange` using a queue with the routing key `test-q-1`.
+
+---
+
+```python linenums="31"
+    await broker.publish(queue="test-q-1", exchange=exch)  # handlers: 2
+```
+
+Message `2` will be sent to `handler2` because it listens to the `exchange` using the same queue, but `handler1` is busy.
+
+---
+
+```python linenums="32"
+    await broker.publish(queue="test-q-1", exchange=exch)  # handlers: 1
+```
+
+Message `3` will be sent to `handler1` again because it is currently free.
+
+---
+
+```python linenums="33"
+    await broker.publish(queue="test-q-2", exchange=exch)  # handlers: 3
+```
+
+Message `4` will be sent to `handler3` because it is the only one listening to the `exchange` using a queue with the routing key `test-q-2`.
diff --git a/search/docs/rabbit/examples/fanout.md b/search/docs/rabbit/examples/fanout.md
new file mode 100644
index 0000000..6cb9de1
--- /dev/null
+++ b/search/docs/rabbit/examples/fanout.md
@@ -0,0 +1,93 @@
+# Fanout Exchange
+
+The **Fanout** Exchange is an even simpler, but slightly less popular, way of routing in *RabbitMQ*. This type of `exchange` sends messages to all queues subscribed to it, ignoring any arguments of the message.
+
+At the same time, if several consumers listen to the queue, messages will also be distributed among them.
+
+## Example
+
+```python linenums="1"
+from faststream import FastStream, Logger
+from faststream.rabbit import ExchangeType, RabbitBroker, RabbitExchange, RabbitQueue
+
+broker = RabbitBroker()
+app = FastStream(broker)
+
+exch = RabbitExchange("exchange", auto_delete=True, type=ExchangeType.FANOUT)
+
+queue_1 = RabbitQueue("test-q-1", auto_delete=True)
+queue_2 = RabbitQueue("test-q-2", auto_delete=True)
+
+
+@broker.subscriber(queue_1, exch)
+async def base_handler1(logger: Logger):
+    logger.info("base_handler1")
+
+
+@broker.subscriber(queue_1, exch)  # another service
+async def base_handler2(logger: Logger):
+    logger.info("base_handler2")
+
+
+@broker.subscriber(queue_2, exch)
+async def base_handler3(logger: Logger):
+    logger.info("base_handler3")
+
+
+@app.after_startup
+async def send_messages():
+    await broker.publish(exchange=exch)  # handlers: 1, 2, 3
+    await broker.publish(exchange=exch)  # handlers: 1, 2, 3
+    await broker.publish(exchange=exch)  # handlers: 1, 2, 3
+    await broker.publish(exchange=exch)  # handlers: 1, 2, 3
+```
+
+### Consumer Announcement
+
+To begin with, we declare our **Fanout** exchange and several queues that will listen to it:
+
+```python linenums="7" hl_lines="1"
+exch = RabbitExchange("exchange", auto_delete=True, type=ExchangeType.FANOUT)
+
+queue_1 = RabbitQueue("test-q-1", auto_delete=True)
+queue_2 = RabbitQueue("test-q-2", auto_delete=True)
+```
+
+Then we subscribe several consumers to the `exchange` we created, using the declared queues:
+
+```python linenums="13" hl_lines="1 6 11"
+@broker.subscriber(queue_1, exch)
+async def base_handler1(logger: Logger):
+    logger.info("base_handler1")
+
+
+@broker.subscriber(queue_1, exch)  # another service
+async def base_handler2(logger: Logger):
+    logger.info("base_handler2")
+
+
+@broker.subscriber(queue_2, exch)
+async def base_handler3(logger: Logger):
+    logger.info("base_handler3")
+```
+
+!!! note
+    `handler1` and `handler2` are subscribed to the same `exchange` using the same queue:
+    within a single service, this does not make sense, since messages will come to these handlers in turn.
+    Here we emulate the work of several consumers and the load balancing between them.
+
+### Message Distribution
+
+Now, all messages will be sent to all subscribers because their queues are bound to the same **FANOUT** exchange:
+
+```python linenums="30"
+    await broker.publish(exchange=exch)  # handlers: 1, 2, 3
+    await broker.publish(exchange=exch)  # handlers: 1, 2, 3
+    await broker.publish(exchange=exch)  # handlers: 1, 2, 3
+    await broker.publish(exchange=exch)  # handlers: 1, 2, 3
+```
+
+---
+
+!!! note
+    When sending messages to a **Fanout** exchange, it makes no sense to specify the `queue` or `routing_key` arguments, because they will be ignored.
diff --git a/search/docs/rabbit/examples/headers.md b/search/docs/rabbit/examples/headers.md
new file mode 100644
index 0000000..017bcb7
--- /dev/null
+++ b/search/docs/rabbit/examples/headers.md
@@ -0,0 +1,181 @@
+# Header Exchange
+
+The **Header** Exchange is the most complex and flexible way to route messages in *RabbitMQ*. This `exchange` type sends messages to queues by matching the queue binding arguments with the message headers.
+
+At the same time, if several consumers are subscribed to the queue, messages will also be distributed among them.
+
+## Example
+
+```python linenums="1"
+from faststream import FastStream, Logger
+from faststream.rabbit import ExchangeType, RabbitBroker, RabbitExchange, RabbitQueue
+
+broker = RabbitBroker()
+app = FastStream(broker)
+
+exch = RabbitExchange("exchange", auto_delete=True, type=ExchangeType.HEADERS)
+
+queue_1 = RabbitQueue(
+    "test-queue-1",
+    auto_delete=True,
+    bind_arguments={"key": 1},
+)
+queue_2 = RabbitQueue(
+    "test-queue-2",
+    auto_delete=True,
+    bind_arguments={"key": 2, "key2": 2, "x-match": "any"},
+)
+queue_3 = RabbitQueue(
+    "test-queue-3",
+    auto_delete=True,
+    bind_arguments={"key": 2, "key2": 2, "x-match": "all"},
+)
+
+
+@broker.subscriber(queue_1, exch)
+async def base_handler1(logger: Logger):
+    logger.info("base_handler1")
+
+
+@broker.subscriber(queue_1, exch)  # another service
+async def base_handler2(logger: Logger):
+    logger.info("base_handler2")
+
+
+@broker.subscriber(queue_2, exch)
+async def base_handler3(logger: Logger):
+    logger.info("base_handler3")
+
+
+@broker.subscriber(queue_3, exch)
+async def base_handler4(logger: Logger):
+    logger.info("base_handler4")
+
+
+@app.after_startup
+async def send_messages():
+    await broker.publish(exchange=exch, headers={"key": 1})  # handlers: 1
+    await broker.publish(exchange=exch, headers={"key": 1})  # handlers: 2
+    await broker.publish(exchange=exch, headers={"key": 1})  # handlers: 1
+    await broker.publish(exchange=exch, headers={"key": 2})  # handlers: 3
+    await broker.publish(exchange=exch, headers={"key2": 2})  # handlers: 3
+    await broker.publish(
+        exchange=exch, headers={"key": 2, "key2": 2.0}
+    )  # handlers: 3, 4
+```
+
+### Consumer Announcement
+
+First, we declare our **Header** exchange and several queues that will listen to it:
+
+```python linenums="7" hl_lines="1 6 11 16"
+exch = RabbitExchange("exchange", auto_delete=True, type=ExchangeType.HEADERS)
+
+queue_1 = RabbitQueue(
+    "test-queue-1",
+    auto_delete=True,
+    bind_arguments={"key": 1},
+)
+queue_2 = RabbitQueue(
+    "test-queue-2",
+    auto_delete=True,
+    bind_arguments={"key": 2, "key2": 2, "x-match": "any"},
+)
+queue_3 = RabbitQueue(
+    "test-queue-3",
+    auto_delete=True,
+    bind_arguments={"key": 2, "key2": 2, "x-match": "all"},
+)
+```
+
+The `x-match` argument indicates whether the binding arguments should match the message headers in whole (`all`) or in part (`any`), as illustrated below.
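+
+For instance, here is a minimal sketch of how the two modes treat the same message headers; the header values are hypothetical, and the bindings are the ones declared above:
+
+```python
+# Hypothetical message headers:
+headers = {"key": 2}
+
+# queue_2 binding: {"key": 2, "key2": 2, "x-match": "any"}
+#   -> matches: at least one bound argument ("key") is present and equal.
+# queue_3 binding: {"key": 2, "key2": 2, "x-match": "all"}
+#   -> does NOT match: "key2" is missing from the message headers.
+```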
+
+Then we subscribe several consumers to the `exchange` we created, using the declared queues:
+
+```python linenums="26" hl_lines="1 6 11 16"
+@broker.subscriber(queue_1, exch)
+async def base_handler1(logger: Logger):
+    logger.info("base_handler1")
+
+
+@broker.subscriber(queue_1, exch)  # another service
+async def base_handler2(logger: Logger):
+    logger.info("base_handler2")
+
+
+@broker.subscriber(queue_2, exch)
+async def base_handler3(logger: Logger):
+    logger.info("base_handler3")
+
+
+@broker.subscriber(queue_3, exch)
+async def base_handler4(logger: Logger):
+    logger.info("base_handler4")
+```
+
+!!! note
+    `handler1` and `handler2` are subscribed to the same `exchange` using the same queue:
+    within a single service, this does not make sense, since messages will come to these handlers in turn.
+    Here we emulate the work of several consumers and the load balancing between them.
+
+### Message Distribution
+
+Now, the distribution of messages between these consumers will look like this:
+
+```python linenums="48"
+    await broker.publish(exchange=exch, headers={"key": 1})  # handlers: 1
+```
+
+Message `1` will be sent to `handler1` because it listens to a queue whose binding argument `key` matches the `key` header of the message.
+
+---
+
+```python linenums="49"
+    await broker.publish(exchange=exch, headers={"key": 1})  # handlers: 2
+```
+
+Message `2` will be sent to `handler2` because it listens to the `exchange` using the same queue, but `handler1` is busy.
+
+---
+
+```python linenums="50"
+    await broker.publish(exchange=exch, headers={"key": 1})  # handlers: 1
+```
+
+Message `3` will be sent to `handler1` again because it is currently free.
+
+---
+
+```python linenums="51"
+    await broker.publish(exchange=exch, headers={"key": 2})  # handlers: 3
+```
+
+Message `4` will be sent to `handler3` because it listens to a queue whose binding argument `key` matches the `key` header of the message.
+
+---
+
+```python linenums="52"
+    await broker.publish(exchange=exch, headers={"key2": 2})  # handlers: 3
+```
+
+Message `5` will be sent to `handler3` because it listens to a queue whose binding argument `key2` matches the `key2` header of the message.
+
+---
+
+```python linenums="53"
+    await broker.publish(
+        exchange=exch, headers={"key": 2, "key2": 2.0}
+    )  # handlers: 3, 4
+```
+
+Message `6` will be sent to `handler3` and `handler4` because the message headers completely match the queues' binding arguments.
+
+---
+
+!!! note
+    When sending messages to a **Header** exchange, it makes no sense to specify the `queue` or `routing_key` arguments, because they will be ignored.
+
+!!! warning
+    For incredibly complex routes, you can bind an `exchange` to another `exchange`. In this case, all the same rules apply as for queues subscribed to an `exchange`. The only difference is that the bound `exchange` can further distribute messages according to its own rules.
+
+    So, for example, you can combine the **Topic** and **Header** exchange types.
diff --git a/search/docs/rabbit/examples/index.md b/search/docs/rabbit/examples/index.md
new file mode 100644
index 0000000..167f06f
--- /dev/null
+++ b/search/docs/rabbit/examples/index.md
@@ -0,0 +1,36 @@
+# Basic Subscriber
+
+If you know nothing about *RabbitMQ* and how it works, you will still be able to use **FastStream RabbitBroker**.
+
+Just use the `#!python @broker.subscriber(...)` method with a string as a routing key.
+
+```python linenums="1"
+from faststream import FastStream
+from faststream.rabbit import RabbitBroker
+
+broker = RabbitBroker()
+app = FastStream(broker)
+
+
+@broker.subscriber("routing_key")  # handle messages by routing key
+async def handle(msg):
+    print(msg)
+
+
+@app.after_startup
+async def test_publish():
+    await broker.publish(
+        "message",
+        "routing_key",  # publish message with routing key
+    )
+```
+
+This is the principle all **FastStream** brokers work by: you don't need to learn them in depth if you want to *just send a message*.
+
+## RabbitMQ Details
+
+If you are already familiar with *RabbitMQ* logic, you should also be acquainted with the inner workings of the example mentioned above.
+
+In this case, **FastStream** either creates or validates a queue with the specified **routing_key** and binds it to the default *RabbitMQ* exchange.
+
+If you want to specify a *queue*-*exchange* pair with additional arguments, **FastStream** provides you with the ability to do so. You can use the special `RabbitQueue` and `RabbitExchange` objects to configure RabbitMQ queues, exchanges, and binding properties. For examples of using various types of exchanges, please refer to the following articles.
diff --git a/search/docs/rabbit/examples/stream.md b/search/docs/rabbit/examples/stream.md
new file mode 100644
index 0000000..697879e
--- /dev/null
+++ b/search/docs/rabbit/examples/stream.md
@@ -0,0 +1,36 @@
+# RabbitMQ Streams
+
+*RabbitMQ* has a [Streams](https://www.rabbitmq.com/streams.html){.external-link target="_blank"} feature, which is closely related to *Kafka* topics.
+
+The main difference from regular *RabbitMQ* queues is that messages are not deleted after being consumed.
+
+And **FastStream** supports this feature as well!
+
+```python linenums="1" hl_lines="4 10-12 17"
+from faststream import FastStream, Logger
+from faststream.rabbit import RabbitBroker, RabbitQueue
+
+broker = RabbitBroker(max_consumers=10)
+app = FastStream(broker)
+
+queue = RabbitQueue(
+    name="test-stream",
+    durable=True,
+    arguments={
+        "x-queue-type": "stream",
+    },
+)
+
+
+@broker.subscriber(
+    queue,
+    consume_args={"x-stream-offset": "first"},
+)
+async def handle(msg, logger: Logger):
+    logger.info(msg)
+
+
+@app.after_startup
+async def test():
+    await broker.publish("Hi!", queue)
+```
diff --git a/search/docs/rabbit/examples/topic.md b/search/docs/rabbit/examples/topic.md
new file mode 100644
index 0000000..7b32df7
--- /dev/null
+++ b/search/docs/rabbit/examples/topic.md
@@ -0,0 +1,113 @@
+# Topic Exchange
+
+The **Topic** Exchange is a powerful *RabbitMQ* routing tool. This type of `exchange` sends messages to queues according to the *pattern* specified when the queue was bound to the `exchange`, matched against the `routing_key` of the message itself.
+
+At the same time, if several consumers are subscribed to the queue, messages will be distributed among them.
+
+## Example
+
+```python linenums="1"
+from faststream import FastStream, Logger
+from faststream.rabbit import ExchangeType, RabbitBroker, RabbitExchange, RabbitQueue
+
+broker = RabbitBroker()
+app = FastStream(broker)
+
+exch = RabbitExchange("exchange", auto_delete=True, type=ExchangeType.TOPIC)
+
+queue_1 = RabbitQueue("test-queue-1", auto_delete=True, routing_key="*.info")
+queue_2 = RabbitQueue("test-queue-2", auto_delete=True, routing_key="*.debug")
+
+
+@broker.subscriber(queue_1, exch)
+async def base_handler1(logger: Logger):
+    logger.info("base_handler1")
+
+
+@broker.subscriber(queue_1, exch)  # another service
+async def base_handler2(logger: Logger):
+    logger.info("base_handler2")
+
+
+@broker.subscriber(queue_2, exch)
+async def base_handler3(logger: Logger):
+    logger.info("base_handler3")
+
+
+@app.after_startup
+async def send_messages():
+    await broker.publish(routing_key="logs.info", exchange=exch)  # handlers: 1
+    await broker.publish(routing_key="logs.info", exchange=exch)  # handlers: 2
+    await broker.publish(routing_key="logs.info", exchange=exch)  # handlers: 1
+    await broker.publish(routing_key="logs.debug", exchange=exch)  # handlers: 3
+```
+
+### Consumer Announcement
+
+First, we declare our **Topic** exchange and several queues that will listen to it:
+
+```python linenums="7" hl_lines="1 3-4"
+exch = RabbitExchange("exchange", auto_delete=True, type=ExchangeType.TOPIC)
+
+queue_1 = RabbitQueue("test-queue-1", auto_delete=True, routing_key="*.info")
+queue_2 = RabbitQueue("test-queue-2", auto_delete=True, routing_key="*.debug")
+```
+
+At the same time, in the `routing_key` of our queues, we specify the *pattern* of routing keys that the queue will process.
+
+Then we subscribe several consumers to the `exchange` we created, using the declared queues:
+
+```python linenums="13" hl_lines="1 6 11"
+@broker.subscriber(queue_1, exch)
+async def base_handler1(logger: Logger):
+    logger.info("base_handler1")
+
+
+@broker.subscriber(queue_1, exch)  # another service
+async def base_handler2(logger: Logger):
+    logger.info("base_handler2")
+
+
+@broker.subscriber(queue_2, exch)
+async def base_handler3(logger: Logger):
+    logger.info("base_handler3")
+```
+
+!!! note
+    `handler1` and `handler2` are subscribed to the same `exchange` using the same queue:
+    within a single service, this does not make sense, since messages will come to these handlers in turn.
+    Here we emulate the work of several consumers and the load balancing between them.
+
+### Message Distribution
+
+Now, the distribution of messages between these consumers will look like this:
+
+```python linenums="30"
+    await broker.publish(routing_key="logs.info", exchange=exch)  # handlers: 1
+```
+
+Message `1` will be sent to `handler1` because it listens to the `exchange` using a queue with the routing key `*.info`.
+
+---
+
+```python linenums="31"
+    await broker.publish(routing_key="logs.info", exchange=exch)  # handlers: 2
+```
+
+Message `2` will be sent to `handler2` because it listens to the `exchange` using the same queue, but `handler1` is busy.
+
+---
+
+```python linenums="32"
+    await broker.publish(routing_key="logs.info", exchange=exch)  # handlers: 1
+```
+
+Message `3` will be sent to `handler1` again because it is currently free.
+
+---
+
+```python linenums="33"
+    await broker.publish(routing_key="logs.debug", exchange=exch)  # handlers: 3
+```
+
+Message `4` will be sent to `handler3` because it is the only one listening to the `exchange` using a queue with the routing key `*.debug`.
diff --git a/search/docs/rabbit/index.md b/search/docs/rabbit/index.md
new file mode 100644
index 0000000..92abf6f
--- /dev/null
+++ b/search/docs/rabbit/index.md
@@ -0,0 +1,69 @@
+# Rabbit Routing
+
+!!! note ""
+    **FastStream** *RabbitMQ* support is implemented on top of [**aio-pika**](https://aio-pika.readthedocs.io/en/latest/){.external-link target="_blank"}. You can always access its objects if you need to use some low-level methods not represented in **FastStream**.
+
+## Advantages
+
+The advantage of *RabbitMQ* is the ability to configure flexible and complex message routing scenarios.
+
+*RabbitMQ* covers the whole range of routing: from one queue with one consumer to a queue fed from several sources, including message prioritization.
+
+!!! note
+    For more information about *RabbitMQ*, please visit the [official documentation](https://www.rabbitmq.com/tutorials/amqp-concepts.html){.external-link target="_blank"}.
+
+It supports acknowledging successfully processed messages, marking them as processed with an error, removing them from the queue (unlike **Kafka**, processed messages cannot be received again), locking a message for the duration of its processing, and monitoring its current status.
+
+Having to keep track of the current status of all messages is one cause of *RabbitMQ*'s performance issues. With really large message volumes, *RabbitMQ* starts to degrade. However, if it was a one-time influx, consumers will drain the queue of messages and the "health" of *RabbitMQ* will remain stable.
+
+If your scenario is not based on processing millions of messages but requires building complex routing logic, *RabbitMQ* will be the right choice.
+
+## Basic Concepts
+
+If you want to fully understand how *RabbitMQ* works, you should visit the official website, where you will find high-level explanations of the basic concepts and usage examples.
+
+### Entities
+
+*RabbitMQ* works with three main entities:
+
+* `Exchange` - the point of receiving messages from the *publisher*
+* `Queue` - the point of pushing messages to the *consumer*
+* `Binding` - the relationship between *queue-exchange* or *exchange-exchange*
+
+### Routing Rules
+
+The rules for delivering messages to consumers depend on the **type of exchange** and the **binding** parameters. All the main options will be discussed in the [examples](examples/direct.md){.internal-link}.
+
+In general, the message path looks like this:
+
+1. The *publisher* sends a message to the `exchange`, specifying its `routing_key` and headers, according to which routing will take place.
+2. The `exchange`, depending on the message parameters, determines which of the subscribed `bindings` to send the message to.
+3. The `binding` delivers the message to a `queue` or another `exchange` (in the latter case, that exchange sends it further according to its own rules).
+4. The `queue`, after receiving a message, pushes it to one of its subscribed consumers (**PUSH API**).
+
+!!! tip
+    By default, all queues have a `binding` to the `default exchange` (**Direct** type) with a **routing key** corresponding to their name.
+    In **FastStream**, queues are connected to this `exchange`, and messages are sent to it by default unless another `exchange` is explicitly specified.
+
+    !!! warning ""
+        Connecting the queue to any other `exchange` will still leave it subscribed to the `default exchange`. Be careful with this.
+
+At this stage, the message gets into your application - and you start processing it.
+
+### Message Statuses
+
+*RabbitMQ* requires confirmation of message processing: only after confirmation will a message be removed from the queue.
+
+The confirmation can be either positive (`Acknowledgment - ack`), if the message was successfully processed, or negative (`Negative Acknowledgment - nack`), if the message was processed with an error.
+
+At the same time, in case of an error, the message can also be removed from the queue (`reject`); otherwise, after a negative confirmation, it will be requeued for processing again.
+
+In most cases, **FastStream** performs all the necessary actions by itself. However, if you want to manage the message lifecycle directly, you can access the message object itself and call the appropriate methods directly. This can be useful if you want to implement an "at most once" policy and need to confirm the consumption of the message before it is actually processed.
+
+## **FastStream** Specific
+
+**FastStream** omits the ability to create `bindings` directly, since in most cases, you do not need to subscribe one queue to several `exchanges` or subscribe `exchanges` to each other. On the contrary, this practice leads to over-complication of the message routing scheme, which makes it difficult to maintain and further develop the entire infrastructure of services.
+
+**FastStream** suggests you adhere to the scheme `exchange:queue` as `1:N`, which will greatly simplify the scheme of interaction between your services. It is better to create an additional queue for a new `exchange` than to subscribe to an existing one.
+
+However, if you want to reduce the number of entities in your *RabbitMQ*, and thereby optimize its performance (or you know exactly what you are doing), **FastStream** leaves you the option to create `bindings` directly. Otherwise, the binding parameters are an integral part of the **RabbitQueue** and **RabbitExchange** entities in **FastStream**.
diff --git a/search/docs/rabbit/message.md b/search/docs/rabbit/message.md
new file mode 100644
index 0000000..97180cd
--- /dev/null
+++ b/search/docs/rabbit/message.md
@@ -0,0 +1,104 @@
+# Access to Message Information
+
+As you know, **FastStream** serializes a message body and provides you access to it through function arguments. But sometimes you want access to a message_id, headers, or other meta-information.
+
+## Message Access
+
+You can get it in a simple way: just access the message object in the [Context](../getting-started/context/existed.md){.internal-link}!
+
+This message contains the required information such as:
+
+* `#!python body: bytes`
+* `#!python decoded_body: Any`
+* `#!python content_type: str`
+* `#!python reply_to: str`
+* `#!python headers: dict[str, Any]`
+* `#!python message_id: str`
+* `#!python correlation_id: str`
+
+It is a **FastStream** wrapper around the native broker library message (`aio_pika.IncomingMessage` in the *RabbitMQ* case), which you can access via `raw_message`.
+
+```python hl_lines="1 6"
+from faststream.rabbit.annotations import RabbitMessage
+
+@broker.subscriber("test")
+async def base_handler(
+    body: str,
+    msg: RabbitMessage,
+):
+    print(msg.correlation_id)
+```
+
+Also, if you can't find the information you require, you can get access directly to the wrapped `aio_pika.IncomingMessage`, which contains complete message information.
+
+```python hl_lines="6"
+from aio_pika import IncomingMessage
+from faststream.rabbit.annotations import RabbitMessage
+
+@broker.subscriber("test")
+async def base_handler(body: str, msg: RabbitMessage):
+    raw: IncomingMessage = msg.raw_message
+    print(raw)
+```
+
+## Message Fields Access
+
+But in most cases, you don't need all message fields; you only need some of them. You can use the [Context Fields access](../getting-started/context/fields.md){.internal-link} feature for this reason.
+
+For example, you can get access to the `correlation_id` like this:
+
+```python hl_lines="6"
+from faststream import Context
+
+@broker.subscriber("test")
+async def base_handler(
+    body: str,
+    cor_id: str = Context("message.correlation_id"),
+):
+    print(cor_id)
+```
+
+Or even directly from the raw message:
+
+```python hl_lines="6"
+from faststream import Context
+
+@broker.subscriber("test")
+async def base_handler(
+    body: str,
+    cor_id: str = Context("message.raw_message.correlation_id"),
+):
+    print(cor_id)
+```
+
+But this code is too long to be reused everywhere. In this case, you can use the Python [`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated){.external-link target="_blank"} feature:
+
+=== "python 3.9+"
+    ```python hl_lines="4 9"
+    from typing import Annotated
+    from faststream import Context
+
+    CorrelationId = Annotated[str, Context("message.correlation_id")]
+
+    @broker.subscriber("test")
+    async def base_handler(
+        body: str,
+        cor_id: CorrelationId,
+    ):
+        print(cor_id)
+    ```
+
+=== "python 3.6+"
+    ```python hl_lines="4 9"
+    from typing_extensions import Annotated
+    from faststream import Context
+
+    CorrelationId = Annotated[str, Context("message.correlation_id")]
+
+    @broker.subscriber("test")
+    async def base_handler(
+        body: str,
+        cor_id: CorrelationId,
+    ):
+        print(cor_id)
+    ```
diff --git a/search/docs/rabbit/publishing.md b/search/docs/rabbit/publishing.md
new file mode 100644
index 0000000..f703695
--- /dev/null
+++ b/search/docs/rabbit/publishing.md
@@ -0,0 +1,83 @@
+# Publishing
+
+**FastStream** `RabbitBroker` supports all regular [publishing use cases](../getting-started/publishing/index.md){.internal-link}. You can use them without any changes.
+
+However, if you wish to customize the publishing logic further, you should take a deeper look at the specific `RabbitBroker` parameters.
+
+## Rabbit Publishing
+
+`RabbitBroker` also uses the unified `publish` method (from a `publisher` object) to send messages.
+
+However, in this case, an object of the `aio_pika.Message` class (if necessary) can be used as a message (in addition to Python primitives and `pydantic.BaseModel`).
+
+You can specify the queue (used as the routing key) and, optionally, the exchange to send to by their names.
+
+``` python
+import asyncio
+from faststream.rabbit import RabbitBroker
+
+async def pub():
+    async with RabbitBroker() as broker:
+        await broker.publish(
+            "Hi!",
+            queue="test",
+            exchange="test"
+        )
+
+asyncio.run(pub())
+```
+
+If you don't specify any exchange, the message will be sent to the default one.
+
+Also, you are able to use the special **RabbitQueue** and **RabbitExchange** objects as the `queue` and `exchange` arguments:
+
+``` python
+from faststream.rabbit import RabbitExchange, RabbitQueue
+
+await broker.publish(
+    "Hi!",
+    queue=RabbitQueue("test"),
+    exchange=RabbitExchange("test")
+)
+```
+
+If you specify an exchange that doesn't exist, `RabbitBroker` will create the required one and then publish the message to it.
+
+!!! tip
+    Be careful with this: if you have already created an **Exchange** with specific parameters and try to send a message to it by exchange name, the broker will try to create it again, so an **Exchange** parameters conflict will occur.
+
+    If you are trying to send a message to a specific **Exchange**, sending it with a defined **RabbitExchange** object is the preferred way.
+
+## Basic Arguments
+
+The `publish` method takes the following arguments:
+
+* `#!python message = ""` - message to send
+* `#!python exchange: str | RabbitExchange | None = None` - the exchange where the message will be sent to. If not specified, the *default* one is used
+* `#!python queue: str | RabbitQueue = ""` - the queue where the message will be sent (since most queues use their name as the routing key, this is a human-readable version of `routing_key`)
+* `#!python routing_key: str = ""` - also a message routing key; if not specified, the `queue` argument will be used
+
+## Message Parameters
+
+You can read more about all the available flags in the [RabbitMQ documentation](https://www.rabbitmq.com/consumers.html){.external-link target="_blank"}.
+
+* `#!python headers: dict[str, Any] | None = None` - message headers (used by consumers)
+* `#!python content_type: str | None = None` - the content_type of the message being sent (set automatically, used by consumers)
+* `#!python content_encoding: str | None = None` - encoding of the message (used by consumers)
+* `#!python persist: bool = False` - restore messages on *RabbitMQ* reboot
+* `#!python priority: int | None = None` - the priority of the message
+* `#!python correlation_id: str | None = None` - message id, which helps to match the original message with its reply (generated automatically)
+* `#!python message_id: str | None = None` - message ID (generated automatically)
+* `#!python timestamp: int | float | timedelta | datetime | None = None` - message sending time (set automatically)
+* `#!python expiration: int | float | timedelta | datetime | None = None` - message lifetime (in seconds)
+* `#!python type: str | None = None` - the type of message (used by consumers)
+* `#!python user_id: str | None = None` - ID of the *RabbitMQ* user who sent the message
+* `#!python app_id: str | None = None` - ID of the application that sent the message (used by consumers)
+
+## Send Flags
+
+Arguments for sending a message:
+
+* `#!python mandatory: bool = True` - the client is waiting for confirmation that the message will be placed in some queue (if there are no queues, return it to the sender)
+* `#!python immediate: bool = False` - the client expects that there is a consumer ready to take the message to work "right now" (if there is no consumer, return it to the sender)
+* `#!python timeout: int | float | None = None` - send confirmation time from *RabbitMQ*
diff --git a/search/docs/rabbit/rpc.md b/search/docs/rabbit/rpc.md
new file mode 100644
index 0000000..57704a3
--- /dev/null
+++ b/search/docs/rabbit/rpc.md
@@ -0,0 +1,42 @@
+# RPC over RMQ
+
+## Blocking Request
+
+**FastStream** provides you with the ability to send a blocking RPC request over *RabbitMQ* in a very simple way.
+
+It uses the [**Direct Reply-To**](https://www.rabbitmq.com/direct-reply-to.html){.external-link target="_blank"} *RabbitMQ* feature, so you don't need to create any queues to consume a response.
+
+Just send a message like a regular one and get a response synchronously.
+
+It is very close to the common **requests** syntax:
+
+``` python hl_lines="1 4"
+msg = await broker.publish(
+    "Hi!",
+    queue="test",
+    rpc=True,
+)
+```
+
+Also, you have two extra options to control this behavior:
+
+* `#!python rpc_timeout: Optional[float] = 30.0` - controls how long you are waiting for a response
+* `#!python raise_timeout: bool = False` - by default, a timed-out request returns `None`, but if you need to raise a `TimeoutException` directly, you can specify this option
+
+## Reply-To
+
+Also, if you want to create a permanent request-reply data flow, you should probably create a permanent queue to consume responses.
+
+If you have one, you can specify it with the `reply_to` argument. This way, **FastStream** will send a response to this queue automatically.
+
+``` python hl_lines="1 8"
+@broker.subscriber("response-queue")
+async def consume_responses(msg):
+    ...
+
+msg = await broker.publish(
+    "Hi!",
+    queue="test",
+    reply_to="response-queue",
+)
+```
diff --git a/search/docs/release.md b/search/docs/release.md
new file mode 100644
index 0000000..c094e07
--- /dev/null
+++ b/search/docs/release.md
@@ -0,0 +1,36 @@
+---
+hide:
+  - navigation
+  - footer
+---
+
+# Release Notes
+
+**FastStream** is a new package based on the ideas and experiences gained from [FastKafka](https://github.com/airtai/fastkafka){.external-link target="_blank"} and [Propan](https://github.com/lancetnik/propan){.external-link target="_blank"}. By joining our forces, we picked up the best from both packages and created a unified way to write services capable of processing streamed data regardless of the underlying protocol. We'll continue to maintain both packages, but new development will happen in this project. If you are starting a new service, this package is the recommended way to do it.
+
+## Features
+
+[**FastStream**](https://faststream.airt.ai/) simplifies the process of writing producers and consumers for message queues, handling all the
+parsing, networking, and documentation generation automatically.
+
+Making streaming microservices has never been easier. Designed with junior developers in mind, **FastStream** simplifies your work while keeping the door open for more advanced use cases. Here's a look at the core features that make **FastStream** a go-to framework for modern, data-centric microservices.
+
+- **Multiple Brokers**: **FastStream** provides a unified API to work across multiple message brokers (**Kafka** and **RabbitMQ** supported)
+
+- [**Pydantic Validation**](#writing-app-code): Leverage [**Pydantic's**](https://docs.pydantic.dev/){.external-link target="_blank"} validation capabilities to serialize and validate incoming messages
+
+- [**Automatic Docs**](#project-documentation): Stay ahead with automatic [AsyncAPI](https://www.asyncapi.com/){.external-link target="_blank"} documentation.
+
+- **Intuitive**: Full-typed editor support makes your development experience smooth, catching errors before they reach runtime
+
+- [**Powerful Dependency Injection System**](#dependencies): Manage your service dependencies efficiently with **FastStream**'s built-in DI system.
+ +- [**Testable**](#testing-the-service): supports in-memory tests, making your CI/CD pipeline faster and more reliable + +- **Extendable**: use extensions for lifespans, custom serialization and middlewares + +- [**Integrations**](#any-framework): **FastStream** is fully compatible with any HTTP framework you want ([**FastAPI**](#fastapi-plugin) especially) + +- **Built for Automatic Code Generation**: **FastStream** is optimized for automatic code generation using advanced models like GPT and Llama + +That's **FastStream** in a nutshell—easy, efficient, and powerful. Whether you're just starting with streaming microservices or looking to scale, **FastStream** has got you covered. diff --git a/search/examples/__init__.py b/search/examples/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/search/examples/example_add_and_publish_with_key/__init__.py b/search/examples/example_add_and_publish_with_key/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/search/examples/example_add_and_publish_with_key/app.py b/search/examples/example_add_and_publish_with_key/app.py new file mode 100644 index 0000000..bf92ecc --- /dev/null +++ b/search/examples/example_add_and_publish_with_key/app.py @@ -0,0 +1,55 @@ +from datetime import datetime +from typing import List + +from pydantic import BaseModel, Field + +from faststream import Context, ContextRepo, FastStream, Logger +from faststream.kafka import KafkaBroker + + +class Point(BaseModel): + x: float = Field( + ..., examples=[0.5], description="The X Coordinate in the coordinate system" + ) + y: float = Field( + ..., examples=[0.5], description="The Y Coordinate in the coordinate system" + ) + time: datetime = Field( + ..., + examples=["2020-04-23 10:20:30.400000"], + description="The timestamp of the record", + ) + + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + + +to_output_data = broker.publisher("output_data") + + +@app.on_startup +async def app_setup(context: ContextRepo): + message_history: List[Point] = [] + context.set_global("message_history", message_history) + + +@broker.subscriber("input_data") +async def on_input_data( + msg: Point, + logger: Logger, + message_history: List[Point] = Context(), + key: bytes = Context("message.raw_message.key"), +) -> None: + logger.info(f"{msg=}") + + message_history.append(msg) + + x_sum = 0 + y_sum = 0 + for msg in message_history: + x_sum += msg.x + y_sum += msg.y + + point_sum = Point(x=x_sum, y=y_sum, time=datetime.now()) + await to_output_data.publish(point_sum, key=key) diff --git a/search/examples/example_add_and_publish_with_key/app_skeleton.py b/search/examples/example_add_and_publish_with_key/app_skeleton.py new file mode 100644 index 0000000..c123128 --- /dev/null +++ b/search/examples/example_add_and_publish_with_key/app_skeleton.py @@ -0,0 +1,58 @@ +from datetime import datetime +from typing import List + +from pydantic import BaseModel, Field + +from faststream import Context, ContextRepo, FastStream, Logger +from faststream.kafka import KafkaBroker + + +class Point(BaseModel): + x: float = Field( + ..., examples=[0.5], description="The X Coordinate in the coordinate system" + ) + y: float = Field( + ..., examples=[0.5], description="The Y Coordinate in the coordinate system" + ) + time: datetime = Field( + ..., + examples=["2020-04-23 10:20:30.400000"], + description="The timestamp of the record", + ) + + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + + +to_output_data = broker.publisher("output_data") + + 
+
+@app.on_startup
+async def app_setup(context: ContextRepo):
+    """
+    Set all the necessary global variables inside the ContextRepo object:
+    Set message_history for storing all input messages
+    """
+    raise NotImplementedError()
+
+
+@broker.subscriber("input_data")
+async def on_input_data(
+    msg: Point,
+    logger: Logger,
+    message_history: List[Point] = Context(),
+    key: bytes = Context("message.raw_message.key"),
+) -> None:
+    """
+    Processes a message from the 'input_data' topic.
+    Add all x elements from the memory (x_sum) and all y elements from the memory (y_sum) and publish a message with x_sum and y_sum to the output_data topic.
+    The same partition key should be used in the input_data and output_data topics.
+
+    Instructions:
+    1. Consume a message from the 'input_data' topic.
+    2. Create a new message object (do not directly modify the original).
+    3. Add all x elements from the memory (x_sum) and all y elements from the memory (y_sum).
+    4. Publish the message with x_sum and y_sum to the output_data topic (the same partition key should be used in the input_data and output_data topics).
+    """
+    raise NotImplementedError()
diff --git a/search/examples/example_add_and_publish_with_key/description.txt b/search/examples/example_add_and_publish_with_key/description.txt
new file mode 100644
index 0000000..7768996
--- /dev/null
+++ b/search/examples/example_add_and_publish_with_key/description.txt
@@ -0,0 +1,12 @@
+Develop a FastStream application using a localhost Kafka broker.
+The app should consume messages from the input_data topic.
+The input message is a JSON-encoded object including three attributes:
+    - x: float
+    - y: float
+    - time: datetime
+
+The input_data topic should use a partition key.
+
+Keep all the previous messages in memory.
+While consuming a message, add all x elements from the memory (x_sum) and all y elements from the memory (y_sum) and publish a message with x_sum and y_sum to the output_data topic.
+The same partition key should be used in the input_data and output_data topics.
diff --git a/search/examples/example_add_and_publish_with_key/test_app.py b/search/examples/example_add_and_publish_with_key/test_app.py
new file mode 100644
index 0000000..db9741a
--- /dev/null
+++ b/search/examples/example_add_and_publish_with_key/test_app.py
@@ -0,0 +1,33 @@
+from datetime import datetime
+
+import pytest
+from freezegun import freeze_time
+
+from faststream import Context, TestApp
+from faststream._compat import model_to_jsonable
+from faststream.kafka import TestKafkaBroker
+
+from .app import Point, app, broker
+
+
+@broker.subscriber("output_data")
+async def on_output_data(msg: Point, key: bytes = Context("message.raw_message.key")):
+    pass
+
+
+# Freeze time so the datetime always uses the same time
+@freeze_time("2023-01-01")
+@pytest.mark.asyncio
+async def test_point_was_incremented():
+    async with TestKafkaBroker(broker):
+        async with TestApp(app):
+            time = datetime.now()
+            await broker.publish(
+                Point(x=1.0, y=2.0, time=time), "input_data", key=b"point_key"
+            )
+            await broker.publish(
+                Point(x=1.0, y=2.0, time=time), "input_data", key=b"point_key"
+            )
+
+            point_json = model_to_jsonable(Point(x=2.0, y=4.0, time=time))
+            on_output_data.mock.assert_called_with(point_json)
diff --git a/search/examples/example_add_and_publish_with_key2/__init__.py b/search/examples/example_add_and_publish_with_key2/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/search/examples/example_add_and_publish_with_key2/app.py b/search/examples/example_add_and_publish_with_key2/app.py
new file mode 100644
index 0000000..1735051
--- /dev/null
+++ b/search/examples/example_add_and_publish_with_key2/app.py
@@ -0,0 +1,54 @@
+from typing import List
+
+from pydantic import BaseModel, Field
+
+from faststream import Context, ContextRepo, FastStream, Logger
+from faststream.kafka import KafkaBroker
+
+
+class Point(BaseModel):
+    x: float = Field(
+        ..., examples=[0.5], description="The X Coordinate in the coordinate system"
+    )
+    y: float = Field(
+        ..., examples=[0.5], description="The Y Coordinate in the coordinate system"
+    )
+
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+
+to_output_data = broker.publisher("output_data")
+
+
+@app.on_startup
+async def app_setup(context: ContextRepo):
+    message_history: List[Point] = []
+    context.set_global("message_history", message_history)
+
+
+@broker.subscriber("input_data")
+async def on_input_data(
+    msg: Point,
+    logger: Logger,
+    message_history: List[Point] = Context(),
+    key: bytes = Context("message.raw_message.key"),
+) -> None:
+    logger.info(f"{msg=}")
+
+    message_history.append(msg)
+
+    if len(message_history) > 100:
+        message_history.pop(0)
+
+    last_100_messages = message_history[-100:]
+
+    x_sum = 0
+    y_sum = 0
+    for point in last_100_messages:
+        x_sum += point.x
+        y_sum += point.y
+
+    point_sum = Point(x=x_sum, y=y_sum)
+    await to_output_data.publish(point_sum, key=key)
diff --git a/search/examples/example_add_and_publish_with_key2/app_skeleton.py b/search/examples/example_add_and_publish_with_key2/app_skeleton.py
new file mode 100644
index 0000000..e118f64
--- /dev/null
+++ b/search/examples/example_add_and_publish_with_key2/app_skeleton.py
@@ -0,0 +1,54 @@
+from typing import List
+
+from pydantic import BaseModel, Field
+
+from faststream import Context, ContextRepo, FastStream, Logger
+from faststream.kafka import KafkaBroker
+
+
+class Point(BaseModel):
+    x: float = Field(
+        ..., examples=[0.5], description="The X Coordinate in the coordinate system"
+    )
+    y: float = Field(
+        ..., examples=[0.5],
description="The Y Coordinate in the coordinate system" + ) + + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + + +to_output_data = broker.publisher("output_data") + + +@app.on_startup +async def app_setup(context: ContextRepo): + """ + Set all necessary global variables inside ContextRepo object: + Set message_history for storing all input messages + """ + raise NotImplementedError() + + +@broker.subscriber("input_data") +async def on_input_data( + msg: Point, + logger: Logger, + message_history: List[Point] = Context(), + key: bytes = Context("message.raw_message.key"), +) -> None: + """ + Processes a message from the 'input_data' topic. + Keep only the last 100 messages in memory. + Add all x elements from the memory (x_sum) and all y from the memory (y_sum) and publish the message with x_sum and y_sum to the output_data topic. + The same partition key should be used in the input_data and output_data topic. + + Instructions: + 1. Consume a message from 'input_data' topic. + 2. Keep only the last 100 messages in memory. + 3. Create a new message object (do not directly modify the original). + 4. Add all x elements from the memory (x_sum) and all y from the memory (y_sum) + 5. Publish the message with x_sum and y_sum to the output_data topic. (The same partition key should be used in the input_data and output_data topic). + """ + raise NotImplementedError() diff --git a/search/examples/example_add_and_publish_with_key2/description.txt b/search/examples/example_add_and_publish_with_key2/description.txt new file mode 100644 index 0000000..caf61b3 --- /dev/null +++ b/search/examples/example_add_and_publish_with_key2/description.txt @@ -0,0 +1,11 @@ +Develop a FastStream application using localhost kafka broker. +The app should consume messages from the input_data topic. +The input message is a JSON encoded object including two attributes: + - x: float + - y: float + +input_data topic should use partition key. + +Keep only the last 100 messages in memory. +While consuming the message, add all x elements from the memory (x_sum) and all y from the memory (y_sum) and publish the message with x_sum and y_sum to the output_data topic. +The same partition key should be used in the input_data and output_data topic. 
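The app above caps its memory by calling message_history.pop(0) once the list exceeds 100 entries. A collections.deque with maxlen gives the same bounded history without the manual pop; a minimal FastStream-independent sketch:

from collections import deque

# maxlen makes the deque evict its oldest element automatically on append
message_history: deque = deque(maxlen=100)

for value in range(150):
    message_history.append(value)

assert len(message_history) == 100
assert message_history[0] == 50  # the first 50 values were evicted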
diff --git a/search/examples/example_add_and_publish_with_key2/test_app.py b/search/examples/example_add_and_publish_with_key2/test_app.py new file mode 100644 index 0000000..028244f --- /dev/null +++ b/search/examples/example_add_and_publish_with_key2/test_app.py @@ -0,0 +1,21 @@ +import pytest + +from faststream import Context, TestApp +from faststream.kafka import TestKafkaBroker + +from .app import Point, app, broker + + +@broker.subscriber("output_data") +async def on_output_data(msg: Point, key: bytes = Context("message.raw_message.key")): + pass + + +@pytest.mark.asyncio +async def test_point_was_incremented(): + async with TestKafkaBroker(broker): + async with TestApp(app): + await broker.publish(Point(x=1.0, y=2.0), "input_data", key=b"point_key") + await broker.publish(Point(x=1.0, y=2.0), "input_data", key=b"point_key") + + on_output_data.mock.assert_called_with(dict(Point(x=2.0, y=4.0))) diff --git a/search/examples/example_calculate_mean_temperature/__init__.py b/search/examples/example_calculate_mean_temperature/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/search/examples/example_calculate_mean_temperature/app.py b/search/examples/example_calculate_mean_temperature/app.py new file mode 100644 index 0000000..1498fce --- /dev/null +++ b/search/examples/example_calculate_mean_temperature/app.py @@ -0,0 +1,53 @@ +from datetime import datetime +from statistics import mean +from typing import Dict, List + +from pydantic import BaseModel, Field, NonNegativeFloat + +from faststream import Context, ContextRepo, FastStream, Logger +from faststream.kafka import KafkaBroker + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + + +class Weather(BaseModel): + temperature: float = Field( + ..., examples=[20], description="Temperature in Celsius degrees" + ) + windspeed: NonNegativeFloat = Field( + ..., examples=[20], description="Wind speed in kilometers per hour" + ) + timestamp: datetime = Field( + ..., + examples=["2020-04-23 10:20:30.400000"], + description="The timestamp of the record", + ) + + +publisher = broker.publisher("temperature_mean") + + +@app.on_startup +async def app_setup(context: ContextRepo): + message_history: Dict[str, List[float]] = {} + context.set_global("message_history", message_history) + + +@broker.subscriber("weather") +async def on_weather( + msg: Weather, + logger: Logger, + message_history: Dict[str, List[float]] = Context(), + key: bytes = Context("message.raw_message.key"), +) -> None: + logger.info(f"Weather info {msg=}") + + weather_key = key.decode("utf-8") + if weather_key not in message_history: + message_history[weather_key] = [] + + message_history[weather_key].append(msg.temperature) + + mean_temperature = mean(message_history[weather_key][-5:]) + await publisher.publish(mean_temperature, key=key) diff --git a/search/examples/example_calculate_mean_temperature/app_skeleton.py b/search/examples/example_calculate_mean_temperature/app_skeleton.py new file mode 100644 index 0000000..e26adc7 --- /dev/null +++ b/search/examples/example_calculate_mean_temperature/app_skeleton.py @@ -0,0 +1,57 @@ +from datetime import datetime +from typing import Dict, List + +from pydantic import BaseModel, Field, NonNegativeFloat + +from faststream import Context, ContextRepo, FastStream, Logger +from faststream.kafka import KafkaBroker + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + + +class Weather(BaseModel): + temperature: float = Field( + ..., examples=[20], description="Temperature in Celsius degrees" + 
)
+    windspeed: NonNegativeFloat = Field(
+        ..., examples=[20], description="Wind speed in kilometers per hour"
+    )
+    timestamp: datetime = Field(
+        ...,
+        examples=["2020-04-23 10:20:30.400000"],
+        description="The timestamp of the record",
+    )
+
+
+publisher = broker.publisher("temperature_mean")
+
+
+@app.on_startup
+async def app_setup(context: ContextRepo):
+    """
+    Set all necessary global variables inside ContextRepo object:
+    Set message_history for storing all input messages
+    """
+    raise NotImplementedError()
+
+
+@broker.subscriber("weather")
+async def on_weather(
+    msg: Weather,
+    logger: Logger,
+    message_history: Dict[str, List[float]] = Context(),
+    key: bytes = Context("message.raw_message.key"),
+) -> None:
+    """
+    Processes a message from the 'weather' topic. This topic uses a partition key.
+    Calculate the temperature mean of the last 5 messages for the given partition key.
+    Publish the temperature mean to the temperature_mean topic and use the same partition key which the weather topic is using.
+
+    Instructions:
+    1. Consume a message from 'weather' topic.
+    2. Save each message to a dictionary (global variable) - partition key should be used as a dictionary key and value should be a List of temperatures.
+    3. Calculate the temperature mean of the last 5 messages for the given partition key.
+    4. Publish the temperature mean to the temperature_mean topic and use the same partition key which the weather topic is using.
+    """
+    raise NotImplementedError()
diff --git a/search/examples/example_calculate_mean_temperature/description.txt b/search/examples/example_calculate_mean_temperature/description.txt
new file mode 100644
index 0000000..aa70dbb
--- /dev/null
+++ b/search/examples/example_calculate_mean_temperature/description.txt
@@ -0,0 +1,11 @@
+Create a FastStream application for consuming messages from the weather topic.
+This topic needs to use a partition key.
+
+weather messages use JSON with three attributes:
+ - temperature (type float)
+ - windspeed (type float)
+ - timestamp (type datetime)
+
+Application should save each message to a dictionary (global variable) - partition key should be used as a dictionary key and value should be a List of temperatures.
+Calculate the temperature mean of the last 5 messages for the given partition key.
+Publish the temperature mean to the temperature_mean topic and use the same partition key which the weather topic is using.
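The core computation here is a rolling mean over the last five temperatures recorded for one partition key. A minimal sketch of just that step in plain Python (names are illustrative), matching the mean(...[-5:]) call in app.py:

from statistics import mean
from typing import Dict, List

history: Dict[str, List[float]] = {}


def add_reading(key: str, temperature: float) -> float:
    # Append the new reading, then average the most recent five for this key
    history.setdefault(key, []).append(temperature)
    return mean(history[key][-5:])


assert add_reading("ZG", 20.5) == 20.5
assert add_reading("ZG", 10.5) == 15.5  # the value asserted in test_app.py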
diff --git a/search/examples/example_calculate_mean_temperature/test_app.py b/search/examples/example_calculate_mean_temperature/test_app.py
new file mode 100644
index 0000000..ddf934a
--- /dev/null
+++ b/search/examples/example_calculate_mean_temperature/test_app.py
@@ -0,0 +1,47 @@
+from datetime import datetime
+
+import pytest
+from freezegun import freeze_time
+
+from faststream import Context, TestApp
+from faststream._compat import model_to_jsonable
+from faststream.kafka import TestKafkaBroker
+
+from .app import Weather, app, broker, on_weather
+
+
+@broker.subscriber("temperature_mean")
+async def on_temperature_mean(
+    msg: float, key: bytes = Context("message.raw_message.key")
+):
+    pass
+
+
+# Freeze time so the datetime always uses the same time
+@freeze_time("2023-01-01")
+@pytest.mark.asyncio
+async def test_mean_temperature_calculated():
+    async with TestKafkaBroker(broker):
+        async with TestApp(app):
+            timestamp = datetime.now()
+            await broker.publish(
+                Weather(temperature=20.5, windspeed=20, timestamp=timestamp),
+                "weather",
+                key=b"ZG",
+            )
+            weather_json = model_to_jsonable(
+                Weather(temperature=20.5, windspeed=20, timestamp=timestamp)
+            )
+            on_weather.mock.assert_called_with(weather_json)
+
+            await broker.publish(
+                Weather(temperature=10.5, windspeed=20, timestamp=timestamp),
+                "weather",
+                key=b"ZG",
+            )
+            weather_json = model_to_jsonable(
+                Weather(temperature=10.5, windspeed=20, timestamp=timestamp)
+            )
+            on_weather.mock.assert_called_with(weather_json)
+
+            on_temperature_mean.mock.assert_called_with(15.5)
diff --git a/search/examples/example_consume_publish_with_key/__init__.py b/search/examples/example_consume_publish_with_key/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/search/examples/example_consume_publish_with_key/app.py b/search/examples/example_consume_publish_with_key/app.py
new file mode 100644
index 0000000..dec320b
--- /dev/null
+++ b/search/examples/example_consume_publish_with_key/app.py
@@ -0,0 +1,36 @@
+from datetime import datetime
+
+from pydantic import BaseModel, Field
+
+from faststream import Context, FastStream, Logger
+from faststream.kafka import KafkaBroker
+
+
+class Point(BaseModel):
+    x: float = Field(
+        ..., examples=[0.5], description="The X Coordinate in the coordinate system"
+    )
+    y: float = Field(
+        ..., examples=[0.5], description="The Y Coordinate in the coordinate system"
+    )
+    time: datetime = Field(
+        ...,
+        examples=["2020-04-23 10:20:30.400000"],
+        description="The timestamp of the record",
+    )
+
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+
+to_output_data = broker.publisher("output_data")
+
+
+@broker.subscriber("input_data")
+async def on_input_data(
+    msg: Point, logger: Logger, key: bytes = Context("message.raw_message.key")
+) -> None:
+    logger.info(f"{msg=}")
+    incremented_point = Point(x=msg.x + 1, y=msg.y + 1, time=datetime.now())
+    await to_output_data.publish(incremented_point, key=key)
diff --git a/search/examples/example_consume_publish_with_key/app_skeleton.py b/search/examples/example_consume_publish_with_key/app_skeleton.py
new file mode 100644
index 0000000..15138a1
--- /dev/null
+++ b/search/examples/example_consume_publish_with_key/app_skeleton.py
@@ -0,0 +1,45 @@
+from datetime import datetime
+
+from pydantic import BaseModel, Field
+
+from faststream import Context, FastStream, Logger
+from faststream.kafka import KafkaBroker
+
+
+class Point(BaseModel):
+    x: float = Field(
+        ..., examples=[0.5], description="The X Coordinate in the coordinate system"
+    )
+    y: float = Field(
+        ..., examples=[0.5], description="The Y Coordinate in the coordinate system"
+    )
+    time: datetime = Field(
+        ...,
+        examples=["2020-04-23 10:20:30.400000"],
+        description="The timestamp of the record",
+    )
+
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+
+to_output_data = broker.publisher("output_data")
+
+
+@broker.subscriber("input_data")
+async def on_input_data(
+    msg: Point, logger: Logger, key: bytes = Context("message.raw_message.key")
+) -> None:
+    """
+    Processes a message from the 'input_data' topic.
+    Increment msg x and y attributes by 1 and publish that message to the output_data topic.
+    The same partition key should be used in the input_data and output_data topic.
+
+    Instructions:
+    1. Consume a message from 'input_data' topic.
+    2. Create a new message object (do not directly modify the original).
+    3. Increment msg x and y attributes by 1.
+    4. Publish that message to the output_data topic (The same partition key should be used in the input_data and output_data topic).
+    """
+    raise NotImplementedError()
diff --git a/search/examples/example_consume_publish_with_key/description.txt b/search/examples/example_consume_publish_with_key/description.txt
new file mode 100644
index 0000000..29fd9aa
--- /dev/null
+++ b/search/examples/example_consume_publish_with_key/description.txt
@@ -0,0 +1,10 @@
+Develop a FastStream application using localhost kafka broker.
+The app should consume messages from the input_data topic.
+The input message is a JSON encoded object including three attributes:
+ - x: float
+ - y: float
+ - time: datetime
+
+input_data topic should use partition key.
+While consuming the message, increment x and y attributes by 1 and publish that message to the output_data topic.
+The same partition key should be used in the input_data and output_data topic.
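The skeleton insists on building a new message instead of mutating the consumed one. Besides constructing Point(...) explicitly as app.py does, pydantic models can produce an updated copy; a minimal sketch assuming pydantic v1's copy(update=...) (model_copy(update=...) in pydantic v2):

from datetime import datetime

from pydantic import BaseModel


class Point(BaseModel):
    x: float
    y: float
    time: datetime


original = Point(x=1.0, y=2.0, time=datetime.now())

# copy(update=...) returns a new instance; the consumed message stays untouched
incremented = original.copy(update={"x": original.x + 1, "y": original.y + 1})

assert (incremented.x, incremented.y) == (2.0, 3.0)
assert (original.x, original.y) == (1.0, 2.0)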
diff --git a/search/examples/example_consume_publish_with_key/test_app.py b/search/examples/example_consume_publish_with_key/test_app.py
new file mode 100644
index 0000000..6e50e72
--- /dev/null
+++ b/search/examples/example_consume_publish_with_key/test_app.py
@@ -0,0 +1,30 @@
+from datetime import datetime
+
+import pytest
+from freezegun import freeze_time
+
+from faststream import Context
+from faststream._compat import model_to_jsonable
+from faststream.kafka import TestKafkaBroker
+
+from .app import Point, broker, on_input_data
+
+
+@broker.subscriber("output_data")
+async def on_output_data(msg: Point, key: bytes = Context("message.raw_message.key")):
+    pass
+
+
+# Freeze time so the datetime always uses the same time
+@freeze_time("2023-01-01")
+@pytest.mark.asyncio
+async def test_point_was_incremented():
+    async with TestKafkaBroker(broker):
+        time = datetime.now()
+        await broker.publish(Point(x=1.0, y=2.0, time=time), "input_data", key=b"key")
+
+        point_json = model_to_jsonable(Point(x=1.0, y=2.0, time=time))
+        on_input_data.mock.assert_called_with(point_json)
+
+        point_json = model_to_jsonable(Point(x=2.0, y=3.0, time=time))
+        on_output_data.mock.assert_called_with(point_json)
diff --git a/search/examples/example_course_updates/__init__.py b/search/examples/example_course_updates/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/search/examples/example_course_updates/app.py b/search/examples/example_course_updates/app.py
new file mode 100644
index 0000000..8516bca
--- /dev/null
+++ b/search/examples/example_course_updates/app.py
@@ -0,0 +1,38 @@
+from datetime import datetime
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+from faststream import FastStream, Logger
+from faststream.kafka import KafkaBroker
+
+
+class CourseUpdates(BaseModel):
+    course_name: str = Field(..., examples=["Biology"], description="Course example")
+    new_content: Optional[str] = Field(
+        default=None, examples=["New content"], description="Content example"
+    )
+    timestamp: datetime = Field(
+        ...,
+        examples=["2020-04-23 10:20:30.400000"],
+        description="The timestamp of the record",
+    )
+
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+
+@broker.publisher("notify_updates")
+@broker.subscriber("course_updates")
+async def on_course_update(msg: CourseUpdates, logger: Logger) -> CourseUpdates:
+    logger.info(msg)
+
+    if msg.new_content:
+        logger.info(f"Course has new content {msg.new_content=}")
+        msg = CourseUpdates(
+            course_name=("Updated: " + msg.course_name),
+            new_content=msg.new_content,
+            timestamp=datetime.now(),
+        )
+    return msg
diff --git a/search/examples/example_course_updates/app_skeleton.py b/search/examples/example_course_updates/app_skeleton.py
new file mode 100644
index 0000000..30fbe55
--- /dev/null
+++ b/search/examples/example_course_updates/app_skeleton.py
@@ -0,0 +1,40 @@
+from datetime import datetime
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+from faststream import FastStream, Logger
+from faststream.kafka import KafkaBroker
+
+
+class CourseUpdates(BaseModel):
+    course_name: str = Field(..., examples=["Biology"], description="Course example")
+    new_content: Optional[str] = Field(
+        default=None, examples=["New content"], description="Content example"
+    )
+    timestamp: datetime = Field(
+        ...,
+        examples=["2020-04-23 10:20:30.400000"],
+        description="The timestamp of the record",
+    )
+
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+
+@broker.publisher("notify_updates")
+@broker.subscriber("course_updates")
+async def on_course_update(msg: CourseUpdates, logger: Logger) -> CourseUpdates:
+    """
+    Processes a message from the 'course_updates' topic. If the new_content attribute is set, constructs a new message prepending 'Updated: ' to the course_name attribute.
+    Finally, publishes the message to the 'notify_updates' topic.
+
+    Instructions:
+    1. Consume a message from 'course_updates' topic.
+    2. Create a new message object (do not directly modify the original).
+    3. If the new_content attribute is set, construct a new message prepending 'Updated: ' to the course_name attribute.
+    4. Publish the modified message to 'notify_updates' topic.
+
+    """
+    raise NotImplementedError()
diff --git a/search/examples/example_course_updates/description.txt b/search/examples/example_course_updates/description.txt
new file mode 100644
index 0000000..76a216b
--- /dev/null
+++ b/search/examples/example_course_updates/description.txt
@@ -0,0 +1,4 @@
+Develop a FastStream application using localhost broker.
+It should consume messages from 'course_updates' topic where the message is a JSON encoded object including three attributes: course_name, new_content and timestamp.
+If new_content attribute is set, then construct a new message prepending 'Updated: ' to the course_name attribute.
+Finally, publish this message to the 'notify_updates' topic.
diff --git a/search/examples/example_course_updates/test_app.py b/search/examples/example_course_updates/test_app.py
new file mode 100644
index 0000000..20a7936
--- /dev/null
+++ b/search/examples/example_course_updates/test_app.py
@@ -0,0 +1,64 @@
+from datetime import datetime
+
+import pytest
+from freezegun import freeze_time
+
+from faststream._compat import model_to_jsonable
+from faststream.kafka import TestKafkaBroker
+
+from .app import CourseUpdates, broker, on_course_update
+
+
+@broker.subscriber("notify_updates")
+async def on_notify_update(msg: CourseUpdates):
+    pass
+
+
+# Freeze time so the datetime always uses the same time
+@freeze_time("2023-01-01")
+@pytest.mark.asyncio
+async def test_app_without_new_content():
+    async with TestKafkaBroker(broker):
+        timestamp = datetime.now()
+        await broker.publish(
+            CourseUpdates(course_name="Biology", timestamp=timestamp), "course_updates"
+        )
+
+        course_json = model_to_jsonable(
+            CourseUpdates(course_name="Biology", timestamp=timestamp)
+        )
+        on_course_update.mock.assert_called_with(course_json)
+        on_notify_update.mock.assert_called_with(course_json)
+
+
+# Freeze time so the datetime always uses the same time
+@freeze_time("2023-01-01")
+@pytest.mark.asyncio
+async def test_app_with_new_content():
+    async with TestKafkaBroker(broker):
+        timestamp = datetime.now()
+        await broker.publish(
+            CourseUpdates(
+                course_name="Biology",
+                new_content="We have additional classes...",
+                timestamp=timestamp,
+            ),
+            "course_updates",
+        )
+        course_json = model_to_jsonable(
+            CourseUpdates(
+                course_name="Biology",
+                new_content="We have additional classes...",
+                timestamp=timestamp,
+            )
+        )
+        on_course_update.mock.assert_called_with(course_json)
+
+        on_update_json = model_to_jsonable(
+            CourseUpdates(
+                course_name="Updated: Biology",
+                new_content="We have additional classes...",
+                timestamp=timestamp,
+            )
+        )
+        on_notify_update.mock.assert_called_with(on_update_json)
diff --git a/search/examples/example_execute_trade/__init__.py b/search/examples/example_execute_trade/__init__.py
new
file mode 100644
index 0000000..e69de29
diff --git a/search/examples/example_execute_trade/app.py b/search/examples/example_execute_trade/app.py
new file mode 100644
index 0000000..ce6fb6e
--- /dev/null
+++ b/search/examples/example_execute_trade/app.py
@@ -0,0 +1,33 @@
+from pydantic import BaseModel, Field, NonNegativeInt
+
+from faststream import FastStream, Logger
+from faststream.kafka import KafkaBroker
+
+
+class Trade(BaseModel):
+    trader_id: NonNegativeInt = Field(..., examples=[1], description="Int data example")
+    stock_symbol: str = Field(..., examples=["WS"], description="Stock example")
+    action: str = Field(..., examples=["Sell!!!"], description="Action example")
+
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+to_order_executed = broker.publisher("order_executed")
+
+
+@broker.subscriber("execute_trade")
+async def on_execute_trade(msg: Trade, logger: Logger) -> None:
+    logger.info(msg)
+
+    if "Sell" in msg.action:
+        # price = retrieve_the_current_price(msg)
+        # Currently using a fixed placeholder price
+        price = 5
+        await to_order_executed.publish(
+            Trade(
+                trader_id=msg.trader_id,
+                stock_symbol=msg.stock_symbol,
+                action=(msg.action + f" Price = {price}"),
+            )
+        )
diff --git a/search/examples/example_execute_trade/app_skeleton.py b/search/examples/example_execute_trade/app_skeleton.py
new file mode 100644
index 0000000..774492f
--- /dev/null
+++ b/search/examples/example_execute_trade/app_skeleton.py
@@ -0,0 +1,32 @@
+from pydantic import BaseModel, Field, NonNegativeInt
+
+from faststream import FastStream, Logger
+from faststream.kafka import KafkaBroker
+
+
+class Trade(BaseModel):
+    trader_id: NonNegativeInt = Field(..., examples=[1], description="Int data example")
+    stock_symbol: str = Field(..., examples=["WS"], description="Stock example")
+    action: str = Field(..., examples=["Sell!!!"], description="Action example")
+
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+to_order_executed = broker.publisher("order_executed")
+
+
+@broker.subscriber("execute_trade")
+async def on_execute_trade(msg: Trade, logger: Logger) -> None:
+    """
+    Processes a message from the 'execute_trade' topic.
+    Upon reception, the function should verify if the action attribute contains 'Sell'. If yes, retrieve the current price, append this detail to the message, and publish the updated message to the 'order_executed' topic.
+
+    Instructions:
+    1. Consume a message from 'execute_trade' topic.
+    2. Create a new message object (do not directly modify the original).
+    3. Check if the action attribute contains 'Sell'.
+    4. If 3. is True, retrieve the current price, append this detail to the message, and publish the updated message to the 'order_executed' topic.
+
+    """
+    raise NotImplementedError()
diff --git a/search/examples/example_execute_trade/description.txt b/search/examples/example_execute_trade/description.txt
new file mode 100644
index 0000000..1f0b588
--- /dev/null
+++ b/search/examples/example_execute_trade/description.txt
@@ -0,0 +1,3 @@
+Develop a FastStream application with localhost broker for development.
+The application should consume from the 'execute_trade' topic with messages including attributes: trader_id, stock_symbol, and action.
+Upon reception, the function should verify if the action attribute contains 'Sell'. If yes, retrieve the current price, append this detail to the message, and publish the updated message to the 'order_executed' topic.
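app.py stubs the price lookup with a constant and leaves retrieve_the_current_price commented out. One hedged sketch of how that hook could be wired in later without touching the handler logic (the PriceSource protocol and FixedPriceSource are hypothetical helpers, not part of the example):

from typing import Protocol


class PriceSource(Protocol):
    def price_for(self, stock_symbol: str) -> float: ...


class FixedPriceSource:
    """Development stand-in mirroring the hardcoded price = 5 above."""

    def price_for(self, stock_symbol: str) -> float:
        return 5.0


def retrieve_the_current_price(stock_symbol: str, source: PriceSource) -> float:
    # Delegate to whichever quote source is configured
    return source.price_for(stock_symbol)


assert retrieve_the_current_price("WS", FixedPriceSource()) == 5.0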
diff --git a/search/examples/example_execute_trade/test_app.py b/search/examples/example_execute_trade/test_app.py new file mode 100644 index 0000000..0bb5db2 --- /dev/null +++ b/search/examples/example_execute_trade/test_app.py @@ -0,0 +1,36 @@ +import pytest + +from faststream.kafka import TestKafkaBroker + +from .app import Trade, broker, on_execute_trade + + +@broker.subscriber("order_executed") +async def on_order_executed(msg: Trade) -> None: + pass + + +@pytest.mark.asyncio +async def test_app_without_sell_action(): + async with TestKafkaBroker(broker): + await broker.publish( + Trade(trader_id=1, stock_symbol="WS", action="Nothing"), "execute_trade" + ) + on_execute_trade.mock.assert_called_with( + dict(Trade(trader_id=1, stock_symbol="WS", action="Nothing")) + ) + on_order_executed.mock.assert_not_called() + + +@pytest.mark.asyncio +async def test_app_with_sell_action(): + async with TestKafkaBroker(broker): + await broker.publish( + Trade(trader_id=1, stock_symbol="WS", action="Sell!"), "execute_trade" + ) + on_execute_trade.mock.assert_called_with( + dict(Trade(trader_id=1, stock_symbol="WS", action="Sell!")) + ) + on_order_executed.mock.assert_called_with( + dict(Trade(trader_id=1, stock_symbol="WS", action="Sell! Price = 5")) + ) diff --git a/search/examples/example_forward_to_another_topic/__init__.py b/search/examples/example_forward_to_another_topic/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/search/examples/example_forward_to_another_topic/app.py b/search/examples/example_forward_to_another_topic/app.py new file mode 100644 index 0000000..7d9213d --- /dev/null +++ b/search/examples/example_forward_to_another_topic/app.py @@ -0,0 +1,24 @@ +from typing import Optional + +from pydantic import BaseModel, Field + +from faststream import FastStream, Logger +from faststream.kafka import KafkaBroker + + +class Document(BaseModel): + name: str = Field(..., examples=["doc_name.txt"], description="Name example") + content: Optional[str] = Field( + default=None, examples=["New content"], description="Content example" + ) + + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + + +@broker.publisher("document_backup") +@broker.subscriber("document") +async def on_document(msg: Document, logger: Logger) -> Document: + logger.info(msg) + return msg diff --git a/search/examples/example_forward_to_another_topic/app_skeleton.py b/search/examples/example_forward_to_another_topic/app_skeleton.py new file mode 100644 index 0000000..e7577f2 --- /dev/null +++ b/search/examples/example_forward_to_another_topic/app_skeleton.py @@ -0,0 +1,31 @@ +from typing import Optional + +from pydantic import BaseModel, Field + +from faststream import FastStream, Logger +from faststream.kafka import KafkaBroker + + +class Document(BaseModel): + name: str = Field(..., examples=["doc_name.txt"], description="Name example") + content: Optional[str] = Field( + default=None, examples=["New content"], description="Content example" + ) + + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + + +@broker.publisher("document_backup") +@broker.subscriber("document") +async def on_document(msg: Document, logger: Logger) -> Document: + """ + Processes a message from the 'document' topic and publishes the same message to the 'document_backup' topic. + + Instructions: + 1. Consume a message from 'document' topic. + 2. Publish the same message to 'document_backup' topic. 
+ + """ + raise NotImplementedError() diff --git a/search/examples/example_forward_to_another_topic/description.txt b/search/examples/example_forward_to_another_topic/description.txt new file mode 100644 index 0000000..4cd2643 --- /dev/null +++ b/search/examples/example_forward_to_another_topic/description.txt @@ -0,0 +1,2 @@ +Simple FastStream application which only forwards all messages from the 'document' topic to the 'document_backup' topic. +Each Document has two attributes: name and content. diff --git a/search/examples/example_forward_to_another_topic/test_app.py b/search/examples/example_forward_to_another_topic/test_app.py new file mode 100644 index 0000000..5b44b32 --- /dev/null +++ b/search/examples/example_forward_to_another_topic/test_app.py @@ -0,0 +1,24 @@ +import pytest + +from faststream.kafka import TestKafkaBroker + +from .app import Document, broker, on_document + + +@broker.subscriber("document_backup") +async def on_document_backup(msg: Document): + pass + + +@pytest.mark.asyncio +async def test_app(): + async with TestKafkaBroker(broker): + await broker.publish( + Document(name="doc.txt", content="Introduction to FastStream"), "document" + ) + on_document.mock.assert_called_with( + dict(Document(name="doc.txt", content="Introduction to FastStream")) + ) + on_document_backup.mock.assert_called_with( + dict(Document(name="doc.txt", content="Introduction to FastStream")) + ) diff --git a/search/examples/example_forward_to_multiple_topics/__init__.py b/search/examples/example_forward_to_multiple_topics/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/search/examples/example_forward_to_multiple_topics/app.py b/search/examples/example_forward_to_multiple_topics/app.py new file mode 100644 index 0000000..b1a4605 --- /dev/null +++ b/search/examples/example_forward_to_multiple_topics/app.py @@ -0,0 +1,14 @@ +from faststream import FastStream, Logger +from faststream.kafka import KafkaBroker + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + + +@broker.publisher("output_1") +@broker.publisher("output_2") +@broker.publisher("output_3") +@broker.subscriber("input") +async def on_input(msg: str, logger: Logger) -> str: + logger.info(msg) + return msg diff --git a/search/examples/example_forward_to_multiple_topics/app_skeleton.py b/search/examples/example_forward_to_multiple_topics/app_skeleton.py new file mode 100644 index 0000000..56d19fb --- /dev/null +++ b/search/examples/example_forward_to_multiple_topics/app_skeleton.py @@ -0,0 +1,20 @@ +from faststream import FastStream, Logger +from faststream.kafka import KafkaBroker + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + + +@broker.publisher("output_1") +@broker.publisher("output_2") +@broker.publisher("output_3") +@broker.subscriber("input") +async def on_input(msg: str, logger: Logger) -> str: + """ + Processes a message from the 'input' topic and publishes the same message to the 'output_1', 'output_2' and 'output_3' topic. + + Instructions: + 1. Consume a message from 'input' topic. + 2. Publish the same message to 'output_1', 'output_2' and 'output_3' topic. 
+    """
+    raise NotImplementedError()
diff --git a/search/examples/example_forward_to_multiple_topics/description.txt b/search/examples/example_forward_to_multiple_topics/description.txt
new file mode 100644
index 0000000..ea6f81c
--- /dev/null
+++ b/search/examples/example_forward_to_multiple_topics/description.txt
@@ -0,0 +1,2 @@
+Simple FastStream application which only forwards all messages from the 'input' topic to the 'output_1', 'output_2' and 'output_3' topics.
+Each message is a string.
diff --git a/search/examples/example_forward_to_multiple_topics/test_app.py b/search/examples/example_forward_to_multiple_topics/test_app.py
new file mode 100644
index 0000000..c6d1676
--- /dev/null
+++ b/search/examples/example_forward_to_multiple_topics/test_app.py
@@ -0,0 +1,29 @@
+import pytest
+
+from faststream.kafka import TestKafkaBroker
+
+from .app import broker
+
+
+@broker.subscriber("output_1")
+async def on_output_1(msg: str):
+    pass
+
+
+@broker.subscriber("output_2")
+async def on_output_2(msg: str):
+    pass
+
+
+@broker.subscriber("output_3")
+async def on_output_3(msg: str):
+    pass
+
+
+@pytest.mark.asyncio
+async def test_app():
+    async with TestKafkaBroker(broker):
+        await broker.publish("input string", "input")
+        on_output_1.mock.assert_called_once_with("input string")
+        on_output_2.mock.assert_called_once_with("input string")
+        on_output_3.mock.assert_called_once_with("input string")
diff --git a/search/examples/example_forward_with_security/__init__.py b/search/examples/example_forward_with_security/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/search/examples/example_forward_with_security/app.py b/search/examples/example_forward_with_security/app.py
new file mode 100644
index 0000000..a495381
--- /dev/null
+++ b/search/examples/example_forward_with_security/app.py
@@ -0,0 +1,29 @@
+import ssl
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+from faststream import FastStream, Logger
+from faststream.broker.security import BaseSecurity
+from faststream.kafka import KafkaBroker
+
+
+class Document(BaseModel):
+    name: str = Field(..., examples=["doc_name.txt"], description="Name example")
+    content: Optional[str] = Field(
+        default=None, examples=["New content"], description="Content example"
+    )
+
+
+ssl_context = ssl.create_default_context()
+security = BaseSecurity(ssl_context=ssl_context)
+
+broker = KafkaBroker("localhost:9092", security=security)
+app = FastStream(broker)
+
+
+@broker.publisher("document_backup")
+@broker.subscriber("document")
+async def on_document(msg: Document, logger: Logger) -> Document:
+    logger.info(msg)
+    return msg
diff --git a/search/examples/example_forward_with_security/app_skeleton.py b/search/examples/example_forward_with_security/app_skeleton.py
new file mode 100644
index 0000000..33e26d9
--- /dev/null
+++ b/search/examples/example_forward_with_security/app_skeleton.py
@@ -0,0 +1,36 @@
+import ssl
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+from faststream import FastStream, Logger
+from faststream.broker.security import BaseSecurity
+from faststream.kafka import KafkaBroker
+
+
+class Document(BaseModel):
+    name: str = Field(..., examples=["doc_name.txt"], description="Name example")
+    content: Optional[str] = Field(
+        default=None, examples=["New content"], description="Content example"
+    )
+
+
+ssl_context = ssl.create_default_context()
+security = BaseSecurity(ssl_context=ssl_context)
+
+broker = KafkaBroker("localhost:9092", security=security)
+app = FastStream(broker)
+
+
+@broker.publisher("document_backup")
+@broker.subscriber("document")
+async def on_document(msg: Document, logger: Logger) -> Document:
+    """
+    Processes a message from the 'document' topic and publishes the same message to the 'document_backup' topic.
+
+    Instructions:
+    1. Consume a message from 'document' topic.
+    2. Publish the same message to 'document_backup' topic.
+
+    """
+    raise NotImplementedError()
diff --git a/search/examples/example_forward_with_security/description.txt b/search/examples/example_forward_with_security/description.txt
new file mode 100644
index 0000000..b058d36
--- /dev/null
+++ b/search/examples/example_forward_with_security/description.txt
@@ -0,0 +1,3 @@
+Simple FastStream application which only forwards all messages from the 'document' topic to the 'document_backup' topic.
+Each Document has two attributes: name and content.
+The communication with the broker is encrypted with ssl.
diff --git a/search/examples/example_forward_with_security/test_app.py b/search/examples/example_forward_with_security/test_app.py
new file mode 100644
index 0000000..5b44b32
--- /dev/null
+++ b/search/examples/example_forward_with_security/test_app.py
@@ -0,0 +1,24 @@
+import pytest
+
+from faststream.kafka import TestKafkaBroker
+
+from .app import Document, broker, on_document
+
+
+@broker.subscriber("document_backup")
+async def on_document_backup(msg: Document):
+    pass
+
+
+@pytest.mark.asyncio
+async def test_app():
+    async with TestKafkaBroker(broker):
+        await broker.publish(
+            Document(name="doc.txt", content="Introduction to FastStream"), "document"
+        )
+        on_document.mock.assert_called_with(
+            dict(Document(name="doc.txt", content="Introduction to FastStream"))
+        )
+        on_document_backup.mock.assert_called_with(
+            dict(Document(name="doc.txt", content="Introduction to FastStream"))
+        )
diff --git a/search/examples/example_infinity_publishing/__init__.py b/search/examples/example_infinity_publishing/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/search/examples/example_infinity_publishing/app.py b/search/examples/example_infinity_publishing/app.py
new file mode 100644
index 0000000..c97de17
--- /dev/null
+++ b/search/examples/example_infinity_publishing/app.py
@@ -0,0 +1,45 @@
+import asyncio
+from datetime import datetime
+
+from faststream import ContextRepo, FastStream, Logger
+from faststream.kafka import KafkaBroker
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+publisher = broker.publisher("current_time")
+
+
+@app.on_startup
+async def app_setup(context: ContextRepo):
+    context.set_global("app_is_running", True)
+
+
+@app.on_shutdown
+async def app_shutdown(context: ContextRepo):
+    context.set_global("app_is_running", False)
+
+    # Get the running task and wait for it to finish
+    publish_task = context.get("publish_task")
+    await publish_task
+
+
+async def publish_time_task(
+    logger: Logger, context: ContextRepo, time_interval: int = 5
+):
+    # Always use context: ContextRepo for storing app_is_running variable
+    while context.get("app_is_running"):
+        current_time = datetime.now()
+        await publisher.publish(current_time.isoformat())
+        logger.info(f"Current time published: {current_time}")
+        await asyncio.sleep(time_interval)
+
+
+@app.after_startup
+async def publish_time(logger: Logger, context: ContextRepo):
+    logger.info("Starting publishing:")
+
+    publish_task = asyncio.create_task(publish_time_task(logger, context))
+
+    # You need to save the asyncio task so you can wait for it to finish at app shutdown (the function decorated with @app.on_shutdown)
+    context.set_global("publish_task", publish_task)
diff --git a/search/examples/example_infinity_publishing/app_skeleton.py b/search/examples/example_infinity_publishing/app_skeleton.py
new file mode 100644
index 0000000..0b811af
--- /dev/null
+++ b/search/examples/example_infinity_publishing/app_skeleton.py
@@ -0,0 +1,47 @@
+from faststream import ContextRepo, FastStream, Logger
+from faststream.kafka import KafkaBroker
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+publisher = broker.publisher("current_time")
+
+
+@app.on_startup
+async def app_setup(context: ContextRepo):
+    """
+    Set all necessary global variables inside ContextRepo object:
+    Set app_is_running to True - we will use this variable as the running loop condition
+    """
+    raise NotImplementedError()
+
+
+@app.on_shutdown
+async def app_shutdown(context: ContextRepo):
+    """
+    Set all necessary global variables inside ContextRepo object:
+    Set app_is_running to False
+
+    Get the executed task from context and wait for it to finish
+    """
+    raise NotImplementedError()
+
+
+async def publish_time_task(
+    logger: Logger, context: ContextRepo, time_interval: int = 5
+):
+    """
+    While app_is_running variable inside context is True, repeat the following process:
+    publish the current time to the 'current_time' topic,
+    asynchronously sleep for time_interval.
+    """
+    raise NotImplementedError()
+
+
+@app.after_startup
+async def publish_time(logger: Logger, context: ContextRepo):
+    """
+    Create an asynchronous task for executing the publish_time_task function.
+    Save the asyncio task so you can wait for it to finish at app shutdown (the function decorated with @app.on_shutdown)
+    """
+    raise NotImplementedError()
diff --git a/search/examples/example_infinity_publishing/description.txt b/search/examples/example_infinity_publishing/description.txt
new file mode 100644
index 0000000..5a35053
--- /dev/null
+++ b/search/examples/example_infinity_publishing/description.txt
@@ -0,0 +1,2 @@
+Develop a simple FastStream application which publishes the current time to the 'current_time' topic.
+App should publish messages every five seconds until the app shuts down.
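The lifecycle above is the standard asyncio pattern for a graceful background publisher: start a task after startup, flip a shared flag on shutdown, then await the task so its loop can finish. A minimal FastStream-free sketch of the same choreography, using an asyncio.Event in place of the context flag:

import asyncio


async def worker(stop_event: asyncio.Event) -> None:
    # Do one unit of work per second until asked to stop
    while not stop_event.is_set():
        await asyncio.sleep(1)


async def main() -> None:
    stop_event = asyncio.Event()
    task = asyncio.create_task(worker(stop_event))

    await asyncio.sleep(3)  # the application runs...

    stop_event.set()  # signal shutdown
    await task  # let the loop observe the flag and exit cleanly


asyncio.run(main())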
diff --git a/search/examples/example_infinity_publishing/test_app.py b/search/examples/example_infinity_publishing/test_app.py
new file mode 100644
index 0000000..135187d
--- /dev/null
+++ b/search/examples/example_infinity_publishing/test_app.py
@@ -0,0 +1,21 @@
+from datetime import datetime
+
+import pytest
+
+from faststream import TestApp
+from faststream.kafka import TestKafkaBroker
+
+from .app import app, broker
+
+
+@broker.subscriber("current_time")
+async def on_current_time(msg: datetime):
+    pass
+
+
+@pytest.mark.asyncio
+async def test_message_was_published():
+    async with TestKafkaBroker(broker):
+        async with TestApp(app):
+            await on_current_time.wait_call(3)
+            on_current_time.mock.assert_called()
diff --git a/search/examples/example_investment_updates/__init__.py b/search/examples/example_investment_updates/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/search/examples/example_investment_updates/app.py b/search/examples/example_investment_updates/app.py
new file mode 100644
index 0000000..1619530
--- /dev/null
+++ b/search/examples/example_investment_updates/app.py
@@ -0,0 +1,32 @@
+from pydantic import BaseModel, Field, NonNegativeFloat, NonNegativeInt
+
+from faststream import FastStream, Logger
+from faststream.kafka import KafkaBroker
+
+
+class Investment(BaseModel):
+    investor_id: NonNegativeInt = Field(
+        ..., examples=[1], description="Int data example"
+    )
+    investment_amount: NonNegativeFloat = Field(
+        ..., examples=[100.5], description="Float data example"
+    )
+    portfolio_value: NonNegativeFloat = Field(
+        ..., examples=[1000.5], description="Float data example"
+    )
+
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+to_risk_management = broker.publisher("risk_management")
+
+
+@broker.subscriber("investment_updates")
+async def on_investment_updates(msg: Investment, logger: Logger) -> None:
+    logger.info(msg)
+
+    default_threshold = 1000
+
+    if msg.investment_amount > default_threshold:
+        await to_risk_management.publish(msg)
diff --git a/search/examples/example_investment_updates/app_skeleton.py b/search/examples/example_investment_updates/app_skeleton.py
new file mode 100644
index 0000000..b4f9090
--- /dev/null
+++ b/search/examples/example_investment_updates/app_skeleton.py
@@ -0,0 +1,38 @@
+from pydantic import BaseModel, Field, NonNegativeFloat, NonNegativeInt
+
+from faststream import FastStream, Logger
+from faststream.kafka import KafkaBroker
+
+
+class Investment(BaseModel):
+    investor_id: NonNegativeInt = Field(
+        ..., examples=[1], description="Int data example"
+    )
+    investment_amount: NonNegativeFloat = Field(
+        ..., examples=[100.5], description="Float data example"
+    )
+    portfolio_value: NonNegativeFloat = Field(
+        ..., examples=[1000.5], description="Float data example"
+    )
+
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+to_risk_management = broker.publisher("risk_management")
+
+
+@broker.subscriber("investment_updates")
+async def on_investment_updates(msg: Investment, logger: Logger) -> None:
+    """
+    Processes a message from the 'investment_updates' topic.
+    Upon reception, the function should verify if the investment_amount exceeds a predetermined threshold (default threshold is 1000).
+    If yes, forward the message to the 'risk_management' topic.
+
+    Instructions:
+    1. Consume a message from 'investment_updates' topic.
+    2. Check if the investment_amount exceeds a predetermined threshold (default threshold is 1000).
+    3. If 2. is True, forward the message to the 'risk_management' topic.
+
+    """
+    raise NotImplementedError()
diff --git a/search/examples/example_investment_updates/description.txt b/search/examples/example_investment_updates/description.txt
new file mode 100644
index 0000000..d841174
--- /dev/null
+++ b/search/examples/example_investment_updates/description.txt
@@ -0,0 +1,3 @@
+Develop a FastStream application which consumes messages from the 'investment_updates' topic.
+The consumed message has the following attributes: investor_id, investment_amount, and portfolio_value.
+If investment_amount exceeds a predetermined threshold (default threshold is 1000), forward the message to the 'risk_management' topic for further investigation.
diff --git a/search/examples/example_investment_updates/test_app.py b/search/examples/example_investment_updates/test_app.py
new file mode 100644
index 0000000..e18a5a9
--- /dev/null
+++ b/search/examples/example_investment_updates/test_app.py
@@ -0,0 +1,42 @@
+import pytest
+
+from faststream.kafka import TestKafkaBroker
+
+from .app import Investment, broker, on_investment_updates
+
+
+@broker.subscriber("risk_management")
+async def on_risk_management(msg: Investment) -> None:
+    pass
+
+
+@pytest.mark.asyncio
+async def test_invest_smaller_amount_than_threshold():
+    async with TestKafkaBroker(broker):
+        await broker.publish(
+            Investment(investor_id=1, investment_amount=100, portfolio_value=1000),
+            "investment_updates",
+        )
+        on_investment_updates.mock.assert_called_with(
+            dict(Investment(investor_id=1, investment_amount=100, portfolio_value=1000))
+        )
+        on_risk_management.mock.assert_not_called()
+
+
+@pytest.mark.asyncio
+async def test_invest_greater_amount_than_threshold():
+    async with TestKafkaBroker(broker):
+        await broker.publish(
+            Investment(investor_id=1, investment_amount=1500, portfolio_value=1000),
+            "investment_updates",
+        )
+        on_investment_updates.mock.assert_called_with(
+            dict(
+                Investment(investor_id=1, investment_amount=1500, portfolio_value=1000)
+            )
+        )
+        on_risk_management.mock.assert_called_with(
+            dict(
+                Investment(investor_id=1, investment_amount=1500, portfolio_value=1000)
+            )
+        )
diff --git a/search/examples/example_log_msgs_with_plaintext_security/__init__.py b/search/examples/example_log_msgs_with_plaintext_security/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/search/examples/example_log_msgs_with_plaintext_security/app.py b/search/examples/example_log_msgs_with_plaintext_security/app.py
new file mode 100644
index 0000000..f619e0b
--- /dev/null
+++ b/search/examples/example_log_msgs_with_plaintext_security/app.py
@@ -0,0 +1,27 @@
+import ssl
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+from faststream import FastStream, Logger
+from faststream.broker.security import SASLPlaintext
+from faststream.kafka import KafkaBroker
+
+
+class Document(BaseModel):
+    name: str = Field(..., examples=["doc_name.txt"], description="Name example")
+    content: Optional[str] = Field(
+        default=None, examples=["New content"], description="Content example"
+    )
+
+
+ssl_context = ssl.create_default_context()
+security = SASLPlaintext(ssl_context=ssl_context, username="admin", password="admin")
+
+broker = KafkaBroker("localhost:9092", security=security)
+app = FastStream(broker)
+
+
+@broker.subscriber("document")
+async def on_document(msg: Document, logger: Logger):
+    logger.info(msg)
diff --git a/search/examples/example_log_msgs_with_plaintext_security/app_skeleton.py
b/search/examples/example_log_msgs_with_plaintext_security/app_skeleton.py
new file mode 100644
index 0000000..e7b0b10
--- /dev/null
+++ b/search/examples/example_log_msgs_with_plaintext_security/app_skeleton.py
@@ -0,0 +1,34 @@
+import ssl
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+from faststream import FastStream, Logger
+from faststream.broker.security import SASLPlaintext
+from faststream.kafka import KafkaBroker
+
+
+class Document(BaseModel):
+    name: str = Field(..., examples=["doc_name.txt"], description="Name example")
+    content: Optional[str] = Field(
+        default=None, examples=["New content"], description="Content example"
+    )
+
+
+ssl_context = ssl.create_default_context()
+security = SASLPlaintext(ssl_context=ssl_context, username="admin", password="admin")
+
+broker = KafkaBroker("localhost:9092", security=security)
+app = FastStream(broker)
+
+
+@broker.subscriber("document")
+async def on_document(msg: Document, logger: Logger):
+    """
+    Processes a message from the 'document' topic and prints it to the log.
+
+    Instructions:
+    1. Consume a message from 'document' topic and log it.
+
+    """
+    raise NotImplementedError()
diff --git a/search/examples/example_log_msgs_with_plaintext_security/description.txt b/search/examples/example_log_msgs_with_plaintext_security/description.txt
new file mode 100644
index 0000000..f04ace5
--- /dev/null
+++ b/search/examples/example_log_msgs_with_plaintext_security/description.txt
@@ -0,0 +1,3 @@
+Simple FastStream application which consumes messages from 'document' topic and prints them to log.
+Each Document has two attributes: name and content.
+The communication with the broker is encrypted with ssl, and uses SASL Plaintext authentication with username "admin" and password "admin".
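Hardcoding "admin"/"admin" is fine for a local example, but the same SASLPlaintext object can be built from the environment so credentials stay out of source control; a sketch using only the constructor arguments already shown in this file (the variable names are illustrative):

import os
import ssl

from faststream.broker.security import SASLPlaintext

ssl_context = ssl.create_default_context()

# Fall back to the example's defaults when the variables are unset
security = SASLPlaintext(
    ssl_context=ssl_context,
    username=os.environ.get("KAFKA_USERNAME", "admin"),
    password=os.environ.get("KAFKA_PASSWORD", "admin"),
)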
diff --git a/search/examples/example_log_msgs_with_plaintext_security/test_app.py b/search/examples/example_log_msgs_with_plaintext_security/test_app.py new file mode 100644 index 0000000..7bfda9b --- /dev/null +++ b/search/examples/example_log_msgs_with_plaintext_security/test_app.py @@ -0,0 +1,16 @@ +import pytest + +from faststream.kafka import TestKafkaBroker + +from .app import Document, broker, on_document + + +@pytest.mark.asyncio +async def test_app(): + async with TestKafkaBroker(broker): + await broker.publish( + Document(name="doc.txt", content="Introduction to FastStream"), "document" + ) + on_document.mock.assert_called_with( + dict(Document(name="doc.txt", content="Introduction to FastStream")) + ) diff --git a/search/examples/example_new_employee/__init__.py b/search/examples/example_new_employee/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/search/examples/example_new_employee/app.py b/search/examples/example_new_employee/app.py new file mode 100644 index 0000000..64da9e3 --- /dev/null +++ b/search/examples/example_new_employee/app.py @@ -0,0 +1,31 @@ +from pydantic import BaseModel, Field + +from faststream import FastStream, Logger +from faststream.kafka import KafkaBroker + + +class Employee(BaseModel): + name: str = Field(..., examples=["Mickey"], description="name example") + surname: str = Field(..., examples=["Mouse"], description="surname example") + email: str = Field( + ..., examples=["mikey.mouse@mail.ai"], description="email example" + ) + + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + +to_notify_accounting = broker.publisher("notify_accounting") +to_notify_all_employees = broker.publisher("notify_all_employees") + + +@broker.subscriber("new_employee") +async def on_new_employee(msg: Employee, logger: Logger) -> None: + logger.info(msg) + + await to_notify_accounting.publish( + f"Please prepare all the paper work for: {msg.name} {msg.surname}" + ) + await to_notify_all_employees.publish( + f"Please welcome our new colleague: {msg.name} {msg.surname}" + ) diff --git a/search/examples/example_new_employee/app_skeleton.py b/search/examples/example_new_employee/app_skeleton.py new file mode 100644 index 0000000..1145a32 --- /dev/null +++ b/search/examples/example_new_employee/app_skeleton.py @@ -0,0 +1,35 @@ +from pydantic import BaseModel, Field + +from faststream import FastStream, Logger +from faststream.kafka import KafkaBroker + + +class Employee(BaseModel): + name: str = Field(..., examples=["Mickey"], description="name example") + surname: str = Field(..., examples=["Mouse"], description="surname example") + email: str = Field( + ..., examples=["mikey.mouse@mail.ai"], description="email example" + ) + + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + +to_notify_accounting = broker.publisher("notify_accounting") +to_notify_all_employees = broker.publisher("notify_all_employees") + + +@broker.subscriber("new_employee") +async def on_new_employee(msg: Employee, logger: Logger) -> None: + """ + Processes a message from the 'new_employee' topic. + Upon reception, the function should publish messages to two topics: + 1. Send message to the 'notify_accounting' with content 'Please prepare all the paper work for:' and add at the end of the message employee name and surname + 2. Send message to the 'notify_all_employees' with content 'Please welcome our new colleague:' and add at the end of the message employee name and surname + + Instructions: + 1. Consume a message from 'new_employee' topic. + 2. 
Send message to the 'notify_accounting' with content 'Please prepare all the paper work for:' and add at the end of the message employee name and surname
+    3. Send message to the 'notify_all_employees' with content 'Please welcome our new colleague:' and add at the end of the message employee name and surname
+    """
+    raise NotImplementedError()
diff --git a/search/examples/example_new_employee/description.txt b/search/examples/example_new_employee/description.txt
new file mode 100644
index 0000000..57acd31
--- /dev/null
+++ b/search/examples/example_new_employee/description.txt
@@ -0,0 +1,5 @@
+FastStream app with one subscriber and two producer functions.
+App should subscribe to the 'new_employee' topic. The new_employee topic receives an Employee object with three attributes: name, surname and email.
+For each employee received on this topic, produce two messages:
+1. Send message to the 'notify_accounting' with content 'Please prepare all the paper work for:' and add at the end of the message employee name and surname
+2. Send message to the 'notify_all_employees' with content 'Please welcome our new colleague:' and add at the end of the message employee name and surname
diff --git a/search/examples/example_new_employee/test_app.py b/search/examples/example_new_employee/test_app.py
new file mode 100644
index 0000000..01e9247
--- /dev/null
+++ b/search/examples/example_new_employee/test_app.py
@@ -0,0 +1,33 @@
+import pytest
+
+from faststream.kafka import TestKafkaBroker
+
+from .app import Employee, broker, on_new_employee
+
+
+@broker.subscriber("notify_accounting")
+async def on_notify_accounting(msg: str) -> None:
+    pass
+
+
+@broker.subscriber("notify_all_employees")
+async def on_notify_all_employees(msg: str) -> None:
+    pass
+
+
+@pytest.mark.asyncio
+async def test_new_employee():
+    async with TestKafkaBroker(broker):
+        await broker.publish(
+            Employee(name="Liam", surname="Neeson", email="linee@mail"), "new_employee"
+        )
+        on_new_employee.mock.assert_called_once_with(
+            dict(Employee(name="Liam", surname="Neeson", email="linee@mail"))
+        )
+
+        on_notify_accounting.mock.assert_called_once_with(
+            "Please prepare all the paper work for: Liam Neeson"
+        )
+        on_notify_all_employees.mock.assert_called_once_with(
+            "Please welcome our new colleague: Liam Neeson"
+        )
diff --git a/search/examples/example_pets/__init__.py b/search/examples/example_pets/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/search/examples/example_pets/app.py b/search/examples/example_pets/app.py
new file mode 100644
index 0000000..bba54b8
--- /dev/null
+++ b/search/examples/example_pets/app.py
@@ -0,0 +1,22 @@
+from pydantic import BaseModel, Field, NonNegativeInt
+
+from faststream import FastStream, Logger
+from faststream.kafka import KafkaBroker
+
+
+class Pet(BaseModel):
+    pet_id: NonNegativeInt = Field(..., examples=[1], description="Int data example")
+    species: str = Field(..., examples=["dog"], description="Pet example")
+    age: NonNegativeInt = Field(..., examples=[1], description="Int data example")
+
+
+broker = KafkaBroker("localhost:9092")
+app = FastStream(broker)
+
+
+@broker.publisher("notify_adopters")
+@broker.subscriber("new_pet")
+async def on_new_pet(msg: Pet, logger: Logger) -> Pet:
+    logger.info(msg)
+
+    return msg
diff --git a/search/examples/example_pets/app_skeleton.py b/search/examples/example_pets/app_skeleton.py
new file mode 100644
index 0000000..d89d788
--- /dev/null
+++ b/search/examples/example_pets/app_skeleton.py
@@ -0,0 +1,28 @@
+from pydantic import
BaseModel, Field, NonNegativeInt + +from faststream import FastStream, Logger +from faststream.kafka import KafkaBroker + + +class Pet(BaseModel): + pet_id: NonNegativeInt = Field(..., examples=[1], description="Int data example") + species: str = Field(..., examples=["dog"], description="Pet example") + age: NonNegativeInt = Field(..., examples=[1], description="Int data example") + + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + + +@broker.publisher("notify_adopters") +@broker.subscriber("new_pet") +async def on_new_pet(msg: Pet, logger: Logger) -> Pet: + """ + Processes a message from the 'new_pet' topic and sends the new pet's information to the 'notify_adopters' topic. + + Instructions: + 1. Consume a message from 'new_pet' topic. + 2. Send the new pet's information to the 'notify_adopters' topic. + + """ + raise NotImplementedError() diff --git a/search/examples/example_pets/description.txt b/search/examples/example_pets/description.txt new file mode 100644 index 0000000..52ee9de --- /dev/null +++ b/search/examples/example_pets/description.txt @@ -0,0 +1,3 @@ +Create a FastStream application with the localhost broker. +Consume from the 'new_pet' topic, which includes a JSON encoded object with attributes: pet_id, species, and age. +Whenever a new pet is added, send the new pet's information to the 'notify_adopters' topic. diff --git a/search/examples/example_pets/test_app.py b/search/examples/example_pets/test_app.py new file mode 100644 index 0000000..9ca7121 --- /dev/null +++ b/search/examples/example_pets/test_app.py @@ -0,0 +1,20 @@ +import pytest + +from faststream.kafka import TestKafkaBroker + +from .app import Pet, broker, on_new_pet + + +@broker.subscriber("notify_adopters") +async def on_notify_adopters(msg: Pet) -> None: + pass + + +@pytest.mark.asyncio +async def test_app(): + async with TestKafkaBroker(broker): + await broker.publish(Pet(pet_id=2, species="Dog", age=2), "new_pet") + on_new_pet.mock.assert_called_with(dict(Pet(pet_id=2, species="Dog", age=2))) + on_notify_adopters.mock.assert_called_with( + dict(Pet(pet_id=2, species="Dog", age=2)) + ) diff --git a/search/examples/example_plants/__init__.py b/search/examples/example_plants/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/search/examples/example_plants/app.py b/search/examples/example_plants/app.py new file mode 100644 index 0000000..ee68274 --- /dev/null +++ b/search/examples/example_plants/app.py @@ -0,0 +1,27 @@ +from pydantic import BaseModel, Field, NonNegativeInt + +from faststream import FastStream, Logger +from faststream.kafka import KafkaBroker + + +class Plant(BaseModel): + plant_id: NonNegativeInt = Field(..., examples=[1], description="Int data example") + species: str = Field(..., examples=["Apple"], description="Species example") + ready_to_sell: bool + + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + +to_sell_plant = broker.publisher("sell_plant") +to_still_growing = broker.publisher("still_growing") + + +@broker.subscriber("plant_growth") +async def on_plant_growth(msg: Plant, logger: Logger) -> None: + logger.info(msg) + + if msg.ready_to_sell: + await to_sell_plant.publish(msg.plant_id) + else: + await to_still_growing.publish(msg.plant_id) diff --git a/search/examples/example_plants/app_skeleton.py b/search/examples/example_plants/app_skeleton.py new file mode 100644 index 0000000..48d89bf --- /dev/null +++ b/search/examples/example_plants/app_skeleton.py @@ -0,0 +1,33 @@ +from pydantic import BaseModel, Field, NonNegativeInt + 
+from faststream import FastStream, Logger +from faststream.kafka import KafkaBroker + + +class Plant(BaseModel): + plant_id: NonNegativeInt = Field(..., examples=[1], description="Int data example") + species: str = Field(..., examples=["Apple"], description="Species example") + ready_to_sell: bool + + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + +to_sell_plant = broker.publisher("sell_plant") +to_still_growing = broker.publisher("still_growing") + + +@broker.subscriber("plant_growth") +async def on_plant_growth(msg: Plant, logger: Logger) -> None: + """ + Processes a message from the 'plant_growth' topic. + Upon reception, the function should verify if the ready_to_sell attribute is True. + If yes, publish plant_id to the 'sell_plant' topic, otherwise, publish plant_id to the 'still_growing' topic. + + Instructions: + 1. Consume a message from 'plant_growth' topic. + 2. Check if the ready_to_sell attribute is True. + 3. If ready_to_sell is True, publish plant_id to the 'sell_plant' topic, otherwise, publish plant_id to the 'still_growing' topic. + + """ + raise NotImplementedError() diff --git a/search/examples/example_plants/description.txt b/search/examples/example_plants/description.txt new file mode 100644 index 0000000..949e221 --- /dev/null +++ b/search/examples/example_plants/description.txt @@ -0,0 +1,2 @@ +Create a FastStream application for consuming messages from the 'plant_growth' topic which includes a JSON encoded object with attributes: plant_id, species and ready_to_sell. +If the ready_to_sell attribute is True, publish plant_id to the 'sell_plant' topic, otherwise, publish plant_id to the 'still_growing' topic. diff --git a/search/examples/example_plants/test_app.py b/search/examples/example_plants/test_app.py new file mode 100644 index 0000000..2041640 --- /dev/null +++ b/search/examples/example_plants/test_app.py @@ -0,0 +1,43 @@ +import pytest + +from faststream.kafka import TestKafkaBroker + +from .app import Plant, broker, on_plant_growth + + +@broker.subscriber("sell_plant") +async def on_sell_plant(msg: int) -> None: + pass + + +@broker.subscriber("still_growing") +async def on_still_growing(msg: int) -> None: + pass + + +@pytest.mark.asyncio +async def test_sell_plant_is_called(): + async with TestKafkaBroker(broker): + await broker.publish( + Plant(plant_id=1, species="Orange", ready_to_sell=True), "plant_growth" + ) + on_plant_growth.mock.assert_called_with( + dict(Plant(plant_id=1, species="Orange", ready_to_sell=True)) + ) + + on_sell_plant.mock.assert_called_with(1) + on_still_growing.mock.assert_not_called() + + +@pytest.mark.asyncio +async def test_still_growing_is_called(): + async with TestKafkaBroker(broker): + await broker.publish( + Plant(plant_id=1, species="Orange", ready_to_sell=False), "plant_growth" + ) + on_plant_growth.mock.assert_called_with( + dict(Plant(plant_id=1, species="Orange", ready_to_sell=False)) + ) + + on_still_growing.mock.assert_called_with(1) + on_sell_plant.mock.assert_not_called() diff --git a/search/examples/example_product_reviews/__init__.py b/search/examples/example_product_reviews/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/search/examples/example_product_reviews/app.py b/search/examples/example_product_reviews/app.py new file mode 100644 index 0000000..3d875be --- /dev/null +++ b/search/examples/example_product_reviews/app.py @@ -0,0 +1,37 @@ +from datetime import datetime + +from pydantic import BaseModel, Field, NonNegativeInt + +from faststream import FastStream, Logger +from 
faststream.kafka import KafkaBroker + + +class ProductReview(BaseModel): + product_id: NonNegativeInt = Field( + ..., examples=[1], description="Int data example" + ) + customer_id: NonNegativeInt = Field( + ..., examples=[1], description="Int data example" + ) + review_grade: NonNegativeInt = Field( + ..., examples=[1], description="Int data example" + ) + timestamp: datetime = Field( + ..., + examples=["2020-04-23 10:20:30.400000"], + description="The timestamp of the record", + ) + + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + +to_customer_service = broker.publisher("customer_service") + + +@broker.subscriber("product_reviews") +async def on_product_reviews(msg: ProductReview, logger: Logger) -> None: + logger.info(msg) + + if msg.review_grade < 5: + await to_customer_service.publish(msg) diff --git a/search/examples/example_product_reviews/app_skeleton.py b/search/examples/example_product_reviews/app_skeleton.py new file mode 100644 index 0000000..3a47d8c --- /dev/null +++ b/search/examples/example_product_reviews/app_skeleton.py @@ -0,0 +1,45 @@ +from datetime import datetime + +from pydantic import BaseModel, Field, NonNegativeInt + +from faststream import FastStream, Logger +from faststream.kafka import KafkaBroker + + +class ProductReview(BaseModel): + product_id: NonNegativeInt = Field( + ..., examples=[1], description="Int data example" + ) + customer_id: NonNegativeInt = Field( + ..., examples=[1], description="Int data example" + ) + review_grade: NonNegativeInt = Field( + ..., examples=[1], description="Int data example" + ) + timestamp: datetime = Field( + ..., + examples=["2020-04-23 10:20:30.400000"], + description="The timestamp of the record", + ) + + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + +to_customer_service = broker.publisher("customer_service") + + +@broker.subscriber("product_reviews") +async def on_product_reviews(msg: ProductReview, logger: Logger) -> None: + """ + Consumes a message from the 'product_reviews' topic. + Upon reception, the function should verify if the review_grade attribute is smaller than 5. If yes, publish an alert message to the 'customer_service' topic. + + Instructions: + 1. Consume a message from 'product_reviews' topic. + 2. Create a new message object (do not directly modify the original). + 3. Check if the review_grade attribute is smaller than 5. + 4. If 3. is True, publish an alert message to the 'customer_service' topic. + + """ + raise NotImplementedError() diff --git a/search/examples/example_product_reviews/description.txt b/search/examples/example_product_reviews/description.txt new file mode 100644 index 0000000..5887909 --- /dev/null +++ b/search/examples/example_product_reviews/description.txt @@ -0,0 +1,3 @@ +Create a FastStream application using the localhost broker. +The application should consume from the 'product_reviews' topic which includes JSON encoded objects with attributes: product_id, customer_id, review_grade and timestamp. +If the review_grade attribute is smaller than 5, send an alert message to the 'customer_service' topic. 
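A quick usage sketch for the example above (not part of the patch itself): a standalone script could feed the running app through the same broker object. The import path and the connect()/close() pairing below are assumptions, not something this patch defines.

# producer_sketch.py -- hypothetical helper; assumes it runs next to the example's app.py
import asyncio
from datetime import datetime

from app import ProductReview, broker  # assumption: the example directory is the working directory


async def main() -> None:
    await broker.connect()
    try:
        # a review_grade below 5 should be forwarded to 'customer_service' by the app
        await broker.publish(
            ProductReview(product_id=1, customer_id=42, review_grade=2, timestamp=datetime.now()),
            "product_reviews",
        )
    finally:
        await broker.close()


asyncio.run(main())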
diff --git a/search/examples/example_product_reviews/test_app.py b/search/examples/example_product_reviews/test_app.py new file mode 100644 index 0000000..49ff262 --- /dev/null +++ b/search/examples/example_product_reviews/test_app.py @@ -0,0 +1,54 @@ +from datetime import datetime + +import pytest + +from faststream._compat import model_to_jsonable +from faststream.kafka import TestKafkaBroker + +from .app import ProductReview, broker, on_product_reviews + + +@broker.subscriber("customer_service") +async def on_customer_service(msg: ProductReview) -> None: + pass + + +@pytest.mark.asyncio +async def test_app_where_review_grade_is_greater_than_5(): + async with TestKafkaBroker(broker): + timestamp = datetime.now() + await broker.publish( + ProductReview( + product_id=1, customer_id=1, review_grade=6, timestamp=timestamp + ), + "product_reviews", + ) + + on_product_review_json = model_to_jsonable( + ProductReview( + product_id=1, customer_id=1, review_grade=6, timestamp=timestamp + ) + ) + on_product_reviews.mock.assert_called_with(on_product_review_json) + on_customer_service.mock.assert_not_called() + + +@pytest.mark.asyncio +async def test_app_where_review_grade_is_less_than_5(): + async with TestKafkaBroker(broker): + timestamp = datetime.now() + await broker.publish( + ProductReview( + product_id=1, customer_id=2, review_grade=2, timestamp=timestamp + ), + "product_reviews", + ) + + product_review_json = model_to_jsonable( + ProductReview( + product_id=1, customer_id=2, review_grade=2, timestamp=timestamp + ) + ) + + on_product_reviews.mock.assert_called_with(product_review_json) + on_customer_service.mock.assert_called_with(product_review_json) diff --git a/search/examples/example_publish_with_key/__init__.py b/search/examples/example_publish_with_key/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/search/examples/example_publish_with_key/app.py b/search/examples/example_publish_with_key/app.py new file mode 100644 index 0000000..04e675e --- /dev/null +++ b/search/examples/example_publish_with_key/app.py @@ -0,0 +1,28 @@ +from pydantic import BaseModel, Field + +from faststream import FastStream, Logger +from faststream.kafka import KafkaBroker + + +class Point(BaseModel): + x: float = Field( + ..., examples=[0.5], description="The X Coordinate in the coordinate system" + ) + y: float = Field( + ..., examples=[0.5], description="The Y Coordinate in the coordinate system" + ) + + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + + +to_output_data = broker.publisher("output_data") + + +@broker.subscriber("input_data") +async def on_input_data(msg: Point, logger: Logger) -> None: + logger.info(f"{msg=}") + incremented_point = Point(x=msg.x + 1, y=msg.y + 1) + key = str(msg.x).encode("utf-8") + await to_output_data.publish(incremented_point, key=key) diff --git a/search/examples/example_publish_with_key/app_skeleton.py b/search/examples/example_publish_with_key/app_skeleton.py new file mode 100644 index 0000000..5096685 --- /dev/null +++ b/search/examples/example_publish_with_key/app_skeleton.py @@ -0,0 +1,37 @@ +from pydantic import BaseModel, Field + +from faststream import FastStream, Logger +from faststream.kafka import KafkaBroker + + +class Point(BaseModel): + x: float = Field( + ..., examples=[0.5], description="The X Coordinate in the coordinate system" + ) + y: float = Field( + ..., examples=[0.5], description="The Y Coordinate in the coordinate system" + ) + + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + + 
+to_output_data = broker.publisher("output_data") + + +@broker.subscriber("input_data") +async def on_input_data(msg: Point, logger: Logger) -> None: + """ + Processes a message from the 'input_data' topic. + Increment the msg's x and y attributes by 1 and publish that message to the output_data topic. + Use the message's x attribute as a partition key when publishing to the output_data topic. + + Instructions: + 1. Consume a message from 'input_data' topic. + 2. Create a new message object (do not directly modify the original). + 3. Increment the msg's x and y attributes by 1. + 4. Publish that message to the output_data topic (use the message's x attribute as a partition key). + """ + raise NotImplementedError() diff --git a/search/examples/example_publish_with_key/description.txt b/search/examples/example_publish_with_key/description.txt new file mode 100644 index 0000000..74a0f45 --- /dev/null +++ b/search/examples/example_publish_with_key/description.txt @@ -0,0 +1,8 @@ +Develop a FastStream application using localhost kafka broker. +The app should consume messages from the input_data topic. +The input message is a JSON encoded object including two attributes: + - x: float + - y: float + +While consuming the message, increment x and y attributes by 1 and publish that message to the output_data topic. +Use the message's x attribute as a partition key when publishing to the output_data topic. diff --git a/search/examples/example_publish_with_key/test_app.py b/search/examples/example_publish_with_key/test_app.py new file mode 100644 index 0000000..653758e --- /dev/null +++ b/search/examples/example_publish_with_key/test_app.py @@ -0,0 +1,19 @@ +import pytest + +from faststream import Context +from faststream.kafka import TestKafkaBroker + +from .app import Point, broker, on_input_data + + +@broker.subscriber("output_data") +async def on_output_data(msg: Point, key: bytes = Context("message.raw_message.key")): + pass + + +@pytest.mark.asyncio +async def test_point_was_incremented(): + async with TestKafkaBroker(broker): + await broker.publish(Point(x=1.0, y=2.0), "input_data") + on_input_data.mock.assert_called_with(dict(Point(x=1.0, y=2.0))) + on_output_data.mock.assert_called_with(dict(Point(x=2.0, y=3.0))) diff --git a/search/examples/example_scram256_security/__init__.py b/search/examples/example_scram256_security/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/search/examples/example_scram256_security/app.py b/search/examples/example_scram256_security/app.py new file mode 100644 index 0000000..18e751e --- /dev/null +++ b/search/examples/example_scram256_security/app.py @@ -0,0 +1,37 @@ +import os +import ssl +from datetime import date + +from pydantic import BaseModel, Field + +from faststream import FastStream, Logger +from faststream.broker.security import SASLScram256 +from faststream.kafka import KafkaBroker + + +class Student(BaseModel): + name: str = Field(..., examples=["Student Studentis"], description="Name example") + birthdate: date = Field( + ..., + examples=["2023-09-05"], + description="Student's birthdate", + ) + + +ssl_context = ssl.create_default_context() +security = SASLScram256( + ssl_context=ssl_context, + username=os.environ["USERNAME"], + password=os.environ["PASSWORD"], +) + +broker = KafkaBroker("localhost:9092", security=security) +app = FastStream(broker) + +to_class = broker.publisher("class") + + +@broker.subscriber("student_application") +async def on_application(msg: Student, logger: Logger) -> None: + key = 
msg.name.encode("utf-8") + await to_class.publish(msg, key=key) diff --git a/search/examples/example_scram256_security/app_skeleton.py b/search/examples/example_scram256_security/app_skeleton.py new file mode 100644 index 0000000..9860c93 --- /dev/null +++ b/search/examples/example_scram256_security/app_skeleton.py @@ -0,0 +1,43 @@ +import os +import ssl +from datetime import date + +from pydantic import BaseModel, Field + +from faststream import FastStream, Logger +from faststream.broker.security import SASLScram256 +from faststream.kafka import KafkaBroker + + +class Student(BaseModel): + name: str = Field(..., examples=["Student Studentis"], description="Name example") + birthdate: date = Field( + ..., + examples=["2023-09-05"], + description="Student's birthdate", + ) + + +ssl_context = ssl.create_default_context() +security = SASLScram256( + ssl_context=ssl_context, + username=os.environ["USERNAME"], + password=os.environ["PASSWORD"], +) + +broker = KafkaBroker("localhost:9092", security=security) +app = FastStream(broker) + + +@broker.publisher("class") +@broker.subscriber("student_application") +async def on_application(msg: Student, logger: Logger) -> Student: + """ + Processes a message from the 'student_application' topic and publishes the same message to the 'class' topic using the student's name as the key. + + Instructions: + 1. Consume a message from 'student_application' topic. + 2. Publish the same message to the 'class' topic, using the student's name as the key. + + """ + raise NotImplementedError() diff --git a/search/examples/example_scram256_security/description.txt b/search/examples/example_scram256_security/description.txt new file mode 100644 index 0000000..3533865 --- /dev/null +++ b/search/examples/example_scram256_security/description.txt @@ -0,0 +1,5 @@ +FastStream application that handles the incoming students from the "student_application" topic. +The Student is then passed to the "class" topic using the student's name as the key. +Student has a name and birthdate. +The communication with the broker is encrypted with ssl and uses SASL Scram256 for authorization. +Username and password are loaded from environment variables. 
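A minimal run sketch for this example (hypothetical, not part of the patch): app.py reads USERNAME and PASSWORD at import time, so the variables must be set before the import, the same way test_app.py patches them below.

# run_sketch.py -- assumes it sits next to the example's app.py
import asyncio
import os

# assumption: placeholder credentials; replace with real ones
os.environ.setdefault("USERNAME", "username")
os.environ.setdefault("PASSWORD", "password")  # pragma: allowlist secret

from app import app  # noqa: E402  -- imported only after the env vars are in place

asyncio.run(app.run())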
diff --git a/search/examples/example_scram256_security/test_app.py b/search/examples/example_scram256_security/test_app.py new file mode 100644 index 0000000..2cf2218 --- /dev/null +++ b/search/examples/example_scram256_security/test_app.py @@ -0,0 +1,40 @@ +import os +from datetime import date +from unittest import mock + +import pytest + +from faststream import Context +from faststream._compat import model_to_jsonable +from faststream.kafka import TestKafkaBroker + +with mock.patch.dict( + os.environ, + {"USERNAME": "username", "PASSWORD": "password"}, # pragma: allowlist secret +): + from .app import Student, broker, on_application, to_class + + +@broker.subscriber("class") +async def on_class( + msg: Student, key: bytes = Context("message.raw_message.key") +) -> None: + pass + + +@pytest.mark.asyncio +async def test_app(): + async with TestKafkaBroker(broker): + birthdate = date(2020, 9, 5) + await broker.publish( + Student(name="Student Studentis", birthdate=birthdate), + "student_application", + ) + + student_json = model_to_jsonable( + Student(name="Student Studentis", birthdate=birthdate) + ) + + on_application.mock.assert_called_with(student_json) + to_class.mock.assert_called_with(student_json) + on_class.mock.assert_called_with(student_json) diff --git a/search/examples/example_scram512_security/__init__.py b/search/examples/example_scram512_security/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/search/examples/example_scram512_security/app.py b/search/examples/example_scram512_security/app.py new file mode 100644 index 0000000..4a1c2fb --- /dev/null +++ b/search/examples/example_scram512_security/app.py @@ -0,0 +1,37 @@ +import os +import ssl + +from pydantic import BaseModel, Field + +from faststream import FastStream, Logger +from faststream.broker.security import SASLScram512 +from faststream.kafka import KafkaBroker + + +class Student(BaseModel): + name: str = Field(..., examples=["Student Studentis"], description="Name example") + age: int = Field( + ..., + examples=[ + 20, + ], + description="Student age", + ) + + +ssl_context = ssl.create_default_context() +security = SASLScram512( + ssl_context=ssl_context, + username=os.environ["USERNAME"], + password=os.environ["PASSWORD"], +) + +broker = KafkaBroker("localhost:9092", security=security) +app = FastStream(broker) + +to_class = broker.publisher("class") + + +@broker.subscriber("student_application") +async def on_application(msg: Student, logger: Logger) -> None: + await to_class.publish(msg) diff --git a/search/examples/example_scram512_security/app_skeleton.py b/search/examples/example_scram512_security/app_skeleton.py new file mode 100644 index 0000000..3939dec --- /dev/null +++ b/search/examples/example_scram512_security/app_skeleton.py @@ -0,0 +1,44 @@ +import os +import ssl + +from pydantic import BaseModel, Field + +from faststream import FastStream, Logger +from faststream.broker.security import SASLScram512 +from faststream.kafka import KafkaBroker + + +class Student(BaseModel): + name: str = Field(..., examples=["Student Studentis"], description="Name example") + age: int = Field( + ..., + examples=[ + 20, + ], + description="Student age", + ) + + +ssl_context = ssl.create_default_context() +security = SASLScram512( + ssl_context=ssl_context, + username=os.environ["USERNAME"], + password=os.environ["PASSWORD"], +) + +broker = KafkaBroker("localhost:9092", security=security) +app = FastStream(broker) + + +@broker.publisher("class") +@broker.subscriber("student_application") +async def 
on_application(msg: Student, logger: Logger) -> Student: + """ + Processes a message from the 'student_application' topic and publishes the same message to the 'class' topic. + + Instructions: + 1. Consume a message from 'student_application' topic. + 2. Publish the same message to the 'class' topic. + + """ + raise NotImplementedError() diff --git a/search/examples/example_scram512_security/description.txt b/search/examples/example_scram512_security/description.txt new file mode 100644 index 0000000..b7b00fa --- /dev/null +++ b/search/examples/example_scram512_security/description.txt @@ -0,0 +1,4 @@ +FastStream application that handles the incoming students from the "student_application" topic. +The Student is then passed to the "class" topic. +Student has a name and age. +The communication with the broker is encrypted with ssl and uses SASL Scram512 for authorization. Username and password are loaded from environment variables. diff --git a/search/examples/example_scram512_security/test_app.py b/search/examples/example_scram512_security/test_app.py new file mode 100644 index 0000000..40fd2d3 --- /dev/null +++ b/search/examples/example_scram512_security/test_app.py @@ -0,0 +1,34 @@ +import os +from unittest import mock + +import pytest + +from faststream.kafka import TestKafkaBroker + +with mock.patch.dict( + os.environ, + {"USERNAME": "username", "PASSWORD": "password"}, # pragma: allowlist secret +): + from .app import Student, broker, on_application, to_class + + +@broker.subscriber("class") +async def on_class(msg: Student) -> None: + pass + + +@pytest.mark.asyncio +async def test_app(): + async with TestKafkaBroker(broker): + await broker.publish( + Student(name="Student Studentis", age=12), "student_application" + ) + on_application.mock.assert_called_with( + dict(Student(name="Student Studentis", age=12)) + ) + to_class.mock.assert_called_with( + dict(Student(name="Student Studentis", age=12)) + ) + on_class.mock.assert_called_with( + dict(Student(name="Student Studentis", age=12)) + ) diff --git a/search/examples/example_scrape_weather/__init__.py b/search/examples/example_scrape_weather/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/search/examples/example_scrape_weather/app.py b/search/examples/example_scrape_weather/app.py new file mode 100644 index 0000000..4a2a2e7 --- /dev/null +++ b/search/examples/example_scrape_weather/app.py @@ -0,0 +1,103 @@ +import asyncio +import json +from datetime import datetime + +import httpx +from pydantic import BaseModel, Field, NonNegativeFloat + +from faststream import ContextRepo, FastStream, Logger +from faststream.kafka import KafkaBroker + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + + +publisher = broker.publisher("weather") + + +class Weather(BaseModel): + latitude: NonNegativeFloat = Field( + ..., + examples=[22.5], + description="Latitude measures the distance north or south of the equator.", + ) + longitude: NonNegativeFloat = Field( + ..., + examples=[55], + description="Longitude measures distance east or west of the prime meridian.", + ) + temperature: float = Field( + ..., examples=[20], description="Temperature in Celsius degrees" + ) + windspeed: NonNegativeFloat = Field( + ..., examples=[20], description="Wind speed in kilometers per hour" + ) + time: str = Field( + ..., examples=["2023-09-13T07:00"], description="The time of the day" + ) + + +@app.on_startup +async def app_setup(context: ContextRepo): + context.set_global("app_is_running", True) + + +@app.on_shutdown +async 
def shutdown(context: ContextRepo): + context.set_global("app_is_running", False) + + # Get all the running tasks and wait for them to finish + publish_tasks = context.get("publish_tasks") + await asyncio.gather(*publish_tasks) + + +async def _fetch_and_publish_weather( + latitude: float, + longitude: float, + logger: Logger, + context: ContextRepo, + time_interval: int = 5, +) -> None: + # Always use context: ContextRepo for storing the app_is_running variable + while context.get("app_is_running"): + uri = f"https://api.open-meteo.com/v1/forecast?current_weather=true&latitude={latitude}&longitude={longitude}" + async with httpx.AsyncClient() as client: + response = await client.get(uri) + + if response.status_code == 200: + # read json response + raw_data = json.loads(response.content) + temperature = raw_data["current_weather"]["temperature"] + windspeed = raw_data["current_weather"]["windspeed"] + time = raw_data["current_weather"]["time"] + + new_data = Weather( + latitude=latitude, + longitude=longitude, + temperature=temperature, + windspeed=windspeed, + time=time, + ) + key = str(latitude) + "_" + str(longitude) + await publisher.publish(new_data, key=key.encode("utf-8")) + else: + logger.warning(f"Failed API request {uri} at time {datetime.now()}") + + await asyncio.sleep(time_interval) + + +@app.after_startup +async def publish_weather(logger: Logger, context: ContextRepo): + logger.info("Starting publishing:") + + latitudes = [13, 50, 44, 24] + longitudes = [17, 13, 45, 70] + # start scraping and producing to kafka topic + publish_tasks = [ + asyncio.create_task( + _fetch_and_publish_weather(latitude, longitude, logger, context) + ) + for latitude, longitude in zip(latitudes, longitudes) + ] + # save the asyncio tasks so you can wait for them to finish at app shutdown (in the function decorated with @app.on_shutdown) + context.set_global("publish_tasks", publish_tasks) diff --git a/search/examples/example_scrape_weather/app_skeleton.py b/search/examples/example_scrape_weather/app_skeleton.py new file mode 100644 index 0000000..e052fdc --- /dev/null +++ b/search/examples/example_scrape_weather/app_skeleton.py @@ -0,0 +1,89 @@ +from pydantic import BaseModel, Field, NonNegativeFloat + +from faststream import ContextRepo, FastStream, Logger +from faststream.kafka import KafkaBroker + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + + +class Weather(BaseModel): + latitude: NonNegativeFloat = Field( + ..., + examples=[22.5], + description="Latitude measures the distance north or south of the equator.", + ) + longitude: NonNegativeFloat = Field( + ..., + examples=[55], + description="Longitude measures distance east or west of the prime meridian.", + ) + temperature: float = Field( + ..., examples=[20], description="Temperature in Celsius degrees" + ) + windspeed: NonNegativeFloat = Field( + ..., examples=[20], description="Wind speed in kilometers per hour" + ) + time: str = Field( + ..., examples=["2023-09-13T07:00"], description="The time of the day" + ) + + +@app.on_startup +async def app_setup(context: ContextRepo): + """ + Set all necessary global variables inside ContextRepo object: + Set app_is_running to True - we will use this variable as running loop condition + """ + raise NotImplementedError() + + +@app.on_shutdown +async def shutdown(context: ContextRepo): + """ + Set all necessary global variables inside ContextRepo object: + Set app_is_running to False + + Get all the started tasks from the context and wait for them to finish + """ + raise NotImplementedError() + + 
+async def fetch_and_publish_weather( + latitude: float, + longitude: float, + logger: Logger, + context: ContextRepo, + time_interval: int = 5, +) -> None: + """ + While the app_is_running variable inside the context is True, repeat the following process: + Get the weather information by sending a GET request to "https://api.open-meteo.com/v1/forecast?current_weather=true" + At the end of the url you should add the additional 'latitude' and 'longitude' parameters, which are of type float. + Here is an example url for fetching information for latitude=52.3 and longitude=13.2: + "https://api.open-meteo.com/v1/forecast?current_weather=true&latitude=52.3&longitude=13.2" + + From the response we want to get info about the temperature (float), windspeed (float) and time (string), and you can find them in: + response["current_weather"]["temperature"], response["current_weather"]["windspeed"], and response["current_weather"]["time"] + + We need to fetch this data, construct the Weather object and publish it to the 'weather' topic. + For each message you publish, you must use a key constructed as: + string value of latitude + '_' + string value of longitude + + Asynchronously sleep for time_interval seconds. + """ + raise NotImplementedError() + + +@app.after_startup +async def publish_weather(logger: Logger, context: ContextRepo): + """ + Create asynchronous tasks that execute the fetch_and_publish_weather function. + Run this process for the following latitude and longitude combinations: + - latitude=13 and longitude=17 + - latitude=50 and longitude=13 + - latitude=44 and longitude=45 + - latitude=24 and longitude=70 + Put all created tasks in a list and set it as a global variable in the context (it is needed so we can wait for these tasks at app shutdown) + """ + raise NotImplementedError() diff --git a/search/examples/example_scrape_weather/description.txt b/search/examples/example_scrape_weather/description.txt new file mode 100644 index 0000000..593c2e8 --- /dev/null +++ b/search/examples/example_scrape_weather/description.txt @@ -0,0 +1,26 @@ +Develop a FastStream application which will fetch weather information from the web until the app shuts down. + +You can get the weather information by sending a GET request to "https://api.open-meteo.com/v1/forecast?current_weather=true" +At the end of the url you should add the additional 'latitude' and 'longitude' parameters, which are of type float. +Here is an example url for fetching information for latitude=52.3 and longitude=13.2: + "https://api.open-meteo.com/v1/forecast?current_weather=true&latitude=52.3&longitude=13.2" + +From the response we want to get info about the temperature (float), windspeed (float) and time (string), and you can find them in: + response["current_weather"]["temperature"], response["current_weather"]["windspeed"], and response["current_weather"]["time"] + +We need to fetch this data every 5 seconds and publish it to the 'weather' topic. 
+For each message you publish, you must use a key constructed as: + string value of latitude + '_' + string value of longitude + +The message that we publish needs to have the following parameters: + - latitude (type float) + - longitude (type float) + - temperature (type float) + - windspeed (type float) + - time (type string) + +We need to run this process for the following latitude and longitude combinations: + - latitude=13 and longitude=17 + - latitude=50 and longitude=13 + - latitude=44 and longitude=45 + - latitude=24 and longitude=70 diff --git a/search/examples/example_scrape_weather/test_app.py b/search/examples/example_scrape_weather/test_app.py new file mode 100644 index 0000000..41dbfdf --- /dev/null +++ b/search/examples/example_scrape_weather/test_app.py @@ -0,0 +1,19 @@ +import pytest + +from faststream import Context, TestApp +from faststream.kafka import TestKafkaBroker + +from .app import Weather, app, broker + + +@broker.subscriber("weather") +async def on_weather(msg: Weather, key: bytes = Context("message.raw_message.key")): + pass + + +@pytest.mark.asyncio +async def test_message_was_published(): + async with TestKafkaBroker(broker): + async with TestApp(app): + await on_weather.wait_call(3) + on_weather.mock.assert_called() diff --git a/search/examples/example_social_media_post/__init__.py b/search/examples/example_social_media_post/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/search/examples/example_social_media_post/app.py b/search/examples/example_social_media_post/app.py new file mode 100644 index 0000000..9b187e1 --- /dev/null +++ b/search/examples/example_social_media_post/app.py @@ -0,0 +1,34 @@ +from pydantic import BaseModel, Field, NonNegativeInt + +from faststream import FastStream, Logger +from faststream.kafka import KafkaBroker + + +class Post(BaseModel): + user_id: NonNegativeInt = Field(..., examples=[1], description="Int data example") + text: str = Field(..., examples=["Just another day"], description="text example") + number_of_likes: NonNegativeInt = Field( + ..., examples=[1], description="Int data example" + ) + + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + + +@broker.publisher("just_text") +@broker.subscriber("popular_post") +async def on_popular_post(msg: Post, logger: Logger) -> str: + logger.info(msg) + return msg.text + + +to_popular_post = broker.publisher("popular_post") + + +@broker.subscriber("new_post") +async def on_new_post(msg: Post, logger: Logger) -> None: + logger.info(msg) + + if msg.number_of_likes > 10: + await to_popular_post.publish(msg) diff --git a/search/examples/example_social_media_post/app_skeleton.py b/search/examples/example_social_media_post/app_skeleton.py new file mode 100644 index 0000000..a090821 --- /dev/null +++ b/search/examples/example_social_media_post/app_skeleton.py @@ -0,0 +1,49 @@ +from pydantic import BaseModel, Field, NonNegativeInt + +from faststream import FastStream, Logger +from faststream.kafka import KafkaBroker + + +class Post(BaseModel): + user_id: NonNegativeInt = Field(..., examples=[1], description="Int data example") + text: str = Field(..., examples=["Just another day"], description="text example") + number_of_likes: NonNegativeInt = Field( + ..., examples=[1], description="Int data example" + ) + + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + + +@broker.publisher("just_text") +@broker.subscriber("popular_post") +async def on_popular_post(msg: Post, logger: Logger) -> str: + """ + Processes a message from the 
'popular_post' topic. + Publishes the text attribute of the post to the 'just_text' topic. + + Instructions: + 1. Consume a message from 'popular_post' topic. + 2. Publish the text attribute of the post to the 'just_text' topic. + + """ + raise NotImplementedError() + + +to_popular_post = broker.publisher("popular_post") + + +@broker.subscriber("new_post") +async def on_new_post(msg: Post, logger: Logger) -> None: + """ + Processes a message from the 'new_post' topic. + Upon reception, the function should check if the number_of_likes attribute is greater than 10. If yes, publish the current message to the 'popular_post' topic. + + Instructions: + 1. Consume a message from 'new_post' topic. + 2. Check if the number_of_likes attribute is greater than 10. + 3. If 2. is True, publish the current message to the 'popular_post' topic. + + """ + raise NotImplementedError() diff --git a/search/examples/example_social_media_post/description.txt b/search/examples/example_social_media_post/description.txt new file mode 100644 index 0000000..504b9d2 --- /dev/null +++ b/search/examples/example_social_media_post/description.txt @@ -0,0 +1,4 @@ +FastStream application for social media. +Create a function which reads from the 'new_post' topic. Messages which come to this topic have 3 attributes: user_id, text and number_of_likes. +If the received Post has more than 10 likes, publish it to the 'popular_post' topic. +While consuming from the popular_post topic, publish the text attribute of the post to the 'just_text' topic. diff --git a/search/examples/example_social_media_post/test_app.py b/search/examples/example_social_media_post/test_app.py new file mode 100644 index 0000000..134d13b --- /dev/null +++ b/search/examples/example_social_media_post/test_app.py @@ -0,0 +1,38 @@ +import pytest + +from faststream.kafka import TestKafkaBroker + +from .app import Post, broker, on_new_post, on_popular_post + + +@broker.subscriber("just_text") +async def on_just_text(msg: str) -> None: + pass + + +@pytest.mark.asyncio +async def test_post_with_just_two_likes(): + async with TestKafkaBroker(broker): + await broker.publish( + Post(user_id=1, text="bad post", number_of_likes=2), "new_post" + ) + on_new_post.mock.assert_called_with( + dict(Post(user_id=1, text="bad post", number_of_likes=2)) + ) + on_popular_post.mock.assert_not_called() + on_just_text.mock.assert_not_called() + + +@pytest.mark.asyncio +async def test_post_with_many_likes(): + async with TestKafkaBroker(broker): + await broker.publish( + Post(user_id=1, text="cool post", number_of_likes=100), "new_post" + ) + on_new_post.mock.assert_called_with( + dict(Post(user_id=1, text="cool post", number_of_likes=100)) + ) + on_popular_post.mock.assert_called_with( + dict(Post(user_id=1, text="cool post", number_of_likes=100)) + ) + on_just_text.mock.assert_called_with("cool post") diff --git a/search/examples/example_student_query/__init__.py b/search/examples/example_student_query/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/search/examples/example_student_query/app.py b/search/examples/example_student_query/app.py new file mode 100644 index 0000000..4c2f17d --- /dev/null +++ b/search/examples/example_student_query/app.py @@ -0,0 +1,46 @@ +from datetime import datetime + +from pydantic import BaseModel, Field, NonNegativeInt + +from faststream import FastStream, Logger +from faststream.kafka import KafkaBroker + + +class StudentQuery(BaseModel): + student_id: NonNegativeInt = Field( + ..., examples=[1], description="Int data example" + ) + 
department: str = Field( + ..., examples=["academic_department"], description="Department example" + ) + query: str = Field( + ..., examples=["Please help me with..."], description="Query example" + ) + time: datetime = Field( + ..., + examples=["2020-04-23 10:20:30.400000"], + description="The timestamp of the record", + ) + + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + +to_finance_department = broker.publisher("finance_department") +to_academic_department = broker.publisher("academic_department") +to_admissions_department = broker.publisher("admissions_department") +to_unclassified_query = broker.publisher("unclassified_query") + + +@broker.subscriber("student_query") +async def on_student_query(msg: StudentQuery, logger: Logger) -> None: + logger.info(msg) + + if msg.department == "finance_department": + await to_finance_department.publish(msg) + elif msg.department == "academic_department": + await to_academic_department.publish(msg) + elif msg.department == "admissions_department": + await to_admissions_department.publish(msg) + else: + await to_unclassified_query.publish(msg) diff --git a/search/examples/example_student_query/app_skeleton.py b/search/examples/example_student_query/app_skeleton.py new file mode 100644 index 0000000..117f2a1 --- /dev/null +++ b/search/examples/example_student_query/app_skeleton.py @@ -0,0 +1,49 @@ +from datetime import datetime + +from pydantic import BaseModel, Field, NonNegativeInt + +from faststream import FastStream, Logger +from faststream.kafka import KafkaBroker + + +class StudentQuery(BaseModel): + student_id: NonNegativeInt = Field( + ..., examples=[1], description="Int data example" + ) + department: str = Field( + ..., examples=["academic_department"], description="Department example" + ) + query: str = Field( + ..., examples=["Please help me with..."], description="Query example" + ) + time: datetime = Field( + ..., + examples=["2020-04-23 10:20:30.400000"], + description="The timestamp of the record", + ) + + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + +to_finance_department = broker.publisher("finance_department") +to_academic_department = broker.publisher("academic_department") +to_admissions_department = broker.publisher("admissions_department") +to_unclassified_query = broker.publisher("unclassified_query") + + +@broker.subscriber("student_query") +async def on_student_query(msg: StudentQuery, logger: Logger) -> None: + """ + Processes a message from the 'student_query' topic. + Each query should then be forwarded to the corresponding department based on the department attribute. + The relevant department topics could be 'finance_department', 'academic_department', or 'admissions_department'. + If department is not one of these topics, forward the message to the 'unclassified_query' topic. + + Instructions: + 1. Consume a message from 'student_query' topic. + 2. Check the department attribute - The relevant department topics could be 'finance_department', 'academic_department', or 'admissions_department'. + 3. If the department is one of the relevant departments, forward the message to that topic. Otherwise forward the message to the 'unclassified_query' topic. 
+ + """ + raise NotImplementedError() diff --git a/search/examples/example_student_query/description.txt b/search/examples/example_student_query/description.txt new file mode 100644 index 0000000..bc9c90d --- /dev/null +++ b/search/examples/example_student_query/description.txt @@ -0,0 +1,4 @@ +Create a FastStream application using localhost as a broker. Consume from 'student_query' topic, which includes attributes: student_id, department and query, time. +Each query should then be forwarded to the corresponding department based on the department attribute. +The relevant department topics could be 'finance_department', 'academic_department', or 'admissions_department'. +If department is not one of these topics, forward the message to the 'unclassified_query' topic. diff --git a/search/examples/example_student_query/test_app.py b/search/examples/example_student_query/test_app.py new file mode 100644 index 0000000..a4c13a8 --- /dev/null +++ b/search/examples/example_student_query/test_app.py @@ -0,0 +1,58 @@ +from datetime import datetime + +import pytest + +from faststream._compat import model_to_jsonable +from faststream.kafka import TestKafkaBroker + +from .app import StudentQuery, broker, on_student_query + + +@broker.subscriber("finance_department") +async def on_finance_department(msg: StudentQuery) -> None: + pass + + +@broker.subscriber("academic_department") +async def on_academic_department(msg: StudentQuery) -> None: + pass + + +@broker.subscriber("admissions_department") +async def on_admissions_department(msg: StudentQuery) -> None: + pass + + +@broker.subscriber("unclassified_query") +async def on_unclassified_query(msg: StudentQuery) -> None: + pass + + +@pytest.mark.asyncio +async def test_message_published_to_correct_topic(): + async with TestKafkaBroker(broker): + time = datetime.now() + await broker.publish( + StudentQuery( + student_id=1, + department="admissions_department", + query="Help me with...", + time=time, + ), + "student_query", + ) + student_query_json = model_to_jsonable( + StudentQuery( + student_id=1, + department="admissions_department", + query="Help me with...", + time=time, + ) + ) + + on_student_query.mock.assert_called_with(student_query_json) + on_admissions_department.mock.assert_called_with(student_query_json) + + on_finance_department.mock.assert_not_called() + on_academic_department.mock.assert_not_called() + on_unclassified_query.mock.assert_not_called() diff --git a/search/examples/example_weather_updates/__init__.py b/search/examples/example_weather_updates/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/search/examples/example_weather_updates/app.py b/search/examples/example_weather_updates/app.py new file mode 100644 index 0000000..780fbed --- /dev/null +++ b/search/examples/example_weather_updates/app.py @@ -0,0 +1,30 @@ +from pydantic import BaseModel, Field + +from faststream import FastStream, Logger +from faststream.kafka import KafkaBroker + + +class WeatherConditions(BaseModel): + city: str = Field(..., examples=["Zagreb"], description="City example") + temperature: float + conditions: str = Field( + ..., examples=["Mostly Cloudy"], description="Conditions example" + ) + + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + +to_weather_alerts = broker.publisher("weather_alerts") + + +@broker.subscriber("weather_updates") +async def on_weather_updates(msg: WeatherConditions, logger: Logger) -> None: + logger.info(msg) + + if msg.temperature > 40 or msg.temperature < -10: + alert_city = "Alert: " + 
msg.city + alert_msg = WeatherConditions( + city=alert_city, temperature=msg.temperature, conditions=msg.conditions + ) + await to_weather_alerts.publish(alert_msg) diff --git a/search/examples/example_weather_updates/app_skeleton.py b/search/examples/example_weather_updates/app_skeleton.py new file mode 100644 index 0000000..2c9e3c2 --- /dev/null +++ b/search/examples/example_weather_updates/app_skeleton.py @@ -0,0 +1,35 @@ +from pydantic import BaseModel, Field + +from faststream import FastStream, Logger +from faststream.kafka import KafkaBroker + + +class WeatherConditions(BaseModel): + city: str = Field(..., examples=["Zagreb"], description="City example") + temperature: float + conditions: str = Field( + ..., examples=["Mostly Cloudy"], description="Conditions example" + ) + + +broker = KafkaBroker("localhost:9092") +app = FastStream(broker) + +to_weather_alerts = broker.publisher("weather_alerts") + + +@broker.subscriber("weather_updates") +async def on_weather_updates(msg: WeatherConditions, logger: Logger) -> None: + """ + Processes a message from the 'weather_updates' topic. + Upon reception, the function should verify if the temperature attribute is above 40 or below -10. + If yes, prepend the string 'Alert: ' to the city attribute and publish this message to the 'weather_alerts' topic. + + Instructions: + 1. Consume a message from 'weather_updates' topic. + 2. Create a new message object (do not directly modify the original). + 3. Check if the temperature attribute is above 40 or below -10. + 4. If 3. is True, prepend the string 'Alert: ' to the city attribute and publish this message to the 'weather_alerts' topic. + + """ + raise NotImplementedError() diff --git a/search/examples/example_weather_updates/description.txt b/search/examples/example_weather_updates/description.txt new file mode 100644 index 0000000..f826433 --- /dev/null +++ b/search/examples/example_weather_updates/description.txt @@ -0,0 +1,3 @@ +FastStream application for consuming messages from the 'weather_updates' topic where the message includes attributes: city, temperature, and conditions. +For every consumed message, prepend the string 'Alert: ' to the city attribute if the temperature attribute is above 40 or below -10. +Publish this message to the 'weather_alerts' topic. 
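A hedged extra-test sketch (not part of this patch): the strict > 40 / < -10 boundaries above could also be exercised with pytest.mark.parametrize, assuming the same package layout as the test_app.py that follows.

# test_boundaries_sketch.py -- hypothetical additional test, same package as the example
import pytest

from faststream.kafka import TestKafkaBroker

from .app import WeatherConditions, broker


@broker.subscriber("weather_alerts")
async def on_alerts_probe(msg: WeatherConditions) -> None:
    pass


@pytest.mark.asyncio
@pytest.mark.parametrize(
    "temperature,expect_alert",
    [(40.5, True), (40.0, False), (-10.0, False), (-10.5, True)],  # alerts only strictly outside [-10, 40]
)
async def test_alert_boundaries(temperature: float, expect_alert: bool):
    async with TestKafkaBroker(broker):
        await broker.publish(
            WeatherConditions(city="Zagreb", temperature=temperature, conditions="Sunny"),
            "weather_updates",
        )
        if expect_alert:
            on_alerts_probe.mock.assert_called_once()
        else:
            on_alerts_probe.mock.assert_not_called()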
diff --git a/search/examples/example_weather_updates/test_app.py b/search/examples/example_weather_updates/test_app.py new file mode 100644 index 0000000..aed2012 --- /dev/null +++ b/search/examples/example_weather_updates/test_app.py @@ -0,0 +1,44 @@ +import pytest + +from faststream.kafka import TestKafkaBroker + +from .app import WeatherConditions, broker, on_weather_updates + + +@broker.subscriber("weather_alerts") +async def on_weather_alerts(msg: WeatherConditions) -> None: + pass + + +@pytest.mark.asyncio +async def test_not_published_to_weather_alerts(): + async with TestKafkaBroker(broker): + await broker.publish( + WeatherConditions(city="Zagreb", temperature=20.5, conditions="Sunny"), + "weather_updates", + ) + on_weather_updates.mock.assert_called_with( + dict(WeatherConditions(city="Zagreb", temperature=20.5, conditions="Sunny")) + ) + + on_weather_alerts.mock.assert_not_called() + + +@pytest.mark.asyncio +async def test_published_to_weather_alerts(): + async with TestKafkaBroker(broker): + await broker.publish( + WeatherConditions(city="Zagreb", temperature=-15, conditions="Sunny"), + "weather_updates", + ) + on_weather_updates.mock.assert_called_with( + dict(WeatherConditions(city="Zagreb", temperature=-15, conditions="Sunny")) + ) + + on_weather_alerts.mock.assert_called_with( + dict( + WeatherConditions( + city="Alert: Zagreb", temperature=-15, conditions="Sunny" + ) + ) + ) From 072505ff0e8dd6ac62a991dcf054536d541d94f8 Mon Sep 17 00:00:00 2001 From: harishmohanraj Date: Mon, 9 Oct 2023 11:56:10 +0000 Subject: [PATCH 2/5] Update version --- faststream_gen/__init__.py | 2 +- settings.ini | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/faststream_gen/__init__.py b/faststream_gen/__init__.py index 32efefd..5c796c0 100644 --- a/faststream_gen/__init__.py +++ b/faststream_gen/__init__.py @@ -1 +1 @@ -__version__ = "0.1.6" \ No newline at end of file +__version__ = "0.1.7rc0" \ No newline at end of file diff --git a/settings.ini b/settings.ini index 6c38cd3..4e7661a 100644 --- a/settings.ini +++ b/settings.ini @@ -5,7 +5,7 @@ ### Python library ### repo = faststream-gen lib_name = %(repo)s -version = 0.1.6 +version = 0.1.7rc0 min_python = 3.8 license = apache2 black_formatting = False From 86f8e2c0e293ea5e0d0070f06017228c77bec0f6 Mon Sep 17 00:00:00 2001 From: harishmohanraj Date: Mon, 9 Oct 2023 12:32:01 +0000 Subject: [PATCH 3/5] Remove unwanted files --- search/docs/api/faststream/app/ABCApp.md | 3 - search/docs/api/faststream/app/FastStream.md | 3 - .../asyncapi/base/AsyncAPIOperation.md | 3 - .../generate/get_app_broker_channels.md | 3 - .../generate/get_app_broker_server.md | 3 - .../asyncapi/generate/get_app_schema.md | 3 - .../asyncapi/message/get_model_schema.md | 3 - .../asyncapi/message/get_response_schema.md | 3 - .../asyncapi/message/parse_handler_params.md | 3 - .../schema/bindings/amqp/ChannelBinding.md | 3 - .../asyncapi/schema/bindings/amqp/Exchange.md | 3 - .../schema/bindings/amqp/OperationBinding.md | 3 - .../asyncapi/schema/bindings/amqp/Queue.md | 3 - .../schema/bindings/amqp/ServerBinding.md | 3 - .../schema/bindings/kafka/ChannelBinding.md | 3 - .../schema/bindings/kafka/OperationBinding.md | 3 - .../schema/bindings/kafka/ServerBinding.md | 3 - .../schema/bindings/main/ChannelBinding.md | 3 - .../schema/bindings/main/OperationBinding.md | 3 - .../schema/bindings/main/ServerBinding.md | 3 - .../schema/bindings/nats/ChannelBinding.md | 3 - .../schema/bindings/nats/OperationBinding.md | 3 - .../schema/bindings/nats/ServerBinding.md | 3 - 
.../schema/bindings/redis/ChannelBinding.md | 3 - .../schema/bindings/redis/OperationBinding.md | 3 - .../schema/bindings/redis/ServerBinding.md | 3 - .../schema/bindings/sqs/ChannelBinding.md | 3 - .../schema/bindings/sqs/OperationBinding.md | 3 - .../schema/bindings/sqs/ServerBinding.md | 3 - .../asyncapi/schema/channels/Channel.md | 3 - .../asyncapi/schema/info/Contact.md | 3 - .../asyncapi/schema/info/ContactDict.md | 3 - .../asyncapi/schema/info/EmailStr.md | 3 - .../faststream/asyncapi/schema/info/Info.md | 3 - .../asyncapi/schema/info/License.md | 3 - .../asyncapi/schema/info/LicenseDict.md | 3 - .../asyncapi/schema/main/Components.md | 3 - .../faststream/asyncapi/schema/main/Schema.md | 3 - .../asyncapi/schema/message/CorrelationId.md | 3 - .../asyncapi/schema/message/Message.md | 3 - .../asyncapi/schema/operations/Operation.md | 3 - .../asyncapi/schema/security/OauthFlowObj.md | 3 - .../asyncapi/schema/security/OauthFlows.md | 3 - .../security/SecuritySchemaComponent.md | 3 - .../asyncapi/schema/servers/Server.md | 3 - .../asyncapi/schema/servers/ServerVariable.md | 3 - .../asyncapi/schema/utils/ExternalDocs.md | 3 - .../asyncapi/schema/utils/ExternalDocsDict.md | 3 - .../asyncapi/schema/utils/Parameter.md | 3 - .../asyncapi/schema/utils/Reference.md | 3 - .../faststream/asyncapi/schema/utils/Tag.md | 3 - .../asyncapi/schema/utils/TagDict.md | 3 - .../asyncapi/site/get_asyncapi_html.md | 3 - .../api/faststream/asyncapi/site/serve_app.md | 3 - .../asyncapi/utils/resolve_payloads.md | 3 - .../faststream/asyncapi/utils/to_camelcase.md | 3 - .../broker/core/abc/BrokerUsecase.md | 3 - .../broker/core/abc/extend_dependencies.md | 3 - .../core/asyncronous/BrokerAsyncUsecase.md | 3 - .../broker/core/asyncronous/default_filter.md | 3 - .../broker/core/mixins/LoggingMixin.md | 3 - .../broker/fastapi/route/StreamMessage.md | 3 - .../broker/fastapi/route/StreamRoute.md | 3 - .../broker/fastapi/route/get_app.md | 3 - .../broker/fastapi/router/StreamRouter.md | 3 - .../faststream/broker/handler/AsyncHandler.md | 3 - .../faststream/broker/handler/BaseHandler.md | 3 - .../broker/message/ABCStreamMessage.md | 3 - .../broker/message/StreamMessage.md | 3 - .../broker/message/SyncStreamMessage.md | 3 - .../broker/middlewares/BaseMiddleware.md | 3 - .../middlewares/CriticalLogMiddleware.md | 3 - .../broker/parsers/decode_message.md | 3 - .../broker/parsers/encode_message.md | 3 - .../broker/parsers/resolve_custom_func.md | 3 - .../broker/publisher/BasePublisher.md | 3 - .../broker/push_back_watcher/BaseWatcher.md | 3 - .../push_back_watcher/CounterWatcher.md | 3 - .../push_back_watcher/EndlessWatcher.md | 3 - .../broker/push_back_watcher/OneTryWatcher.md | 3 - .../push_back_watcher/WatcherContext.md | 3 - .../faststream/broker/router/BrokerRoute.md | 3 - .../faststream/broker/router/BrokerRouter.md | 3 - .../faststream/broker/schemas/NameRequired.md | 3 - .../faststream/broker/schemas/RawDecoced.md | 3 - .../broker/security/BaseSecurity.md | 3 - .../broker/security/SASLPlaintext.md | 3 - .../broker/security/SASLScram256.md | 3 - .../broker/security/SASLScram512.md | 3 - .../api/faststream/broker/test/TestApp.md | 3 - .../api/faststream/broker/test/TestBroker.md | 3 - .../faststream/broker/test/call_handler.md | 3 - .../broker/test/patch_broker_calls.md | 3 - .../broker/types/AsyncPublisherProtocol.md | 3 - .../broker/utils/change_logger_handlers.md | 3 - .../faststream/broker/utils/get_watcher.md | 3 - .../broker/utils/set_message_context.md | 3 - .../broker/wrapper/FakePublisher.md | 3 - 
.../broker/wrapper/HandlerCallWrapper.md | 3 - .../docs/api/faststream/cli/docs/app/gen.md | 3 - .../docs/api/faststream/cli/docs/app/serve.md | 3 - search/docs/api/faststream/cli/main/main.md | 3 - search/docs/api/faststream/cli/main/run.md | 3 - .../faststream/cli/main/version_callback.md | 3 - .../cli/supervisors/basereload/BaseReload.md | 3 - .../supervisors/multiprocess/Multiprocess.md | 3 - .../cli/supervisors/utils/get_subprocess.md | 3 - .../cli/supervisors/utils/set_exit.md | 3 - .../supervisors/utils/subprocess_started.md | 3 - .../supervisors/watchfiles/ExtendedFilter.md | 3 - .../supervisors/watchfiles/WatchReloader.md | 3 - .../cli/utils/imports/get_app_path.md | 3 - .../cli/utils/imports/import_object.md | 3 - .../cli/utils/imports/try_import_app.md | 3 - .../faststream/cli/utils/logs/LogLevels.md | 3 - .../cli/utils/logs/get_log_level.md | 3 - .../cli/utils/logs/set_log_level.md | 3 - .../cli/utils/parser/parse_cli_args.md | 3 - .../cli/utils/parser/remove_prefix.md | 3 - .../api/faststream/constants/ContentTypes.md | 3 - .../api/faststream/exceptions/AckMessage.md | 3 - .../faststream/exceptions/HandlerException.md | 3 - .../api/faststream/exceptions/NackMessage.md | 3 - .../faststream/exceptions/RejectMessage.md | 3 - .../api/faststream/exceptions/SkipMessage.md | 3 - .../api/faststream/exceptions/StopConsume.md | 3 - .../api/faststream/kafka/asyncapi/Handler.md | 3 - .../faststream/kafka/asyncapi/Publisher.md | 3 - .../faststream/kafka/broker/KafkaBroker.md | 3 - .../faststream/kafka/fastapi/KafkaRouter.md | 3 - .../faststream/kafka/handler/LogicHandler.md | 3 - .../faststream/kafka/message/KafkaMessage.md | 3 - .../faststream/kafka/parser/AioKafkaParser.md | 3 - .../kafka/producer/AioKafkaFastProducer.md | 3 - .../kafka/publisher/LogicPublisher.md | 3 - .../faststream/kafka/router/KafkaRouter.md | 3 - .../kafka/security/parse_security.md | 3 - .../kafka/shared/logging/KafkaLoggingMixin.md | 3 - .../kafka/shared/publisher/ABCPublisher.md | 3 - .../kafka/shared/router/KafkaRouter.md | 3 - .../schemas/ConsumerConnectionParams.md | 3 - .../api/faststream/kafka/test/FakeProducer.md | 3 - .../faststream/kafka/test/TestKafkaBroker.md | 3 - .../faststream/kafka/test/build_message.md | 3 - .../log/formatter/ColourizedFormatter.md | 3 - .../log/formatter/expand_log_field.md | 3 - .../log/formatter/make_record_with_extra.md | 3 - .../log/logging/configure_formatter.md | 3 - .../api/faststream/nats/asyncapi/Handler.md | 3 - .../api/faststream/nats/asyncapi/Publisher.md | 3 - .../api/faststream/nats/broker/NatsBroker.md | 3 - .../api/faststream/nats/fastapi/NatsRouter.md | 3 - .../nats/handler/LogicNatsHandler.md | 3 - .../faststream/nats/helpers/StreamBuilder.md | 3 - .../api/faststream/nats/js_stream/JStream.md | 3 - .../faststream/nats/message/NatsMessage.md | 3 - .../api/faststream/nats/parser/NatsParser.md | 3 - .../nats/producer/NatsFastProducer.md | 3 - .../nats/producer/NatsJSFastProducer.md | 3 - .../nats/publisher/LogicPublisher.md | 3 - .../api/faststream/nats/router/NatsRouter.md | 3 - .../nats/shared/logging/NatsLoggingMixin.md | 3 - .../nats/shared/router/NatsRouter.md | 3 - .../api/faststream/nats/test/FakeProducer.md | 3 - .../faststream/nats/test/PatchedMessage.md | 3 - .../faststream/nats/test/TestNatsBroker.md | 3 - .../api/faststream/nats/test/build_message.md | 3 - .../api/faststream/rabbit/asyncapi/Handler.md | 3 - .../faststream/rabbit/asyncapi/Publisher.md | 3 - .../rabbit/asyncapi/RMQAsyncAPIChannel.md | 3 - .../faststream/rabbit/broker/RabbitBroker.md | 3 - 
 .../faststream/rabbit/fastapi/RabbitRouter.md | 3 -
 .../faststream/rabbit/handler/LogicHandler.md | 3 -
 .../rabbit/helpers/RabbitDeclarer.md | 3 -
 .../rabbit/message/RabbitMessage.md | 3 -
 .../faststream/rabbit/parser/AioPikaParser.md | 3 -
 .../rabbit/producer/AioPikaFastProducer.md | 3 -
 .../rabbit/publisher/LogicPublisher.md | 3 -
 .../faststream/rabbit/router/RabbitRouter.md | 3 -
 .../rabbit/security/parse_security.md | 3 -
 .../rabbit/shared/constants/ExchangeType.md | 3 -
 .../shared/logging/RabbitLoggingMixin.md | 3 -
 .../rabbit/shared/publisher/ABCPublisher.md | 3 -
 .../rabbit/shared/router/RabbitRouter.md | 3 -
 .../shared/schemas/BaseRMQInformation.md | 3 -
 .../rabbit/shared/schemas/RabbitExchange.md | 3 -
 .../rabbit/shared/schemas/RabbitQueue.md | 3 -
 .../rabbit/shared/schemas/get_routing_hash.md | 3 -
 .../faststream/rabbit/test/FakeProducer.md | 3 -
 .../faststream/rabbit/test/PatchedMessage.md | 3 -
 .../rabbit/test/TestRabbitBroker.md | 3 -
 .../faststream/rabbit/test/build_message.md | 3 -
 .../api/faststream/utils/classes/Singleton.md | 3 -
 .../utils/context/main/ContextRepo.md | 3 -
 .../faststream/utils/context/types/Context.md | 3 -
 .../utils/context/types/resolve_context.md | 3 -
 .../faststream/utils/data/filter_by_dict.md | 3 -
 .../get_function_positional_arguments.md | 3 -
 .../utils/functions/timeout_scope.md | 3 -
 .../faststream/utils/functions/to_async.md | 3 -
 .../api/faststream/utils/no_cast/NoCast.md | 3 -
 .../docs/getting-started/asyncapi/custom.md | 188 ----------
 .../docs/getting-started/asyncapi/export.md | 56 ---
 .../docs/getting-started/asyncapi/hosting.md | 38 --
 search/docs/getting-started/cli/index.md | 128 -------
 search/docs/getting-started/config/index.md | 167 ---------
 search/docs/getting-started/context/custom.md | 83 -----
 .../docs/getting-started/context/existed.md | 82 ----
 search/docs/getting-started/context/extra.md | 60 ---
 search/docs/getting-started/context/fields.md | 11 -
 search/docs/getting-started/context/index.md | 66 ----
 .../contributing/CONTRIBUTING.md | 143 -------
 .../docs/getting-started/contributing/docs.md | 44 ---
 .../getting-started/dependencies/class.md | 0
 .../getting-started/dependencies/global.md | 3 -
 .../getting-started/dependencies/index.md | 163 --------
 .../docs/getting-started/dependencies/sub.md | 0
 .../getting-started/dependencies/testing.md | 3 -
 .../getting-started/dependencies/yield.md | 0
 search/docs/getting-started/index.md | 39 --
 .../integrations/fastapi/index.md | 73 ----
 .../integrations/frameworks/index.md | 16 -
 search/docs/getting-started/lifespan/hooks.md | 119 ------
 search/docs/getting-started/lifespan/index.md | 11 -
 search/docs/getting-started/lifespan/test.md | 12 -
 search/docs/getting-started/logging.md | 208 -----------
 .../docs/getting-started/middlewares/index.md | 85 -----
 .../docs/getting-started/publishing/broker.md | 20 -
 .../getting-started/publishing/decorator.md | 34 --
 .../docs/getting-started/publishing/direct.md | 27 --
 .../docs/getting-started/publishing/index.md | 35 --
 .../docs/getting-started/publishing/object.md | 34 --
 .../docs/getting-started/publishing/test.md | 62 ---
 search/docs/getting-started/routers/index.md | 51 ---
 .../getting-started/serialization/decoder.md | 84 -----
 .../getting-started/serialization/examples.md | 123 ------
 .../getting-started/serialization/index.md | 20 -
 .../getting-started/serialization/parser.md | 106 ------
 .../subscription/annotation.md | 61 ---
 .../getting-started/subscription/filtering.md | 60 ---
 .../getting-started/subscription/index.md | 138 -------
 .../getting-started/subscription/pydantic.md | 43 ---
 .../docs/getting-started/subscription/test.md | 152 --------
 search/docs/getting-started/template/index.md | 207 ----------
 search/docs/index.md | 352 ------------------
 .../docs/kafka/Publisher/batch_publisher.md | 96 -----
 search/docs/kafka/Publisher/index.md | 126 -------
 search/docs/kafka/Publisher/using_a_key.md | 61 ---
 .../docs/kafka/Subscriber/batch_subscriber.md | 58 ---
 search/docs/kafka/Subscriber/index.md | 78 ----
 search/docs/kafka/ack.md | 88 -----
 search/docs/kafka/index.md | 61 ---
 search/docs/kafka/message.md | 53 ---
 search/docs/nats/examples/direct.md | 96 -----
 search/docs/nats/examples/pattern.md | 87 -----
 search/docs/nats/index.md | 35 --
 search/docs/nats/jetstream/ack.md | 78 ----
 search/docs/nats/jetstream/index.md | 53 ---
 search/docs/nats/jetstream/key-value.md | 107 ------
 search/docs/nats/jetstream/object.md | 111 ------
 search/docs/nats/message.md | 104 ------
 search/docs/nats/publishing/index.md | 40 --
 search/docs/nats/rpc.md | 45 ---
 search/docs/rabbit/ack.md | 90 -----
 search/docs/rabbit/declare.md | 40 --
 search/docs/rabbit/examples/direct.md | 129 -------
 search/docs/rabbit/examples/fanout.md | 93 -----
 search/docs/rabbit/examples/headers.md | 181 ---------
 search/docs/rabbit/examples/index.md | 36 --
 search/docs/rabbit/examples/stream.md | 36 --
 search/docs/rabbit/examples/topic.md | 113 ------
 search/docs/rabbit/index.md | 69 ----
 search/docs/rabbit/message.md | 104 ------
 search/docs/rabbit/publishing.md | 83 -----
 search/docs/rabbit/rpc.md | 42 ---
 search/docs/release.md | 36 --
 276 files changed, 6439 deletions(-)

 delete mode 100644 search/docs/api/faststream/app/ABCApp.md
 delete mode 100644 search/docs/api/faststream/app/FastStream.md
 delete mode 100644 search/docs/api/faststream/asyncapi/base/AsyncAPIOperation.md
 delete mode 100644 search/docs/api/faststream/asyncapi/generate/get_app_broker_channels.md
 delete mode 100644 search/docs/api/faststream/asyncapi/generate/get_app_broker_server.md
 delete mode 100644 search/docs/api/faststream/asyncapi/generate/get_app_schema.md
 delete mode 100644 search/docs/api/faststream/asyncapi/message/get_model_schema.md
 delete mode 100644 search/docs/api/faststream/asyncapi/message/get_response_schema.md
 delete mode 100644 search/docs/api/faststream/asyncapi/message/parse_handler_params.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/amqp/ChannelBinding.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/amqp/Exchange.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/amqp/OperationBinding.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/amqp/Queue.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/amqp/ServerBinding.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/kafka/ChannelBinding.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/kafka/OperationBinding.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/kafka/ServerBinding.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/main/ChannelBinding.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/main/OperationBinding.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/main/ServerBinding.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/nats/ChannelBinding.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/nats/OperationBinding.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/nats/ServerBinding.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/redis/ChannelBinding.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/redis/OperationBinding.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/redis/ServerBinding.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/sqs/ChannelBinding.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/sqs/OperationBinding.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/bindings/sqs/ServerBinding.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/channels/Channel.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/info/Contact.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/info/ContactDict.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/info/EmailStr.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/info/Info.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/info/License.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/info/LicenseDict.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/main/Components.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/main/Schema.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/message/CorrelationId.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/message/Message.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/operations/Operation.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/security/OauthFlowObj.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/security/OauthFlows.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/security/SecuritySchemaComponent.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/servers/Server.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/servers/ServerVariable.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/utils/ExternalDocs.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/utils/ExternalDocsDict.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/utils/Parameter.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/utils/Reference.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/utils/Tag.md
 delete mode 100644 search/docs/api/faststream/asyncapi/schema/utils/TagDict.md
 delete mode 100644 search/docs/api/faststream/asyncapi/site/get_asyncapi_html.md
 delete mode 100644 search/docs/api/faststream/asyncapi/site/serve_app.md
 delete mode 100644 search/docs/api/faststream/asyncapi/utils/resolve_payloads.md
 delete mode 100644 search/docs/api/faststream/asyncapi/utils/to_camelcase.md
 delete mode 100644 search/docs/api/faststream/broker/core/abc/BrokerUsecase.md
 delete mode 100644 search/docs/api/faststream/broker/core/abc/extend_dependencies.md
 delete mode 100644 search/docs/api/faststream/broker/core/asyncronous/BrokerAsyncUsecase.md
 delete mode 100644 search/docs/api/faststream/broker/core/asyncronous/default_filter.md
 delete mode 100644 search/docs/api/faststream/broker/core/mixins/LoggingMixin.md
 delete mode 100644 search/docs/api/faststream/broker/fastapi/route/StreamMessage.md
 delete mode 100644 search/docs/api/faststream/broker/fastapi/route/StreamRoute.md
 delete mode 100644 search/docs/api/faststream/broker/fastapi/route/get_app.md
 delete mode 100644 search/docs/api/faststream/broker/fastapi/router/StreamRouter.md
 delete mode 100644 search/docs/api/faststream/broker/handler/AsyncHandler.md
 delete mode 100644 search/docs/api/faststream/broker/handler/BaseHandler.md
 delete mode 100644 search/docs/api/faststream/broker/message/ABCStreamMessage.md
 delete mode 100644 search/docs/api/faststream/broker/message/StreamMessage.md
 delete mode 100644 search/docs/api/faststream/broker/message/SyncStreamMessage.md
 delete mode 100644 search/docs/api/faststream/broker/middlewares/BaseMiddleware.md
 delete mode 100644 search/docs/api/faststream/broker/middlewares/CriticalLogMiddleware.md
 delete mode 100644 search/docs/api/faststream/broker/parsers/decode_message.md
 delete mode 100644 search/docs/api/faststream/broker/parsers/encode_message.md
 delete mode 100644 search/docs/api/faststream/broker/parsers/resolve_custom_func.md
 delete mode 100644 search/docs/api/faststream/broker/publisher/BasePublisher.md
 delete mode 100644 search/docs/api/faststream/broker/push_back_watcher/BaseWatcher.md
 delete mode 100644 search/docs/api/faststream/broker/push_back_watcher/CounterWatcher.md
 delete mode 100644 search/docs/api/faststream/broker/push_back_watcher/EndlessWatcher.md
 delete mode 100644 search/docs/api/faststream/broker/push_back_watcher/OneTryWatcher.md
 delete mode 100644 search/docs/api/faststream/broker/push_back_watcher/WatcherContext.md
 delete mode 100644 search/docs/api/faststream/broker/router/BrokerRoute.md
 delete mode 100644 search/docs/api/faststream/broker/router/BrokerRouter.md
 delete mode 100644 search/docs/api/faststream/broker/schemas/NameRequired.md
 delete mode 100644 search/docs/api/faststream/broker/schemas/RawDecoced.md
 delete mode 100644 search/docs/api/faststream/broker/security/BaseSecurity.md
 delete mode 100644 search/docs/api/faststream/broker/security/SASLPlaintext.md
 delete mode 100644 search/docs/api/faststream/broker/security/SASLScram256.md
 delete mode 100644 search/docs/api/faststream/broker/security/SASLScram512.md
 delete mode 100644 search/docs/api/faststream/broker/test/TestApp.md
 delete mode 100644 search/docs/api/faststream/broker/test/TestBroker.md
 delete mode 100644 search/docs/api/faststream/broker/test/call_handler.md
 delete mode 100644 search/docs/api/faststream/broker/test/patch_broker_calls.md
 delete mode 100644 search/docs/api/faststream/broker/types/AsyncPublisherProtocol.md
 delete mode 100644 search/docs/api/faststream/broker/utils/change_logger_handlers.md
 delete mode 100644 search/docs/api/faststream/broker/utils/get_watcher.md
 delete mode 100644 search/docs/api/faststream/broker/utils/set_message_context.md
 delete mode 100644 search/docs/api/faststream/broker/wrapper/FakePublisher.md
 delete mode 100644 search/docs/api/faststream/broker/wrapper/HandlerCallWrapper.md
 delete mode 100644 search/docs/api/faststream/cli/docs/app/gen.md
 delete mode 100644 search/docs/api/faststream/cli/docs/app/serve.md
 delete mode 100644 search/docs/api/faststream/cli/main/main.md
 delete mode 100644 search/docs/api/faststream/cli/main/run.md
 delete mode 100644 search/docs/api/faststream/cli/main/version_callback.md
 delete mode 100644 search/docs/api/faststream/cli/supervisors/basereload/BaseReload.md
 delete mode 100644 search/docs/api/faststream/cli/supervisors/multiprocess/Multiprocess.md
 delete mode 100644 search/docs/api/faststream/cli/supervisors/utils/get_subprocess.md
 delete mode 100644 search/docs/api/faststream/cli/supervisors/utils/set_exit.md
 delete mode 100644 search/docs/api/faststream/cli/supervisors/utils/subprocess_started.md
 delete mode 100644 search/docs/api/faststream/cli/supervisors/watchfiles/ExtendedFilter.md
 delete mode 100644 search/docs/api/faststream/cli/supervisors/watchfiles/WatchReloader.md
 delete mode 100644 search/docs/api/faststream/cli/utils/imports/get_app_path.md
 delete mode 100644 search/docs/api/faststream/cli/utils/imports/import_object.md
 delete mode 100644 search/docs/api/faststream/cli/utils/imports/try_import_app.md
 delete mode 100644 search/docs/api/faststream/cli/utils/logs/LogLevels.md
 delete mode 100644 search/docs/api/faststream/cli/utils/logs/get_log_level.md
 delete mode 100644 search/docs/api/faststream/cli/utils/logs/set_log_level.md
 delete mode 100644 search/docs/api/faststream/cli/utils/parser/parse_cli_args.md
 delete mode 100644 search/docs/api/faststream/cli/utils/parser/remove_prefix.md
 delete mode 100644 search/docs/api/faststream/constants/ContentTypes.md
 delete mode 100644 search/docs/api/faststream/exceptions/AckMessage.md
 delete mode 100644 search/docs/api/faststream/exceptions/HandlerException.md
 delete mode 100644 search/docs/api/faststream/exceptions/NackMessage.md
 delete mode 100644 search/docs/api/faststream/exceptions/RejectMessage.md
 delete mode 100644 search/docs/api/faststream/exceptions/SkipMessage.md
 delete mode 100644 search/docs/api/faststream/exceptions/StopConsume.md
 delete mode 100644 search/docs/api/faststream/kafka/asyncapi/Handler.md
 delete mode 100644 search/docs/api/faststream/kafka/asyncapi/Publisher.md
 delete mode 100644 search/docs/api/faststream/kafka/broker/KafkaBroker.md
 delete mode 100644 search/docs/api/faststream/kafka/fastapi/KafkaRouter.md
 delete mode 100644 search/docs/api/faststream/kafka/handler/LogicHandler.md
 delete mode 100644 search/docs/api/faststream/kafka/message/KafkaMessage.md
 delete mode 100644 search/docs/api/faststream/kafka/parser/AioKafkaParser.md
 delete mode 100644 search/docs/api/faststream/kafka/producer/AioKafkaFastProducer.md
 delete mode 100644 search/docs/api/faststream/kafka/publisher/LogicPublisher.md
 delete mode 100644 search/docs/api/faststream/kafka/router/KafkaRouter.md
 delete mode 100644 search/docs/api/faststream/kafka/security/parse_security.md
 delete mode 100644 search/docs/api/faststream/kafka/shared/logging/KafkaLoggingMixin.md
 delete mode 100644 search/docs/api/faststream/kafka/shared/publisher/ABCPublisher.md
 delete mode 100644 search/docs/api/faststream/kafka/shared/router/KafkaRouter.md
 delete mode 100644 search/docs/api/faststream/kafka/shared/schemas/ConsumerConnectionParams.md
 delete mode 100644 search/docs/api/faststream/kafka/test/FakeProducer.md
 delete mode 100644 search/docs/api/faststream/kafka/test/TestKafkaBroker.md
 delete mode 100644 search/docs/api/faststream/kafka/test/build_message.md
 delete mode 100644 search/docs/api/faststream/log/formatter/ColourizedFormatter.md
 delete mode 100644 search/docs/api/faststream/log/formatter/expand_log_field.md
 delete mode 100644 search/docs/api/faststream/log/formatter/make_record_with_extra.md
 delete mode 100644 search/docs/api/faststream/log/logging/configure_formatter.md
 delete mode 100644 search/docs/api/faststream/nats/asyncapi/Handler.md
 delete mode 100644 search/docs/api/faststream/nats/asyncapi/Publisher.md
 delete mode 100644 search/docs/api/faststream/nats/broker/NatsBroker.md
 delete mode 100644 search/docs/api/faststream/nats/fastapi/NatsRouter.md
 delete mode 100644 search/docs/api/faststream/nats/handler/LogicNatsHandler.md
 delete mode 100644 search/docs/api/faststream/nats/helpers/StreamBuilder.md
 delete mode 100644 search/docs/api/faststream/nats/js_stream/JStream.md
 delete mode 100644 search/docs/api/faststream/nats/message/NatsMessage.md
 delete mode 100644 search/docs/api/faststream/nats/parser/NatsParser.md
 delete mode 100644 search/docs/api/faststream/nats/producer/NatsFastProducer.md
 delete mode 100644 search/docs/api/faststream/nats/producer/NatsJSFastProducer.md
 delete mode 100644 search/docs/api/faststream/nats/publisher/LogicPublisher.md
 delete mode 100644 search/docs/api/faststream/nats/router/NatsRouter.md
 delete mode 100644 search/docs/api/faststream/nats/shared/logging/NatsLoggingMixin.md
 delete mode 100644 search/docs/api/faststream/nats/shared/router/NatsRouter.md
 delete mode 100644 search/docs/api/faststream/nats/test/FakeProducer.md
 delete mode 100644 search/docs/api/faststream/nats/test/PatchedMessage.md
 delete mode 100644 search/docs/api/faststream/nats/test/TestNatsBroker.md
 delete mode 100644 search/docs/api/faststream/nats/test/build_message.md
 delete mode 100644 search/docs/api/faststream/rabbit/asyncapi/Handler.md
 delete mode 100644 search/docs/api/faststream/rabbit/asyncapi/Publisher.md
 delete mode 100644 search/docs/api/faststream/rabbit/asyncapi/RMQAsyncAPIChannel.md
 delete mode 100644 search/docs/api/faststream/rabbit/broker/RabbitBroker.md
 delete mode 100644 search/docs/api/faststream/rabbit/fastapi/RabbitRouter.md
 delete mode 100644 search/docs/api/faststream/rabbit/handler/LogicHandler.md
 delete mode 100644 search/docs/api/faststream/rabbit/helpers/RabbitDeclarer.md
 delete mode 100644 search/docs/api/faststream/rabbit/message/RabbitMessage.md
 delete mode 100644 search/docs/api/faststream/rabbit/parser/AioPikaParser.md
 delete mode 100644 search/docs/api/faststream/rabbit/producer/AioPikaFastProducer.md
 delete mode 100644 search/docs/api/faststream/rabbit/publisher/LogicPublisher.md
 delete mode 100644 search/docs/api/faststream/rabbit/router/RabbitRouter.md
 delete mode 100644 search/docs/api/faststream/rabbit/security/parse_security.md
 delete mode 100644 search/docs/api/faststream/rabbit/shared/constants/ExchangeType.md
 delete mode 100644 search/docs/api/faststream/rabbit/shared/logging/RabbitLoggingMixin.md
 delete mode 100644 search/docs/api/faststream/rabbit/shared/publisher/ABCPublisher.md
 delete mode 100644 search/docs/api/faststream/rabbit/shared/router/RabbitRouter.md
 delete mode 100644 search/docs/api/faststream/rabbit/shared/schemas/BaseRMQInformation.md
 delete mode 100644 search/docs/api/faststream/rabbit/shared/schemas/RabbitExchange.md
 delete mode 100644 search/docs/api/faststream/rabbit/shared/schemas/RabbitQueue.md
 delete mode 100644 search/docs/api/faststream/rabbit/shared/schemas/get_routing_hash.md
 delete mode 100644 search/docs/api/faststream/rabbit/test/FakeProducer.md
 delete mode 100644 search/docs/api/faststream/rabbit/test/PatchedMessage.md
 delete mode 100644 search/docs/api/faststream/rabbit/test/TestRabbitBroker.md
 delete mode 100644 search/docs/api/faststream/rabbit/test/build_message.md
 delete mode 100644 search/docs/api/faststream/utils/classes/Singleton.md
 delete mode 100644 search/docs/api/faststream/utils/context/main/ContextRepo.md
 delete mode 100644 search/docs/api/faststream/utils/context/types/Context.md
 delete mode 100644 search/docs/api/faststream/utils/context/types/resolve_context.md
 delete mode 100644 search/docs/api/faststream/utils/data/filter_by_dict.md
 delete mode 100644 search/docs/api/faststream/utils/functions/get_function_positional_arguments.md
 delete mode 100644 search/docs/api/faststream/utils/functions/timeout_scope.md
 delete mode 100644 search/docs/api/faststream/utils/functions/to_async.md
 delete mode 100644 search/docs/api/faststream/utils/no_cast/NoCast.md
 delete mode 100644 search/docs/getting-started/asyncapi/custom.md
 delete mode 100644 search/docs/getting-started/asyncapi/export.md
 delete mode 100644 search/docs/getting-started/asyncapi/hosting.md
 delete mode 100644 search/docs/getting-started/cli/index.md
 delete mode 100644 search/docs/getting-started/config/index.md
 delete mode 100644 search/docs/getting-started/context/custom.md
 delete mode 100644 search/docs/getting-started/context/existed.md
 delete mode 100644 search/docs/getting-started/context/extra.md
 delete mode 100644 search/docs/getting-started/context/fields.md
 delete mode 100644 search/docs/getting-started/context/index.md
 delete mode 100644 search/docs/getting-started/contributing/CONTRIBUTING.md
 delete mode 100644 search/docs/getting-started/contributing/docs.md
 delete mode 100644 search/docs/getting-started/dependencies/class.md
 delete mode 100644 search/docs/getting-started/dependencies/global.md
 delete mode 100644 search/docs/getting-started/dependencies/index.md
 delete mode 100644 search/docs/getting-started/dependencies/sub.md
 delete mode 100644 search/docs/getting-started/dependencies/testing.md
 delete mode 100644 search/docs/getting-started/dependencies/yield.md
 delete mode 100644 search/docs/getting-started/index.md
 delete mode 100644 search/docs/getting-started/integrations/fastapi/index.md
 delete mode 100644 search/docs/getting-started/integrations/frameworks/index.md
 delete mode 100644 search/docs/getting-started/lifespan/hooks.md
 delete mode 100644 search/docs/getting-started/lifespan/index.md
 delete mode 100644 search/docs/getting-started/lifespan/test.md
 delete mode 100644 search/docs/getting-started/logging.md
 delete mode 100644 search/docs/getting-started/middlewares/index.md
 delete mode 100644 search/docs/getting-started/publishing/broker.md
 delete mode 100644 search/docs/getting-started/publishing/decorator.md
 delete mode 100644 search/docs/getting-started/publishing/direct.md
 delete mode 100644 search/docs/getting-started/publishing/index.md
 delete mode 100644 search/docs/getting-started/publishing/object.md
 delete mode 100644 search/docs/getting-started/publishing/test.md
 delete mode 100644 search/docs/getting-started/routers/index.md
 delete mode 100644 search/docs/getting-started/serialization/decoder.md
 delete mode 100644 search/docs/getting-started/serialization/examples.md
 delete mode 100644 search/docs/getting-started/serialization/index.md
 delete mode 100644 search/docs/getting-started/serialization/parser.md
 delete mode 100644 search/docs/getting-started/subscription/annotation.md
 delete mode 100644 search/docs/getting-started/subscription/filtering.md
 delete mode 100644 search/docs/getting-started/subscription/index.md
 delete mode 100644 search/docs/getting-started/subscription/pydantic.md
 delete mode 100644 search/docs/getting-started/subscription/test.md
 delete mode 100644 search/docs/getting-started/template/index.md
 delete mode 100644 search/docs/index.md
 delete mode 100644 search/docs/kafka/Publisher/batch_publisher.md
 delete mode 100644 search/docs/kafka/Publisher/index.md
 delete mode 100644 search/docs/kafka/Publisher/using_a_key.md
 delete mode 100644 search/docs/kafka/Subscriber/batch_subscriber.md
 delete mode 100644 search/docs/kafka/Subscriber/index.md
 delete mode 100644 search/docs/kafka/ack.md
 delete mode 100644 search/docs/kafka/index.md
 delete mode 100644 search/docs/kafka/message.md
 delete mode 100644 search/docs/nats/examples/direct.md
 delete mode 100644 search/docs/nats/examples/pattern.md
 delete mode 100644 search/docs/nats/index.md
 delete mode 100644 search/docs/nats/jetstream/ack.md
 delete mode 100644 search/docs/nats/jetstream/index.md
 delete mode 100644 search/docs/nats/jetstream/key-value.md
 delete mode 100644 search/docs/nats/jetstream/object.md
 delete mode 100644 search/docs/nats/message.md
 delete mode 100644 search/docs/nats/publishing/index.md
 delete mode 100644 search/docs/nats/rpc.md
 delete mode 100644 search/docs/rabbit/ack.md
 delete mode 100644 search/docs/rabbit/declare.md
 delete mode 100644 search/docs/rabbit/examples/direct.md
 delete mode 100644 search/docs/rabbit/examples/fanout.md
 delete mode 100644 search/docs/rabbit/examples/headers.md
 delete mode 100644 search/docs/rabbit/examples/index.md
 delete mode 100644 search/docs/rabbit/examples/stream.md
 delete mode 100644 search/docs/rabbit/examples/topic.md
 delete mode 100644 search/docs/rabbit/index.md
 delete mode 100644 search/docs/rabbit/message.md
 delete mode 100644 search/docs/rabbit/publishing.md
 delete mode 100644 search/docs/rabbit/rpc.md
 delete mode 100644 search/docs/release.md

diff --git a/search/docs/api/faststream/app/ABCApp.md b/search/docs/api/faststream/app/ABCApp.md
deleted file mode 100644
index 7f2d1b8..0000000
--- a/search/docs/api/faststream/app/ABCApp.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.app.ABCApp
diff --git a/search/docs/api/faststream/app/FastStream.md b/search/docs/api/faststream/app/FastStream.md
deleted file mode 100644
index 769b229..0000000
--- a/search/docs/api/faststream/app/FastStream.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.app.FastStream
diff --git a/search/docs/api/faststream/asyncapi/base/AsyncAPIOperation.md b/search/docs/api/faststream/asyncapi/base/AsyncAPIOperation.md
deleted file mode 100644
index 8b9a5b3..0000000
--- a/search/docs/api/faststream/asyncapi/base/AsyncAPIOperation.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.base.AsyncAPIOperation
diff --git a/search/docs/api/faststream/asyncapi/generate/get_app_broker_channels.md b/search/docs/api/faststream/asyncapi/generate/get_app_broker_channels.md
deleted file mode 100644
index 4968e92..0000000
--- a/search/docs/api/faststream/asyncapi/generate/get_app_broker_channels.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.generate.get_app_broker_channels
diff --git a/search/docs/api/faststream/asyncapi/generate/get_app_broker_server.md b/search/docs/api/faststream/asyncapi/generate/get_app_broker_server.md
deleted file mode 100644
index 97bce7f..0000000
--- a/search/docs/api/faststream/asyncapi/generate/get_app_broker_server.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.generate.get_app_broker_server
diff --git a/search/docs/api/faststream/asyncapi/generate/get_app_schema.md b/search/docs/api/faststream/asyncapi/generate/get_app_schema.md
deleted file mode 100644
index 77e1a64..0000000
--- a/search/docs/api/faststream/asyncapi/generate/get_app_schema.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.generate.get_app_schema
diff --git a/search/docs/api/faststream/asyncapi/message/get_model_schema.md b/search/docs/api/faststream/asyncapi/message/get_model_schema.md
deleted file mode 100644
index c3ec4bb..0000000
--- a/search/docs/api/faststream/asyncapi/message/get_model_schema.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.message.get_model_schema
diff --git a/search/docs/api/faststream/asyncapi/message/get_response_schema.md b/search/docs/api/faststream/asyncapi/message/get_response_schema.md
deleted file mode 100644
index 3a8eb1e..0000000
--- a/search/docs/api/faststream/asyncapi/message/get_response_schema.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.message.get_response_schema
diff --git a/search/docs/api/faststream/asyncapi/message/parse_handler_params.md b/search/docs/api/faststream/asyncapi/message/parse_handler_params.md
deleted file mode 100644
index c530266..0000000
--- a/search/docs/api/faststream/asyncapi/message/parse_handler_params.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.message.parse_handler_params
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/amqp/ChannelBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/amqp/ChannelBinding.md
deleted file mode 100644
index 443e316..0000000
--- a/search/docs/api/faststream/asyncapi/schema/bindings/amqp/ChannelBinding.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.bindings.amqp.ChannelBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/amqp/Exchange.md b/search/docs/api/faststream/asyncapi/schema/bindings/amqp/Exchange.md
deleted file mode 100644
index f38ab3e..0000000
--- a/search/docs/api/faststream/asyncapi/schema/bindings/amqp/Exchange.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.bindings.amqp.Exchange
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/amqp/OperationBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/amqp/OperationBinding.md
deleted file mode 100644
index ced9de0..0000000
--- a/search/docs/api/faststream/asyncapi/schema/bindings/amqp/OperationBinding.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.bindings.amqp.OperationBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/amqp/Queue.md b/search/docs/api/faststream/asyncapi/schema/bindings/amqp/Queue.md
deleted file mode 100644
index 47ce84c..0000000
--- a/search/docs/api/faststream/asyncapi/schema/bindings/amqp/Queue.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.bindings.amqp.Queue
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/amqp/ServerBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/amqp/ServerBinding.md
deleted file mode 100644
index 29d4033..0000000
--- a/search/docs/api/faststream/asyncapi/schema/bindings/amqp/ServerBinding.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.bindings.amqp.ServerBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/kafka/ChannelBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/kafka/ChannelBinding.md
deleted file mode 100644
index e3e9748..0000000
--- a/search/docs/api/faststream/asyncapi/schema/bindings/kafka/ChannelBinding.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.bindings.kafka.ChannelBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/kafka/OperationBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/kafka/OperationBinding.md
deleted file mode 100644
index b231a6f..0000000
--- a/search/docs/api/faststream/asyncapi/schema/bindings/kafka/OperationBinding.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.bindings.kafka.OperationBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/kafka/ServerBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/kafka/ServerBinding.md
deleted file mode 100644
index 85183b4..0000000
--- a/search/docs/api/faststream/asyncapi/schema/bindings/kafka/ServerBinding.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.bindings.kafka.ServerBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/main/ChannelBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/main/ChannelBinding.md
deleted file mode 100644
index 8db79c7..0000000
--- a/search/docs/api/faststream/asyncapi/schema/bindings/main/ChannelBinding.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.bindings.main.ChannelBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/main/OperationBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/main/OperationBinding.md
deleted file mode 100644
index 0492f07..0000000
--- a/search/docs/api/faststream/asyncapi/schema/bindings/main/OperationBinding.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.bindings.main.OperationBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/main/ServerBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/main/ServerBinding.md
deleted file mode 100644
index 3250e4d..0000000
--- a/search/docs/api/faststream/asyncapi/schema/bindings/main/ServerBinding.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.bindings.main.ServerBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/nats/ChannelBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/nats/ChannelBinding.md
deleted file mode 100644
index 1195aa6..0000000
--- a/search/docs/api/faststream/asyncapi/schema/bindings/nats/ChannelBinding.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.bindings.nats.ChannelBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/nats/OperationBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/nats/OperationBinding.md
deleted file mode 100644
index 5c93a78..0000000
--- a/search/docs/api/faststream/asyncapi/schema/bindings/nats/OperationBinding.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.bindings.nats.OperationBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/nats/ServerBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/nats/ServerBinding.md
deleted file mode 100644
index 8cde582..0000000
--- a/search/docs/api/faststream/asyncapi/schema/bindings/nats/ServerBinding.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.bindings.nats.ServerBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/redis/ChannelBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/redis/ChannelBinding.md
deleted file mode 100644
index 44dc374..0000000
--- a/search/docs/api/faststream/asyncapi/schema/bindings/redis/ChannelBinding.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.bindings.redis.ChannelBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/redis/OperationBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/redis/OperationBinding.md
deleted file mode 100644
index 1d8b38e..0000000
--- a/search/docs/api/faststream/asyncapi/schema/bindings/redis/OperationBinding.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.bindings.redis.OperationBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/redis/ServerBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/redis/ServerBinding.md
deleted file mode 100644
index 25ae5b8..0000000
--- a/search/docs/api/faststream/asyncapi/schema/bindings/redis/ServerBinding.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.bindings.redis.ServerBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/sqs/ChannelBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/sqs/ChannelBinding.md
deleted file mode 100644
index da40112..0000000
--- a/search/docs/api/faststream/asyncapi/schema/bindings/sqs/ChannelBinding.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.bindings.sqs.ChannelBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/sqs/OperationBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/sqs/OperationBinding.md
deleted file mode 100644
index a8bcaca..0000000
--- a/search/docs/api/faststream/asyncapi/schema/bindings/sqs/OperationBinding.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.bindings.sqs.OperationBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/bindings/sqs/ServerBinding.md b/search/docs/api/faststream/asyncapi/schema/bindings/sqs/ServerBinding.md
deleted file mode 100644
index f3228be..0000000
--- a/search/docs/api/faststream/asyncapi/schema/bindings/sqs/ServerBinding.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.bindings.sqs.ServerBinding
diff --git a/search/docs/api/faststream/asyncapi/schema/channels/Channel.md b/search/docs/api/faststream/asyncapi/schema/channels/Channel.md
deleted file mode 100644
index b283b20..0000000
--- a/search/docs/api/faststream/asyncapi/schema/channels/Channel.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.channels.Channel
diff --git a/search/docs/api/faststream/asyncapi/schema/info/Contact.md b/search/docs/api/faststream/asyncapi/schema/info/Contact.md
deleted file mode 100644
index bcc8ff0..0000000
--- a/search/docs/api/faststream/asyncapi/schema/info/Contact.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.info.Contact
diff --git a/search/docs/api/faststream/asyncapi/schema/info/ContactDict.md b/search/docs/api/faststream/asyncapi/schema/info/ContactDict.md
deleted file mode 100644
index ffd0af9..0000000
--- a/search/docs/api/faststream/asyncapi/schema/info/ContactDict.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.info.ContactDict
diff --git a/search/docs/api/faststream/asyncapi/schema/info/EmailStr.md b/search/docs/api/faststream/asyncapi/schema/info/EmailStr.md
deleted file mode 100644
index eb5ba9d..0000000
--- a/search/docs/api/faststream/asyncapi/schema/info/EmailStr.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.info.EmailStr
diff --git a/search/docs/api/faststream/asyncapi/schema/info/Info.md b/search/docs/api/faststream/asyncapi/schema/info/Info.md
deleted file mode 100644
index ba556ec..0000000
--- a/search/docs/api/faststream/asyncapi/schema/info/Info.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.info.Info
diff --git a/search/docs/api/faststream/asyncapi/schema/info/License.md b/search/docs/api/faststream/asyncapi/schema/info/License.md
deleted file mode 100644
index 0be1fa3..0000000
--- a/search/docs/api/faststream/asyncapi/schema/info/License.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.info.License
diff --git a/search/docs/api/faststream/asyncapi/schema/info/LicenseDict.md b/search/docs/api/faststream/asyncapi/schema/info/LicenseDict.md
deleted file mode 100644
index f6e4541..0000000
--- a/search/docs/api/faststream/asyncapi/schema/info/LicenseDict.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.info.LicenseDict
diff --git a/search/docs/api/faststream/asyncapi/schema/main/Components.md b/search/docs/api/faststream/asyncapi/schema/main/Components.md
deleted file mode 100644
index 1e70196..0000000
--- a/search/docs/api/faststream/asyncapi/schema/main/Components.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.main.Components
diff --git a/search/docs/api/faststream/asyncapi/schema/main/Schema.md b/search/docs/api/faststream/asyncapi/schema/main/Schema.md
deleted file mode 100644
index 0c1aee8..0000000
--- a/search/docs/api/faststream/asyncapi/schema/main/Schema.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.main.Schema
diff --git a/search/docs/api/faststream/asyncapi/schema/message/CorrelationId.md b/search/docs/api/faststream/asyncapi/schema/message/CorrelationId.md
deleted file mode 100644
index fb1b81c..0000000
--- a/search/docs/api/faststream/asyncapi/schema/message/CorrelationId.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.message.CorrelationId
diff --git a/search/docs/api/faststream/asyncapi/schema/message/Message.md b/search/docs/api/faststream/asyncapi/schema/message/Message.md
deleted file mode 100644
index 194c715..0000000
--- a/search/docs/api/faststream/asyncapi/schema/message/Message.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.message.Message
diff --git a/search/docs/api/faststream/asyncapi/schema/operations/Operation.md b/search/docs/api/faststream/asyncapi/schema/operations/Operation.md
deleted file mode 100644
index 9e331ce..0000000
--- a/search/docs/api/faststream/asyncapi/schema/operations/Operation.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.operations.Operation
diff --git a/search/docs/api/faststream/asyncapi/schema/security/OauthFlowObj.md b/search/docs/api/faststream/asyncapi/schema/security/OauthFlowObj.md
deleted file mode 100644
index adf303b..0000000
--- a/search/docs/api/faststream/asyncapi/schema/security/OauthFlowObj.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.security.OauthFlowObj
diff --git a/search/docs/api/faststream/asyncapi/schema/security/OauthFlows.md b/search/docs/api/faststream/asyncapi/schema/security/OauthFlows.md
deleted file mode 100644
index 6e322f2..0000000
--- a/search/docs/api/faststream/asyncapi/schema/security/OauthFlows.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.security.OauthFlows
diff --git a/search/docs/api/faststream/asyncapi/schema/security/SecuritySchemaComponent.md b/search/docs/api/faststream/asyncapi/schema/security/SecuritySchemaComponent.md
deleted file mode 100644
index 69329cb..0000000
--- a/search/docs/api/faststream/asyncapi/schema/security/SecuritySchemaComponent.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.security.SecuritySchemaComponent
diff --git a/search/docs/api/faststream/asyncapi/schema/servers/Server.md b/search/docs/api/faststream/asyncapi/schema/servers/Server.md
deleted file mode 100644
index 48c47c5..0000000
--- a/search/docs/api/faststream/asyncapi/schema/servers/Server.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.servers.Server
diff --git a/search/docs/api/faststream/asyncapi/schema/servers/ServerVariable.md b/search/docs/api/faststream/asyncapi/schema/servers/ServerVariable.md
deleted file mode 100644
index d34e6ca..0000000
--- a/search/docs/api/faststream/asyncapi/schema/servers/ServerVariable.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.servers.ServerVariable
diff --git a/search/docs/api/faststream/asyncapi/schema/utils/ExternalDocs.md b/search/docs/api/faststream/asyncapi/schema/utils/ExternalDocs.md
deleted file mode 100644
index aca23a7..0000000
--- a/search/docs/api/faststream/asyncapi/schema/utils/ExternalDocs.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.utils.ExternalDocs
diff --git a/search/docs/api/faststream/asyncapi/schema/utils/ExternalDocsDict.md b/search/docs/api/faststream/asyncapi/schema/utils/ExternalDocsDict.md
deleted file mode 100644
index b9e5448..0000000
--- a/search/docs/api/faststream/asyncapi/schema/utils/ExternalDocsDict.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.utils.ExternalDocsDict
diff --git a/search/docs/api/faststream/asyncapi/schema/utils/Parameter.md b/search/docs/api/faststream/asyncapi/schema/utils/Parameter.md
deleted file mode 100644
index d812f0d..0000000
--- a/search/docs/api/faststream/asyncapi/schema/utils/Parameter.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.utils.Parameter
diff --git a/search/docs/api/faststream/asyncapi/schema/utils/Reference.md b/search/docs/api/faststream/asyncapi/schema/utils/Reference.md
deleted file mode 100644
index 8af2feb..0000000
--- a/search/docs/api/faststream/asyncapi/schema/utils/Reference.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.utils.Reference
diff --git a/search/docs/api/faststream/asyncapi/schema/utils/Tag.md b/search/docs/api/faststream/asyncapi/schema/utils/Tag.md
deleted file mode 100644
index 581e235..0000000
--- a/search/docs/api/faststream/asyncapi/schema/utils/Tag.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.utils.Tag
diff --git a/search/docs/api/faststream/asyncapi/schema/utils/TagDict.md b/search/docs/api/faststream/asyncapi/schema/utils/TagDict.md
deleted file mode 100644
index 66cec99..0000000
--- a/search/docs/api/faststream/asyncapi/schema/utils/TagDict.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.schema.utils.TagDict
diff --git a/search/docs/api/faststream/asyncapi/site/get_asyncapi_html.md b/search/docs/api/faststream/asyncapi/site/get_asyncapi_html.md
deleted file mode 100644
index e98f7d4..0000000
--- a/search/docs/api/faststream/asyncapi/site/get_asyncapi_html.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.site.get_asyncapi_html
diff --git a/search/docs/api/faststream/asyncapi/site/serve_app.md b/search/docs/api/faststream/asyncapi/site/serve_app.md
deleted file mode 100644
index 18f9b0d..0000000
--- a/search/docs/api/faststream/asyncapi/site/serve_app.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.site.serve_app
diff --git a/search/docs/api/faststream/asyncapi/utils/resolve_payloads.md b/search/docs/api/faststream/asyncapi/utils/resolve_payloads.md
deleted file mode 100644
index 40809e1..0000000
--- a/search/docs/api/faststream/asyncapi/utils/resolve_payloads.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.utils.resolve_payloads
diff --git a/search/docs/api/faststream/asyncapi/utils/to_camelcase.md b/search/docs/api/faststream/asyncapi/utils/to_camelcase.md
deleted file mode 100644
index 40cd64a..0000000
--- a/search/docs/api/faststream/asyncapi/utils/to_camelcase.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.asyncapi.utils.to_camelcase
diff --git a/search/docs/api/faststream/broker/core/abc/BrokerUsecase.md b/search/docs/api/faststream/broker/core/abc/BrokerUsecase.md
deleted file mode 100644
index 4a1c3c9..0000000
--- a/search/docs/api/faststream/broker/core/abc/BrokerUsecase.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.core.abc.BrokerUsecase
diff --git a/search/docs/api/faststream/broker/core/abc/extend_dependencies.md b/search/docs/api/faststream/broker/core/abc/extend_dependencies.md
deleted file mode 100644
index d89d80e..0000000
--- a/search/docs/api/faststream/broker/core/abc/extend_dependencies.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.core.abc.extend_dependencies
diff --git a/search/docs/api/faststream/broker/core/asyncronous/BrokerAsyncUsecase.md b/search/docs/api/faststream/broker/core/asyncronous/BrokerAsyncUsecase.md
deleted file mode 100644
index 507344a..0000000
--- a/search/docs/api/faststream/broker/core/asyncronous/BrokerAsyncUsecase.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.core.asyncronous.BrokerAsyncUsecase
diff --git a/search/docs/api/faststream/broker/core/asyncronous/default_filter.md b/search/docs/api/faststream/broker/core/asyncronous/default_filter.md
deleted file mode 100644
index b40ad64..0000000
--- a/search/docs/api/faststream/broker/core/asyncronous/default_filter.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.core.asyncronous.default_filter
diff --git a/search/docs/api/faststream/broker/core/mixins/LoggingMixin.md b/search/docs/api/faststream/broker/core/mixins/LoggingMixin.md
deleted file mode 100644
index 74154ac..0000000
--- a/search/docs/api/faststream/broker/core/mixins/LoggingMixin.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.core.mixins.LoggingMixin
diff --git a/search/docs/api/faststream/broker/fastapi/route/StreamMessage.md b/search/docs/api/faststream/broker/fastapi/route/StreamMessage.md
deleted file mode 100644
index 7b19856..0000000
--- a/search/docs/api/faststream/broker/fastapi/route/StreamMessage.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.fastapi.route.StreamMessage
diff --git a/search/docs/api/faststream/broker/fastapi/route/StreamRoute.md b/search/docs/api/faststream/broker/fastapi/route/StreamRoute.md
deleted file mode 100644
index e067a18..0000000
--- a/search/docs/api/faststream/broker/fastapi/route/StreamRoute.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.fastapi.route.StreamRoute
diff --git a/search/docs/api/faststream/broker/fastapi/route/get_app.md b/search/docs/api/faststream/broker/fastapi/route/get_app.md
deleted file mode 100644
index 3df3d57..0000000
--- a/search/docs/api/faststream/broker/fastapi/route/get_app.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.fastapi.route.get_app
diff --git a/search/docs/api/faststream/broker/fastapi/router/StreamRouter.md b/search/docs/api/faststream/broker/fastapi/router/StreamRouter.md
deleted file mode 100644
index f0ee7f0..0000000
--- a/search/docs/api/faststream/broker/fastapi/router/StreamRouter.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.fastapi.router.StreamRouter
diff --git a/search/docs/api/faststream/broker/handler/AsyncHandler.md b/search/docs/api/faststream/broker/handler/AsyncHandler.md
deleted file mode 100644
index cc291a0..0000000
--- a/search/docs/api/faststream/broker/handler/AsyncHandler.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.handler.AsyncHandler
diff --git a/search/docs/api/faststream/broker/handler/BaseHandler.md b/search/docs/api/faststream/broker/handler/BaseHandler.md
deleted file mode 100644
index 193dfad..0000000
--- a/search/docs/api/faststream/broker/handler/BaseHandler.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.handler.BaseHandler
diff --git a/search/docs/api/faststream/broker/message/ABCStreamMessage.md b/search/docs/api/faststream/broker/message/ABCStreamMessage.md
deleted file mode 100644
index 7b8f52f..0000000
--- a/search/docs/api/faststream/broker/message/ABCStreamMessage.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.message.ABCStreamMessage
diff --git a/search/docs/api/faststream/broker/message/StreamMessage.md b/search/docs/api/faststream/broker/message/StreamMessage.md
deleted file mode 100644
index f87c3e8..0000000
--- a/search/docs/api/faststream/broker/message/StreamMessage.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.message.StreamMessage
diff --git a/search/docs/api/faststream/broker/message/SyncStreamMessage.md b/search/docs/api/faststream/broker/message/SyncStreamMessage.md
deleted file mode 100644
index b1c8c34..0000000
--- a/search/docs/api/faststream/broker/message/SyncStreamMessage.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.message.SyncStreamMessage
diff --git a/search/docs/api/faststream/broker/middlewares/BaseMiddleware.md b/search/docs/api/faststream/broker/middlewares/BaseMiddleware.md
deleted file mode 100644
index 417d231..0000000
--- a/search/docs/api/faststream/broker/middlewares/BaseMiddleware.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.middlewares.BaseMiddleware
diff --git a/search/docs/api/faststream/broker/middlewares/CriticalLogMiddleware.md b/search/docs/api/faststream/broker/middlewares/CriticalLogMiddleware.md
deleted file mode 100644
index fe8a095..0000000
--- a/search/docs/api/faststream/broker/middlewares/CriticalLogMiddleware.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.middlewares.CriticalLogMiddleware
diff --git a/search/docs/api/faststream/broker/parsers/decode_message.md b/search/docs/api/faststream/broker/parsers/decode_message.md
deleted file mode 100644
index 8664120..0000000
--- a/search/docs/api/faststream/broker/parsers/decode_message.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.parsers.decode_message
diff --git a/search/docs/api/faststream/broker/parsers/encode_message.md b/search/docs/api/faststream/broker/parsers/encode_message.md
deleted file mode 100644
index 0fb74e3..0000000
--- a/search/docs/api/faststream/broker/parsers/encode_message.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.parsers.encode_message
diff --git a/search/docs/api/faststream/broker/parsers/resolve_custom_func.md b/search/docs/api/faststream/broker/parsers/resolve_custom_func.md
deleted file mode 100644
index 3e8ab0d..0000000
--- a/search/docs/api/faststream/broker/parsers/resolve_custom_func.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.parsers.resolve_custom_func
diff --git a/search/docs/api/faststream/broker/publisher/BasePublisher.md b/search/docs/api/faststream/broker/publisher/BasePublisher.md
deleted file mode 100644
index 8118cc4..0000000
--- a/search/docs/api/faststream/broker/publisher/BasePublisher.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.publisher.BasePublisher
diff --git a/search/docs/api/faststream/broker/push_back_watcher/BaseWatcher.md b/search/docs/api/faststream/broker/push_back_watcher/BaseWatcher.md
deleted file mode 100644
index 3a74eed..0000000
--- a/search/docs/api/faststream/broker/push_back_watcher/BaseWatcher.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.push_back_watcher.BaseWatcher
diff --git a/search/docs/api/faststream/broker/push_back_watcher/CounterWatcher.md b/search/docs/api/faststream/broker/push_back_watcher/CounterWatcher.md
deleted file mode 100644
index e8616c5..0000000
--- a/search/docs/api/faststream/broker/push_back_watcher/CounterWatcher.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.push_back_watcher.CounterWatcher
diff --git a/search/docs/api/faststream/broker/push_back_watcher/EndlessWatcher.md b/search/docs/api/faststream/broker/push_back_watcher/EndlessWatcher.md
deleted file mode 100644
index 2ad32a8..0000000
--- a/search/docs/api/faststream/broker/push_back_watcher/EndlessWatcher.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.push_back_watcher.EndlessWatcher
diff --git a/search/docs/api/faststream/broker/push_back_watcher/OneTryWatcher.md b/search/docs/api/faststream/broker/push_back_watcher/OneTryWatcher.md
deleted file mode 100644
index 736f601..0000000
--- a/search/docs/api/faststream/broker/push_back_watcher/OneTryWatcher.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.push_back_watcher.OneTryWatcher
diff --git a/search/docs/api/faststream/broker/push_back_watcher/WatcherContext.md b/search/docs/api/faststream/broker/push_back_watcher/WatcherContext.md
deleted file mode 100644
index 773e3cf..0000000
--- a/search/docs/api/faststream/broker/push_back_watcher/WatcherContext.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.push_back_watcher.WatcherContext
diff --git a/search/docs/api/faststream/broker/router/BrokerRoute.md b/search/docs/api/faststream/broker/router/BrokerRoute.md
deleted file mode 100644
index c3b2c4a..0000000
--- a/search/docs/api/faststream/broker/router/BrokerRoute.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.router.BrokerRoute
diff --git a/search/docs/api/faststream/broker/router/BrokerRouter.md b/search/docs/api/faststream/broker/router/BrokerRouter.md
deleted file mode 100644
index adecbf2..0000000
--- a/search/docs/api/faststream/broker/router/BrokerRouter.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.router.BrokerRouter
diff --git a/search/docs/api/faststream/broker/schemas/NameRequired.md b/search/docs/api/faststream/broker/schemas/NameRequired.md
deleted file mode 100644
index 105588f..0000000
--- a/search/docs/api/faststream/broker/schemas/NameRequired.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.schemas.NameRequired
diff --git a/search/docs/api/faststream/broker/schemas/RawDecoced.md b/search/docs/api/faststream/broker/schemas/RawDecoced.md
deleted file mode 100644
index 968a1ee..0000000
--- a/search/docs/api/faststream/broker/schemas/RawDecoced.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.schemas.RawDecoced
diff --git a/search/docs/api/faststream/broker/security/BaseSecurity.md b/search/docs/api/faststream/broker/security/BaseSecurity.md
deleted file mode 100644
index a607c21..0000000
--- a/search/docs/api/faststream/broker/security/BaseSecurity.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.security.BaseSecurity
diff --git a/search/docs/api/faststream/broker/security/SASLPlaintext.md b/search/docs/api/faststream/broker/security/SASLPlaintext.md
deleted file mode 100644
index 4d56212..0000000
--- a/search/docs/api/faststream/broker/security/SASLPlaintext.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.security.SASLPlaintext
diff --git a/search/docs/api/faststream/broker/security/SASLScram256.md b/search/docs/api/faststream/broker/security/SASLScram256.md
deleted file mode 100644
index 29946ef..0000000
--- a/search/docs/api/faststream/broker/security/SASLScram256.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.security.SASLScram256
diff --git a/search/docs/api/faststream/broker/security/SASLScram512.md b/search/docs/api/faststream/broker/security/SASLScram512.md
deleted file mode 100644
index 400ba46..0000000
--- a/search/docs/api/faststream/broker/security/SASLScram512.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.security.SASLScram512
diff --git a/search/docs/api/faststream/broker/test/TestApp.md b/search/docs/api/faststream/broker/test/TestApp.md
deleted file mode 100644
index b659d00..0000000
--- a/search/docs/api/faststream/broker/test/TestApp.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.test.TestApp
diff --git a/search/docs/api/faststream/broker/test/TestBroker.md b/search/docs/api/faststream/broker/test/TestBroker.md
deleted file mode 100644
index 0094259..0000000
--- a/search/docs/api/faststream/broker/test/TestBroker.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.test.TestBroker
diff --git a/search/docs/api/faststream/broker/test/call_handler.md b/search/docs/api/faststream/broker/test/call_handler.md
deleted file mode 100644
index 6a3341b..0000000
--- a/search/docs/api/faststream/broker/test/call_handler.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.test.call_handler
diff --git a/search/docs/api/faststream/broker/test/patch_broker_calls.md b/search/docs/api/faststream/broker/test/patch_broker_calls.md
deleted file mode 100644
index 64606a2..0000000
--- a/search/docs/api/faststream/broker/test/patch_broker_calls.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.test.patch_broker_calls
diff --git a/search/docs/api/faststream/broker/types/AsyncPublisherProtocol.md b/search/docs/api/faststream/broker/types/AsyncPublisherProtocol.md
deleted file mode 100644
index 9b39782..0000000
--- a/search/docs/api/faststream/broker/types/AsyncPublisherProtocol.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.types.AsyncPublisherProtocol
diff --git a/search/docs/api/faststream/broker/utils/change_logger_handlers.md b/search/docs/api/faststream/broker/utils/change_logger_handlers.md
deleted file mode 100644
index 0522ea1..0000000
--- a/search/docs/api/faststream/broker/utils/change_logger_handlers.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.utils.change_logger_handlers
diff --git a/search/docs/api/faststream/broker/utils/get_watcher.md b/search/docs/api/faststream/broker/utils/get_watcher.md
deleted file mode 100644
index 7d6f0cc..0000000
--- a/search/docs/api/faststream/broker/utils/get_watcher.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.utils.get_watcher
diff --git a/search/docs/api/faststream/broker/utils/set_message_context.md b/search/docs/api/faststream/broker/utils/set_message_context.md
deleted file mode 100644
index d8682fd..0000000
--- a/search/docs/api/faststream/broker/utils/set_message_context.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-::: faststream.broker.utils.set_message_context
diff --git a/search/docs/api/faststream/broker/wrapper/FakePublisher.md b/search/docs/api/faststream/broker/wrapper/FakePublisher.md
deleted file mode 100644
index 4f9ec95..0000000
--- a/search/docs/api/faststream/broker/wrapper/FakePublisher.md
+++ /dev/null
@@ -1,3 +0,0 @@
- - -::: faststream.broker.wrapper.FakePublisher diff --git a/search/docs/api/faststream/broker/wrapper/HandlerCallWrapper.md b/search/docs/api/faststream/broker/wrapper/HandlerCallWrapper.md deleted file mode 100644 index fe9c10f..0000000 --- a/search/docs/api/faststream/broker/wrapper/HandlerCallWrapper.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.broker.wrapper.HandlerCallWrapper diff --git a/search/docs/api/faststream/cli/docs/app/gen.md b/search/docs/api/faststream/cli/docs/app/gen.md deleted file mode 100644 index 4c512fd..0000000 --- a/search/docs/api/faststream/cli/docs/app/gen.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.cli.docs.app.gen diff --git a/search/docs/api/faststream/cli/docs/app/serve.md b/search/docs/api/faststream/cli/docs/app/serve.md deleted file mode 100644 index aaabc7a..0000000 --- a/search/docs/api/faststream/cli/docs/app/serve.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.cli.docs.app.serve diff --git a/search/docs/api/faststream/cli/main/main.md b/search/docs/api/faststream/cli/main/main.md deleted file mode 100644 index 3de3347..0000000 --- a/search/docs/api/faststream/cli/main/main.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.cli.main.main diff --git a/search/docs/api/faststream/cli/main/run.md b/search/docs/api/faststream/cli/main/run.md deleted file mode 100644 index 7c49ec6..0000000 --- a/search/docs/api/faststream/cli/main/run.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.cli.main.run diff --git a/search/docs/api/faststream/cli/main/version_callback.md b/search/docs/api/faststream/cli/main/version_callback.md deleted file mode 100644 index 2889fc6..0000000 --- a/search/docs/api/faststream/cli/main/version_callback.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.cli.main.version_callback diff --git a/search/docs/api/faststream/cli/supervisors/basereload/BaseReload.md b/search/docs/api/faststream/cli/supervisors/basereload/BaseReload.md deleted file mode 100644 index 37848d0..0000000 --- a/search/docs/api/faststream/cli/supervisors/basereload/BaseReload.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.cli.supervisors.basereload.BaseReload diff --git a/search/docs/api/faststream/cli/supervisors/multiprocess/Multiprocess.md b/search/docs/api/faststream/cli/supervisors/multiprocess/Multiprocess.md deleted file mode 100644 index ebd2fc6..0000000 --- a/search/docs/api/faststream/cli/supervisors/multiprocess/Multiprocess.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.cli.supervisors.multiprocess.Multiprocess diff --git a/search/docs/api/faststream/cli/supervisors/utils/get_subprocess.md b/search/docs/api/faststream/cli/supervisors/utils/get_subprocess.md deleted file mode 100644 index 159ddc9..0000000 --- a/search/docs/api/faststream/cli/supervisors/utils/get_subprocess.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.cli.supervisors.utils.get_subprocess diff --git a/search/docs/api/faststream/cli/supervisors/utils/set_exit.md b/search/docs/api/faststream/cli/supervisors/utils/set_exit.md deleted file mode 100644 index c2bb5fb..0000000 --- a/search/docs/api/faststream/cli/supervisors/utils/set_exit.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.cli.supervisors.utils.set_exit diff --git a/search/docs/api/faststream/cli/supervisors/utils/subprocess_started.md b/search/docs/api/faststream/cli/supervisors/utils/subprocess_started.md deleted file mode 100644 index 3cb1f84..0000000 --- a/search/docs/api/faststream/cli/supervisors/utils/subprocess_started.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: 
faststream.cli.supervisors.utils.subprocess_started diff --git a/search/docs/api/faststream/cli/supervisors/watchfiles/ExtendedFilter.md b/search/docs/api/faststream/cli/supervisors/watchfiles/ExtendedFilter.md deleted file mode 100644 index 93d8b7f..0000000 --- a/search/docs/api/faststream/cli/supervisors/watchfiles/ExtendedFilter.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.cli.supervisors.watchfiles.ExtendedFilter diff --git a/search/docs/api/faststream/cli/supervisors/watchfiles/WatchReloader.md b/search/docs/api/faststream/cli/supervisors/watchfiles/WatchReloader.md deleted file mode 100644 index 4cca751..0000000 --- a/search/docs/api/faststream/cli/supervisors/watchfiles/WatchReloader.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.cli.supervisors.watchfiles.WatchReloader diff --git a/search/docs/api/faststream/cli/utils/imports/get_app_path.md b/search/docs/api/faststream/cli/utils/imports/get_app_path.md deleted file mode 100644 index 85b8019..0000000 --- a/search/docs/api/faststream/cli/utils/imports/get_app_path.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.cli.utils.imports.get_app_path diff --git a/search/docs/api/faststream/cli/utils/imports/import_object.md b/search/docs/api/faststream/cli/utils/imports/import_object.md deleted file mode 100644 index 2eaa16f..0000000 --- a/search/docs/api/faststream/cli/utils/imports/import_object.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.cli.utils.imports.import_object diff --git a/search/docs/api/faststream/cli/utils/imports/try_import_app.md b/search/docs/api/faststream/cli/utils/imports/try_import_app.md deleted file mode 100644 index 97cc011..0000000 --- a/search/docs/api/faststream/cli/utils/imports/try_import_app.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.cli.utils.imports.try_import_app diff --git a/search/docs/api/faststream/cli/utils/logs/LogLevels.md b/search/docs/api/faststream/cli/utils/logs/LogLevels.md deleted file mode 100644 index 8f2cc1d..0000000 --- a/search/docs/api/faststream/cli/utils/logs/LogLevels.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.cli.utils.logs.LogLevels diff --git a/search/docs/api/faststream/cli/utils/logs/get_log_level.md b/search/docs/api/faststream/cli/utils/logs/get_log_level.md deleted file mode 100644 index e346cb9..0000000 --- a/search/docs/api/faststream/cli/utils/logs/get_log_level.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.cli.utils.logs.get_log_level diff --git a/search/docs/api/faststream/cli/utils/logs/set_log_level.md b/search/docs/api/faststream/cli/utils/logs/set_log_level.md deleted file mode 100644 index fcb0b33..0000000 --- a/search/docs/api/faststream/cli/utils/logs/set_log_level.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.cli.utils.logs.set_log_level diff --git a/search/docs/api/faststream/cli/utils/parser/parse_cli_args.md b/search/docs/api/faststream/cli/utils/parser/parse_cli_args.md deleted file mode 100644 index ae396af..0000000 --- a/search/docs/api/faststream/cli/utils/parser/parse_cli_args.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.cli.utils.parser.parse_cli_args diff --git a/search/docs/api/faststream/cli/utils/parser/remove_prefix.md b/search/docs/api/faststream/cli/utils/parser/remove_prefix.md deleted file mode 100644 index 2cc7d44..0000000 --- a/search/docs/api/faststream/cli/utils/parser/remove_prefix.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.cli.utils.parser.remove_prefix diff --git a/search/docs/api/faststream/constants/ContentTypes.md 
b/search/docs/api/faststream/constants/ContentTypes.md deleted file mode 100644 index 2d857ff..0000000 --- a/search/docs/api/faststream/constants/ContentTypes.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.constants.ContentTypes diff --git a/search/docs/api/faststream/exceptions/AckMessage.md b/search/docs/api/faststream/exceptions/AckMessage.md deleted file mode 100644 index b6193d6..0000000 --- a/search/docs/api/faststream/exceptions/AckMessage.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.exceptions.AckMessage diff --git a/search/docs/api/faststream/exceptions/HandlerException.md b/search/docs/api/faststream/exceptions/HandlerException.md deleted file mode 100644 index f0fff5d..0000000 --- a/search/docs/api/faststream/exceptions/HandlerException.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.exceptions.HandlerException diff --git a/search/docs/api/faststream/exceptions/NackMessage.md b/search/docs/api/faststream/exceptions/NackMessage.md deleted file mode 100644 index e7ebafa..0000000 --- a/search/docs/api/faststream/exceptions/NackMessage.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.exceptions.NackMessage diff --git a/search/docs/api/faststream/exceptions/RejectMessage.md b/search/docs/api/faststream/exceptions/RejectMessage.md deleted file mode 100644 index 9579ad1..0000000 --- a/search/docs/api/faststream/exceptions/RejectMessage.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.exceptions.RejectMessage diff --git a/search/docs/api/faststream/exceptions/SkipMessage.md b/search/docs/api/faststream/exceptions/SkipMessage.md deleted file mode 100644 index 7ffe579..0000000 --- a/search/docs/api/faststream/exceptions/SkipMessage.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.exceptions.SkipMessage diff --git a/search/docs/api/faststream/exceptions/StopConsume.md b/search/docs/api/faststream/exceptions/StopConsume.md deleted file mode 100644 index e550003..0000000 --- a/search/docs/api/faststream/exceptions/StopConsume.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.exceptions.StopConsume diff --git a/search/docs/api/faststream/kafka/asyncapi/Handler.md b/search/docs/api/faststream/kafka/asyncapi/Handler.md deleted file mode 100644 index 5cef4ba..0000000 --- a/search/docs/api/faststream/kafka/asyncapi/Handler.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.kafka.asyncapi.Handler diff --git a/search/docs/api/faststream/kafka/asyncapi/Publisher.md b/search/docs/api/faststream/kafka/asyncapi/Publisher.md deleted file mode 100644 index d91b8fd..0000000 --- a/search/docs/api/faststream/kafka/asyncapi/Publisher.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.kafka.asyncapi.Publisher diff --git a/search/docs/api/faststream/kafka/broker/KafkaBroker.md b/search/docs/api/faststream/kafka/broker/KafkaBroker.md deleted file mode 100644 index 64a046e..0000000 --- a/search/docs/api/faststream/kafka/broker/KafkaBroker.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.kafka.broker.KafkaBroker diff --git a/search/docs/api/faststream/kafka/fastapi/KafkaRouter.md b/search/docs/api/faststream/kafka/fastapi/KafkaRouter.md deleted file mode 100644 index 2a24f25..0000000 --- a/search/docs/api/faststream/kafka/fastapi/KafkaRouter.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.kafka.fastapi.KafkaRouter diff --git a/search/docs/api/faststream/kafka/handler/LogicHandler.md b/search/docs/api/faststream/kafka/handler/LogicHandler.md deleted file mode 100644 index 140e829..0000000 --- a/search/docs/api/faststream/kafka/handler/LogicHandler.md +++ /dev/null @@ 
-1,3 +0,0 @@ - - -::: faststream.kafka.handler.LogicHandler diff --git a/search/docs/api/faststream/kafka/message/KafkaMessage.md b/search/docs/api/faststream/kafka/message/KafkaMessage.md deleted file mode 100644 index f8d81c2..0000000 --- a/search/docs/api/faststream/kafka/message/KafkaMessage.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.kafka.message.KafkaMessage diff --git a/search/docs/api/faststream/kafka/parser/AioKafkaParser.md b/search/docs/api/faststream/kafka/parser/AioKafkaParser.md deleted file mode 100644 index dbb8f28..0000000 --- a/search/docs/api/faststream/kafka/parser/AioKafkaParser.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.kafka.parser.AioKafkaParser diff --git a/search/docs/api/faststream/kafka/producer/AioKafkaFastProducer.md b/search/docs/api/faststream/kafka/producer/AioKafkaFastProducer.md deleted file mode 100644 index f5884f9..0000000 --- a/search/docs/api/faststream/kafka/producer/AioKafkaFastProducer.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.kafka.producer.AioKafkaFastProducer diff --git a/search/docs/api/faststream/kafka/publisher/LogicPublisher.md b/search/docs/api/faststream/kafka/publisher/LogicPublisher.md deleted file mode 100644 index 9e102cf..0000000 --- a/search/docs/api/faststream/kafka/publisher/LogicPublisher.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.kafka.publisher.LogicPublisher diff --git a/search/docs/api/faststream/kafka/router/KafkaRouter.md b/search/docs/api/faststream/kafka/router/KafkaRouter.md deleted file mode 100644 index 9a4e764..0000000 --- a/search/docs/api/faststream/kafka/router/KafkaRouter.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.kafka.router.KafkaRouter diff --git a/search/docs/api/faststream/kafka/security/parse_security.md b/search/docs/api/faststream/kafka/security/parse_security.md deleted file mode 100644 index e6efaf4..0000000 --- a/search/docs/api/faststream/kafka/security/parse_security.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.kafka.security.parse_security diff --git a/search/docs/api/faststream/kafka/shared/logging/KafkaLoggingMixin.md b/search/docs/api/faststream/kafka/shared/logging/KafkaLoggingMixin.md deleted file mode 100644 index 0187ef8..0000000 --- a/search/docs/api/faststream/kafka/shared/logging/KafkaLoggingMixin.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.kafka.shared.logging.KafkaLoggingMixin diff --git a/search/docs/api/faststream/kafka/shared/publisher/ABCPublisher.md b/search/docs/api/faststream/kafka/shared/publisher/ABCPublisher.md deleted file mode 100644 index fd2053e..0000000 --- a/search/docs/api/faststream/kafka/shared/publisher/ABCPublisher.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.kafka.shared.publisher.ABCPublisher diff --git a/search/docs/api/faststream/kafka/shared/router/KafkaRouter.md b/search/docs/api/faststream/kafka/shared/router/KafkaRouter.md deleted file mode 100644 index 5fa9d63..0000000 --- a/search/docs/api/faststream/kafka/shared/router/KafkaRouter.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.kafka.shared.router.KafkaRouter diff --git a/search/docs/api/faststream/kafka/shared/schemas/ConsumerConnectionParams.md b/search/docs/api/faststream/kafka/shared/schemas/ConsumerConnectionParams.md deleted file mode 100644 index 2480ec5..0000000 --- a/search/docs/api/faststream/kafka/shared/schemas/ConsumerConnectionParams.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.kafka.shared.schemas.ConsumerConnectionParams diff --git a/search/docs/api/faststream/kafka/test/FakeProducer.md 
b/search/docs/api/faststream/kafka/test/FakeProducer.md deleted file mode 100644 index 564ecbd..0000000 --- a/search/docs/api/faststream/kafka/test/FakeProducer.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.kafka.test.FakeProducer diff --git a/search/docs/api/faststream/kafka/test/TestKafkaBroker.md b/search/docs/api/faststream/kafka/test/TestKafkaBroker.md deleted file mode 100644 index 33fb0a8..0000000 --- a/search/docs/api/faststream/kafka/test/TestKafkaBroker.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.kafka.test.TestKafkaBroker diff --git a/search/docs/api/faststream/kafka/test/build_message.md b/search/docs/api/faststream/kafka/test/build_message.md deleted file mode 100644 index a1e6804..0000000 --- a/search/docs/api/faststream/kafka/test/build_message.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.kafka.test.build_message diff --git a/search/docs/api/faststream/log/formatter/ColourizedFormatter.md b/search/docs/api/faststream/log/formatter/ColourizedFormatter.md deleted file mode 100644 index 7fad07c..0000000 --- a/search/docs/api/faststream/log/formatter/ColourizedFormatter.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.log.formatter.ColourizedFormatter diff --git a/search/docs/api/faststream/log/formatter/expand_log_field.md b/search/docs/api/faststream/log/formatter/expand_log_field.md deleted file mode 100644 index cde1975..0000000 --- a/search/docs/api/faststream/log/formatter/expand_log_field.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.log.formatter.expand_log_field diff --git a/search/docs/api/faststream/log/formatter/make_record_with_extra.md b/search/docs/api/faststream/log/formatter/make_record_with_extra.md deleted file mode 100644 index cab315c..0000000 --- a/search/docs/api/faststream/log/formatter/make_record_with_extra.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.log.formatter.make_record_with_extra diff --git a/search/docs/api/faststream/log/logging/configure_formatter.md b/search/docs/api/faststream/log/logging/configure_formatter.md deleted file mode 100644 index ff96783..0000000 --- a/search/docs/api/faststream/log/logging/configure_formatter.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.log.logging.configure_formatter diff --git a/search/docs/api/faststream/nats/asyncapi/Handler.md b/search/docs/api/faststream/nats/asyncapi/Handler.md deleted file mode 100644 index 7b94601..0000000 --- a/search/docs/api/faststream/nats/asyncapi/Handler.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.nats.asyncapi.Handler diff --git a/search/docs/api/faststream/nats/asyncapi/Publisher.md b/search/docs/api/faststream/nats/asyncapi/Publisher.md deleted file mode 100644 index 93912fa..0000000 --- a/search/docs/api/faststream/nats/asyncapi/Publisher.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.nats.asyncapi.Publisher diff --git a/search/docs/api/faststream/nats/broker/NatsBroker.md b/search/docs/api/faststream/nats/broker/NatsBroker.md deleted file mode 100644 index 10aa12b..0000000 --- a/search/docs/api/faststream/nats/broker/NatsBroker.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.nats.broker.NatsBroker diff --git a/search/docs/api/faststream/nats/fastapi/NatsRouter.md b/search/docs/api/faststream/nats/fastapi/NatsRouter.md deleted file mode 100644 index eaa8520..0000000 --- a/search/docs/api/faststream/nats/fastapi/NatsRouter.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.nats.fastapi.NatsRouter diff --git a/search/docs/api/faststream/nats/handler/LogicNatsHandler.md 
b/search/docs/api/faststream/nats/handler/LogicNatsHandler.md deleted file mode 100644 index 1a53b76..0000000 --- a/search/docs/api/faststream/nats/handler/LogicNatsHandler.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.nats.handler.LogicNatsHandler diff --git a/search/docs/api/faststream/nats/helpers/StreamBuilder.md b/search/docs/api/faststream/nats/helpers/StreamBuilder.md deleted file mode 100644 index cee7ef2..0000000 --- a/search/docs/api/faststream/nats/helpers/StreamBuilder.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.nats.helpers.StreamBuilder diff --git a/search/docs/api/faststream/nats/js_stream/JStream.md b/search/docs/api/faststream/nats/js_stream/JStream.md deleted file mode 100644 index 6fd13ad..0000000 --- a/search/docs/api/faststream/nats/js_stream/JStream.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.nats.js_stream.JStream diff --git a/search/docs/api/faststream/nats/message/NatsMessage.md b/search/docs/api/faststream/nats/message/NatsMessage.md deleted file mode 100644 index 2b1cf46..0000000 --- a/search/docs/api/faststream/nats/message/NatsMessage.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.nats.message.NatsMessage diff --git a/search/docs/api/faststream/nats/parser/NatsParser.md b/search/docs/api/faststream/nats/parser/NatsParser.md deleted file mode 100644 index 351ade6..0000000 --- a/search/docs/api/faststream/nats/parser/NatsParser.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.nats.parser.NatsParser diff --git a/search/docs/api/faststream/nats/producer/NatsFastProducer.md b/search/docs/api/faststream/nats/producer/NatsFastProducer.md deleted file mode 100644 index c306c81..0000000 --- a/search/docs/api/faststream/nats/producer/NatsFastProducer.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.nats.producer.NatsFastProducer diff --git a/search/docs/api/faststream/nats/producer/NatsJSFastProducer.md b/search/docs/api/faststream/nats/producer/NatsJSFastProducer.md deleted file mode 100644 index e8b5402..0000000 --- a/search/docs/api/faststream/nats/producer/NatsJSFastProducer.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.nats.producer.NatsJSFastProducer diff --git a/search/docs/api/faststream/nats/publisher/LogicPublisher.md b/search/docs/api/faststream/nats/publisher/LogicPublisher.md deleted file mode 100644 index 01869cc..0000000 --- a/search/docs/api/faststream/nats/publisher/LogicPublisher.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.nats.publisher.LogicPublisher diff --git a/search/docs/api/faststream/nats/router/NatsRouter.md b/search/docs/api/faststream/nats/router/NatsRouter.md deleted file mode 100644 index 3c06960..0000000 --- a/search/docs/api/faststream/nats/router/NatsRouter.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.nats.router.NatsRouter diff --git a/search/docs/api/faststream/nats/shared/logging/NatsLoggingMixin.md b/search/docs/api/faststream/nats/shared/logging/NatsLoggingMixin.md deleted file mode 100644 index 90b6c3c..0000000 --- a/search/docs/api/faststream/nats/shared/logging/NatsLoggingMixin.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.nats.shared.logging.NatsLoggingMixin diff --git a/search/docs/api/faststream/nats/shared/router/NatsRouter.md b/search/docs/api/faststream/nats/shared/router/NatsRouter.md deleted file mode 100644 index 53dc2d1..0000000 --- a/search/docs/api/faststream/nats/shared/router/NatsRouter.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.nats.shared.router.NatsRouter diff --git a/search/docs/api/faststream/nats/test/FakeProducer.md 
b/search/docs/api/faststream/nats/test/FakeProducer.md deleted file mode 100644 index 17bb040..0000000 --- a/search/docs/api/faststream/nats/test/FakeProducer.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.nats.test.FakeProducer diff --git a/search/docs/api/faststream/nats/test/PatchedMessage.md b/search/docs/api/faststream/nats/test/PatchedMessage.md deleted file mode 100644 index b2cd58d..0000000 --- a/search/docs/api/faststream/nats/test/PatchedMessage.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.nats.test.PatchedMessage diff --git a/search/docs/api/faststream/nats/test/TestNatsBroker.md b/search/docs/api/faststream/nats/test/TestNatsBroker.md deleted file mode 100644 index bf5dcf3..0000000 --- a/search/docs/api/faststream/nats/test/TestNatsBroker.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.nats.test.TestNatsBroker diff --git a/search/docs/api/faststream/nats/test/build_message.md b/search/docs/api/faststream/nats/test/build_message.md deleted file mode 100644 index ee8d45a..0000000 --- a/search/docs/api/faststream/nats/test/build_message.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.nats.test.build_message diff --git a/search/docs/api/faststream/rabbit/asyncapi/Handler.md b/search/docs/api/faststream/rabbit/asyncapi/Handler.md deleted file mode 100644 index 6af44c6..0000000 --- a/search/docs/api/faststream/rabbit/asyncapi/Handler.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.asyncapi.Handler diff --git a/search/docs/api/faststream/rabbit/asyncapi/Publisher.md b/search/docs/api/faststream/rabbit/asyncapi/Publisher.md deleted file mode 100644 index ac4f343..0000000 --- a/search/docs/api/faststream/rabbit/asyncapi/Publisher.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.asyncapi.Publisher diff --git a/search/docs/api/faststream/rabbit/asyncapi/RMQAsyncAPIChannel.md b/search/docs/api/faststream/rabbit/asyncapi/RMQAsyncAPIChannel.md deleted file mode 100644 index 1a4923c..0000000 --- a/search/docs/api/faststream/rabbit/asyncapi/RMQAsyncAPIChannel.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.asyncapi.RMQAsyncAPIChannel diff --git a/search/docs/api/faststream/rabbit/broker/RabbitBroker.md b/search/docs/api/faststream/rabbit/broker/RabbitBroker.md deleted file mode 100644 index 39f86ba..0000000 --- a/search/docs/api/faststream/rabbit/broker/RabbitBroker.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.broker.RabbitBroker diff --git a/search/docs/api/faststream/rabbit/fastapi/RabbitRouter.md b/search/docs/api/faststream/rabbit/fastapi/RabbitRouter.md deleted file mode 100644 index e19594f..0000000 --- a/search/docs/api/faststream/rabbit/fastapi/RabbitRouter.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.fastapi.RabbitRouter diff --git a/search/docs/api/faststream/rabbit/handler/LogicHandler.md b/search/docs/api/faststream/rabbit/handler/LogicHandler.md deleted file mode 100644 index bc9da85..0000000 --- a/search/docs/api/faststream/rabbit/handler/LogicHandler.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.handler.LogicHandler diff --git a/search/docs/api/faststream/rabbit/helpers/RabbitDeclarer.md b/search/docs/api/faststream/rabbit/helpers/RabbitDeclarer.md deleted file mode 100644 index 31e8eaf..0000000 --- a/search/docs/api/faststream/rabbit/helpers/RabbitDeclarer.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.helpers.RabbitDeclarer diff --git a/search/docs/api/faststream/rabbit/message/RabbitMessage.md b/search/docs/api/faststream/rabbit/message/RabbitMessage.md deleted 
file mode 100644 index 8544769..0000000 --- a/search/docs/api/faststream/rabbit/message/RabbitMessage.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.message.RabbitMessage diff --git a/search/docs/api/faststream/rabbit/parser/AioPikaParser.md b/search/docs/api/faststream/rabbit/parser/AioPikaParser.md deleted file mode 100644 index 24b8258..0000000 --- a/search/docs/api/faststream/rabbit/parser/AioPikaParser.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.parser.AioPikaParser diff --git a/search/docs/api/faststream/rabbit/producer/AioPikaFastProducer.md b/search/docs/api/faststream/rabbit/producer/AioPikaFastProducer.md deleted file mode 100644 index 89866c6..0000000 --- a/search/docs/api/faststream/rabbit/producer/AioPikaFastProducer.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.producer.AioPikaFastProducer diff --git a/search/docs/api/faststream/rabbit/publisher/LogicPublisher.md b/search/docs/api/faststream/rabbit/publisher/LogicPublisher.md deleted file mode 100644 index cfbb1e1..0000000 --- a/search/docs/api/faststream/rabbit/publisher/LogicPublisher.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.publisher.LogicPublisher diff --git a/search/docs/api/faststream/rabbit/router/RabbitRouter.md b/search/docs/api/faststream/rabbit/router/RabbitRouter.md deleted file mode 100644 index 9e82b4d..0000000 --- a/search/docs/api/faststream/rabbit/router/RabbitRouter.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.router.RabbitRouter diff --git a/search/docs/api/faststream/rabbit/security/parse_security.md b/search/docs/api/faststream/rabbit/security/parse_security.md deleted file mode 100644 index 9d4ded7..0000000 --- a/search/docs/api/faststream/rabbit/security/parse_security.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.security.parse_security diff --git a/search/docs/api/faststream/rabbit/shared/constants/ExchangeType.md b/search/docs/api/faststream/rabbit/shared/constants/ExchangeType.md deleted file mode 100644 index a92285d..0000000 --- a/search/docs/api/faststream/rabbit/shared/constants/ExchangeType.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.shared.constants.ExchangeType diff --git a/search/docs/api/faststream/rabbit/shared/logging/RabbitLoggingMixin.md b/search/docs/api/faststream/rabbit/shared/logging/RabbitLoggingMixin.md deleted file mode 100644 index e01ac23..0000000 --- a/search/docs/api/faststream/rabbit/shared/logging/RabbitLoggingMixin.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.shared.logging.RabbitLoggingMixin diff --git a/search/docs/api/faststream/rabbit/shared/publisher/ABCPublisher.md b/search/docs/api/faststream/rabbit/shared/publisher/ABCPublisher.md deleted file mode 100644 index 50ef041..0000000 --- a/search/docs/api/faststream/rabbit/shared/publisher/ABCPublisher.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.shared.publisher.ABCPublisher diff --git a/search/docs/api/faststream/rabbit/shared/router/RabbitRouter.md b/search/docs/api/faststream/rabbit/shared/router/RabbitRouter.md deleted file mode 100644 index 2be3db3..0000000 --- a/search/docs/api/faststream/rabbit/shared/router/RabbitRouter.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.shared.router.RabbitRouter diff --git a/search/docs/api/faststream/rabbit/shared/schemas/BaseRMQInformation.md b/search/docs/api/faststream/rabbit/shared/schemas/BaseRMQInformation.md deleted file mode 100644 index e0f233b..0000000 --- 
a/search/docs/api/faststream/rabbit/shared/schemas/BaseRMQInformation.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.shared.schemas.BaseRMQInformation diff --git a/search/docs/api/faststream/rabbit/shared/schemas/RabbitExchange.md b/search/docs/api/faststream/rabbit/shared/schemas/RabbitExchange.md deleted file mode 100644 index 378d85a..0000000 --- a/search/docs/api/faststream/rabbit/shared/schemas/RabbitExchange.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.shared.schemas.RabbitExchange diff --git a/search/docs/api/faststream/rabbit/shared/schemas/RabbitQueue.md b/search/docs/api/faststream/rabbit/shared/schemas/RabbitQueue.md deleted file mode 100644 index be64b41..0000000 --- a/search/docs/api/faststream/rabbit/shared/schemas/RabbitQueue.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.shared.schemas.RabbitQueue diff --git a/search/docs/api/faststream/rabbit/shared/schemas/get_routing_hash.md b/search/docs/api/faststream/rabbit/shared/schemas/get_routing_hash.md deleted file mode 100644 index 5d56af2..0000000 --- a/search/docs/api/faststream/rabbit/shared/schemas/get_routing_hash.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.shared.schemas.get_routing_hash diff --git a/search/docs/api/faststream/rabbit/test/FakeProducer.md b/search/docs/api/faststream/rabbit/test/FakeProducer.md deleted file mode 100644 index 2c322d5..0000000 --- a/search/docs/api/faststream/rabbit/test/FakeProducer.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.test.FakeProducer diff --git a/search/docs/api/faststream/rabbit/test/PatchedMessage.md b/search/docs/api/faststream/rabbit/test/PatchedMessage.md deleted file mode 100644 index 1d48ae3..0000000 --- a/search/docs/api/faststream/rabbit/test/PatchedMessage.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.test.PatchedMessage diff --git a/search/docs/api/faststream/rabbit/test/TestRabbitBroker.md b/search/docs/api/faststream/rabbit/test/TestRabbitBroker.md deleted file mode 100644 index df580a8..0000000 --- a/search/docs/api/faststream/rabbit/test/TestRabbitBroker.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.test.TestRabbitBroker diff --git a/search/docs/api/faststream/rabbit/test/build_message.md b/search/docs/api/faststream/rabbit/test/build_message.md deleted file mode 100644 index ce8d58f..0000000 --- a/search/docs/api/faststream/rabbit/test/build_message.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.rabbit.test.build_message diff --git a/search/docs/api/faststream/utils/classes/Singleton.md b/search/docs/api/faststream/utils/classes/Singleton.md deleted file mode 100644 index 50c6564..0000000 --- a/search/docs/api/faststream/utils/classes/Singleton.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.utils.classes.Singleton diff --git a/search/docs/api/faststream/utils/context/main/ContextRepo.md b/search/docs/api/faststream/utils/context/main/ContextRepo.md deleted file mode 100644 index a2ff21a..0000000 --- a/search/docs/api/faststream/utils/context/main/ContextRepo.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.utils.context.main.ContextRepo diff --git a/search/docs/api/faststream/utils/context/types/Context.md b/search/docs/api/faststream/utils/context/types/Context.md deleted file mode 100644 index 456c6bd..0000000 --- a/search/docs/api/faststream/utils/context/types/Context.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.utils.context.types.Context diff --git a/search/docs/api/faststream/utils/context/types/resolve_context.md 
b/search/docs/api/faststream/utils/context/types/resolve_context.md deleted file mode 100644 index 53855bc..0000000 --- a/search/docs/api/faststream/utils/context/types/resolve_context.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.utils.context.types.resolve_context diff --git a/search/docs/api/faststream/utils/data/filter_by_dict.md b/search/docs/api/faststream/utils/data/filter_by_dict.md deleted file mode 100644 index 83ce1b9..0000000 --- a/search/docs/api/faststream/utils/data/filter_by_dict.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.utils.data.filter_by_dict diff --git a/search/docs/api/faststream/utils/functions/get_function_positional_arguments.md b/search/docs/api/faststream/utils/functions/get_function_positional_arguments.md deleted file mode 100644 index cc9478c..0000000 --- a/search/docs/api/faststream/utils/functions/get_function_positional_arguments.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.utils.functions.get_function_positional_arguments diff --git a/search/docs/api/faststream/utils/functions/timeout_scope.md b/search/docs/api/faststream/utils/functions/timeout_scope.md deleted file mode 100644 index 5a55da4..0000000 --- a/search/docs/api/faststream/utils/functions/timeout_scope.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.utils.functions.timeout_scope diff --git a/search/docs/api/faststream/utils/functions/to_async.md b/search/docs/api/faststream/utils/functions/to_async.md deleted file mode 100644 index 98689c4..0000000 --- a/search/docs/api/faststream/utils/functions/to_async.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.utils.functions.to_async diff --git a/search/docs/api/faststream/utils/no_cast/NoCast.md b/search/docs/api/faststream/utils/no_cast/NoCast.md deleted file mode 100644 index 180a5ef..0000000 --- a/search/docs/api/faststream/utils/no_cast/NoCast.md +++ /dev/null @@ -1,3 +0,0 @@ - - -::: faststream.utils.no_cast.NoCast diff --git a/search/docs/getting-started/asyncapi/custom.md b/search/docs/getting-started/asyncapi/custom.md deleted file mode 100644 index 9771dca..0000000 --- a/search/docs/getting-started/asyncapi/custom.md +++ /dev/null @@ -1,188 +0,0 @@ -# Customizing AsyncAPI Documentation for FastStream - -In this guide, we will explore how to customize AsyncAPI documentation for your FastStream application. Whether you want to add custom app info, broker information, handlers, or fine-tune payload details, we'll walk you through each step. - -## Prerequisites - -Before we dive into customization, ensure you have a basic FastStream application up and running. If you haven't done that yet, let's set up a simple application right now. - -Copy the following code into your `basic.py` file: - -```python linenums="1" -from faststream import FastStream -from faststream.kafka import KafkaBroker, KafkaMessage - -broker = KafkaBroker("localhost:9092") -app = FastStream(broker) - -@broker.publisher("output_data") -@broker.subscriber("input_data") -async def on_input_data(msg): - # your processing logic - pass -``` - -Then serve the documentation: - -``` shell -faststream docs serve basic:app -``` - -![HTML-page](../../../assets/img/AsyncAPI-basic-html-short.png) - -## Setup Custom FastStream App Info - -Let's start by customizing the app information that appears in your AsyncAPI documentation. This is a great way to give your documentation a personal touch. Here's how: - -1. Locate the app configuration in your FastStream application. -1. Update the `title`, `version`, and `description` fields to reflect your application's details. -1. Save the changes. -1.
Serve your FastStream app documentation. - -Copy the following code into your `basic.py` file; we have highlighted the additional info passed to the FastStream app: - -```python linenums="1" hl_lines="7-12" -from faststream import FastStream -from faststream.kafka import KafkaBroker, KafkaMessage -from faststream.asyncapi.schema import Contact, ExternalDocs, License, Tag - -broker = KafkaBroker("localhost:9092") -app = FastStream(broker, - title="My App", - version="1.0.0", - description="Test description", - license=License(name="MIT", url="https://opensource.org/license/mit/"), - terms_of_service="https://my-terms.com/", - contact=Contact(name="support", url="https://help.com/"), - ) - -@broker.publisher("output_data") -@broker.subscriber("input_data") -async def on_input_data(msg): - # your processing logic - pass -``` - -``` shell -faststream docs serve basic:app -``` - -![HTML-page](../../../assets/img/AsyncAPI-custom-info.png) - -Now, your documentation reflects your application's identity and purpose. - -## Setup Custom Broker Information - -The next step is to customize broker information. This helps users understand the messaging system your application uses. Follow these steps: - -1. Locate the broker configuration in your FastStream application. -1. Update the `description` field. -1. Save the changes. -1. Serve your FastStream app. - -Copy the following code into your `basic.py` file; we have highlighted the additional info passed to the FastStream app broker: - -```python linenums="1" hl_lines="5" -from faststream import FastStream -from faststream.kafka import KafkaBroker, KafkaMessage -from faststream.asyncapi.schema import Tag - -broker = KafkaBroker("localhost:9092", description="Kafka broker running locally") -app = FastStream(broker) - -@broker.publisher("output_data") -@broker.subscriber("input_data") -async def on_input_data(msg): - # your processing logic - pass -``` - -``` shell -faststream docs serve basic:app -``` - -![HTML-page](../../../assets/img/AsyncAPI-custom-broker.png) - -Your AsyncAPI documentation now provides clear insights into the messaging infrastructure you're using. - -## Setup Custom Handler Information - -Customizing handler information helps users comprehend the purpose and behavior of each message handler. Here's how to do it: - -1. Navigate to your handler definitions in your FastStream application. -1. Add descriptions to each handler using the `description` field. -1. Save the changes. -1. Serve your FastStream app. - -Copy the following code into your `basic.py` file; we have highlighted the additional info passed to the FastStream app handlers: - -```python linenums="1" hl_lines="7-8" -from faststream import FastStream -from faststream.kafka import KafkaBroker, KafkaMessage - -broker = KafkaBroker("localhost:9092") -app = FastStream(broker) - -@broker.publisher("output_data", description="My publisher description") -@broker.subscriber("input_data", description="My subscriber description") -async def on_input_data(msg): - # your processing logic - pass -``` - -``` shell -faststream docs serve basic:app -``` - -![HTML-page](../../../assets/img/AsyncAPI-custom-handler.png) - -Now, your documentation is enriched with meaningful details about each message handler. - -## Setup Payload Information via Pydantic Model - -To describe your message payload effectively, you can use Pydantic models. Here's how: - -1. Define Pydantic models for your message payloads. -1. Annotate these models with descriptions and examples. -1. Use these models as argument types or return types in your handlers. -1. Save the changes. -1.
Serve your FastStream app. - -Copy the following code into your `basic.py` file; we have highlighted the creation of the payload info, and you can see it being used as both the return type and the `msg` argument type of the `on_input_data` function: - -```python linenums="1" hl_lines="5" -from pydantic import BaseModel, Field, NonNegativeFloat - -from faststream import FastStream -from faststream.kafka import KafkaBroker - - -class DataBasic(BaseModel): - data: NonNegativeFloat = Field( - ..., examples=[0.5], description="Float data example" - ) - - -broker = KafkaBroker("localhost:9092") -app = FastStream(broker) - - -@broker.publisher("output_data") -@broker.subscriber("input_data") -async def on_input_data(msg: DataBasic) -> DataBasic: - # your processing logic - pass -``` - -``` shell -faststream docs serve basic:app -``` - -![HTML-page](../../../assets/img/AsyncAPI-payload-info.png) - -Your AsyncAPI documentation now showcases well-structured payload information. - -## Generate Schema.json, Customize Manually, and Serve It - -To take customization to the next level, you can manually modify the schema.json file. Follow these steps: - -1. Generate the schema file: `faststream docs gen basic:app` -1. Manually edit the `asyncapi.json` file to add custom fields, descriptions, and details. -1. Save your changes. -1. Serve the edited schema: `faststream docs serve asyncapi.json` - -Now, you have fine-tuned control over your AsyncAPI documentation. - -## Conclusion - -Customizing AsyncAPI documentation for your FastStream application not only enhances its appearance but also provides valuable insights to users. With these steps, you can create documentation that's not only informative but also uniquely yours. - -Happy coding with your customized FastStream AsyncAPI documentation! diff --git a/search/docs/getting-started/asyncapi/export.md b/search/docs/getting-started/asyncapi/export.md deleted file mode 100644 index 177f854..0000000 --- a/search/docs/getting-started/asyncapi/export.md +++ /dev/null @@ -1,56 +0,0 @@ -# How to Generate and Serve AsyncAPI Documentation - -In this guide, let's explore how to generate and serve [**AsyncAPI**](https://www.asyncapi.com/){.external-link target="_blank"} documentation for our FastStream application. - -## Writing the FastStream Application - -Here's an example Python application using **FastStream** that consumes data from a -topic, increments the value, and outputs the data to another topic. -Save it in a file called `basic.py`. - -``` python -from pydantic import BaseModel, Field, NonNegativeFloat - -from faststream import FastStream, Logger -from faststream.kafka import KafkaBroker - - -class DataBasic(BaseModel): - data: NonNegativeFloat = Field( - ..., examples=[0.5], description="Float data example" - ) - - -broker = KafkaBroker("localhost:9092") -app = FastStream(broker) - - -@broker.publisher("output_data") -@broker.subscriber("input_data") -async def on_input_data(msg: DataBasic, logger: Logger) -> DataBasic: - logger.info(msg) - return DataBasic(data=msg.data + 1.0) -``` - -## Generating the AsyncAPI Specification - -Now that we have a FastStream application, we can proceed with generating the AsyncAPI specification using a CLI command. - -``` shell -faststream docs gen basic:app -``` - -The above command will generate the AsyncAPI specification and save it in a file called `asyncapi.json`. - -If you prefer `yaml` instead of `json`, please run the following command to generate `asyncapi.yaml`. - -``` shell -faststream docs gen --yaml basic:app -``` - -!!!
note - To generate the documentation in **YAML** format, please first install the necessary dependency for working with the **YAML** file format: - - ``` shell - pip install PyYAML - ``` diff --git a/search/docs/getting-started/asyncapi/hosting.md b/search/docs/getting-started/asyncapi/hosting.md deleted file mode 100644 index 67b8ef3..0000000 --- a/search/docs/getting-started/asyncapi/hosting.md +++ /dev/null @@ -1,38 +0,0 @@ -# Serving the AsyncAPI Documentation - -FastStream provides a command to serve the AsyncAPI documentation. - -!!! note - This feature requires an Internet connection to obtain the **AsyncAPI HTML** via **CDN**. - -``` shell -faststream docs serve basic:app -``` - -In the above command, we are providing the path in the format of `python_module:FastStream`. Alternatively, you can also specify `asyncapi.json` or `asyncapi.yaml` to serve the AsyncAPI documentation. - -``` shell -faststream docs serve asyncapi.json -# or -faststream docs serve asyncapi.yaml -``` - -After running the command, it should serve the AsyncAPI documentation on port **8000** and display the following logs in the terminal. - -``` shell -INFO: Started server process [2364992] -INFO: Waiting for application startup. -INFO: Application startup complete. -INFO: Uvicorn running on http://localhost:8000 (Press CTRL+C to quit) -``` - -And you should be able to see the following page in your browser: - -=== "Short" - ![HTML-page](../../../assets/img/AsyncAPI-basic-html-short.png){ loading=lazy } - -=== "Expand" - ![HTML-page](../../../assets/img/AsyncAPI-basic-html-full.png){ loading=lazy } - -!!! tip - The command also offers options to serve the documentation on a different host and port. diff --git a/search/docs/getting-started/cli/index.md b/search/docs/getting-started/cli/index.md deleted file mode 100644 index 4891760..0000000 --- a/search/docs/getting-started/cli/index.md +++ /dev/null @@ -1,128 +0,0 @@ -# CLI - -**FastStream** has its own built-in **CLI** tool for your maximum comfort as a developer. - -!!! quote "" - Thanks to [*typer*](https://typer.tiangolo.com/){.external-link target="_blank"} and [*watchfiles*](https://watchfiles.helpmanual.io/){.external-link target="_blank"}. Their work is the basis of this tool. - -```shell -faststream --help -``` - -```{ .shell .no-copy } -Usage: faststream [OPTIONS] COMMAND [ARGS]... - - Generate, run and manage FastStream apps to greater development experience - -Options: - -v, --version Show current platform, python and FastStream - version - --install-completion [bash|zsh|fish|powershell|pwsh] - Install completion for the specified shell. - --show-completion [bash|zsh|fish|powershell|pwsh] - Show completion for the specified shell, to - copy it or customize the installation. - --help Show this message and exit. - -Commands: - docs AsyncAPI schema commands - run Run [MODULE:APP] FastStream application -``` - -## Running the Project - -### Multiprocessing Scaling - -**FastStream** allows you to scale your application right from the command line by running it in a process pool.
- -Just set the `--workers` option to scale your application: - -```shell -faststream run serve:app --workers 2 -``` - -```{ .shell .no-copy } -INFO - Started parent process [7591] -INFO - Started child process [7593] -INFO - Started child process [7594] -INFO - test | - `Handle` waiting for messages -INFO - test | - `Handle` waiting for messages -``` - -### Hot Reload - -Thanks to [*watchfiles*](https://watchfiles.helpmanual.io/){.external-link target="_blank"}, written in *Rust*, you can -work with your project easily. Edit the code as much as you like - the new version has already been launched and is waiting for your requests! - -```shell -faststream run serve:app --reload -``` - -```{ .shell .no-copy } -INFO - Started reloader process [7902] using WatchFiles -INFO - FastStream app starting... -INFO - test | - `Handle` waiting for messages -INFO - FastStream app started successfully! To exit press CTRL+C -``` - -### Environment Management - -You can pass any custom flags and launch options to the **FastStream CLI** even without first registering them. Just use them when launching the application - and they will be right in your environment. - -Use this option to select environment files, configure logging, or anything else at your discretion. - -For example, we will pass the *.env* file to the context of our application: - -```shell -faststream run serve:app --env=.env.dev -``` - -```{ .shell .no-copy } -INFO - FastStream app starting... -INFO - test | - `Handle` waiting for messages -INFO - FastStream app started successfully! To exit press CTRL+C -``` - -{! includes/getting_started/cli/env.md !} - -!!! note - Note that the `env` parameter was passed to the `setup` function directly from the command line. - -All passed values can be of type `#!python bool`, `#!python str` or `#!python list[str]`. - -In this case, the flags will be interpreted as follows: - -```{ .shell .no-copy } -faststream run app:app --flag # flag = True -faststream run app:app --no-flag # flag = False -faststream run app:app --my-flag # my_flag = True -faststream run app:app --key value # key = "value" -faststream run app:app --key 1 2 # key = ["1", "2"] -``` - -You can use them both individually and together in unlimited quantities. - -## AsyncAPI Schema - -Also, the **FastStream CLI** allows you to work with the **AsyncAPI** schema in a simple way. - -You are able to generate `.json` or `.yaml` files from your application code or host the **HTML** representation directly: - -```shell -faststream docs --help -``` - -```{ .shell .no-copy } -Usage: faststream docs [OPTIONS] COMMAND [ARGS]... - - AsyncAPI schema commands - -Options: - --help Show this message and exit. - -Commands: - gen Generate project AsyncAPI schema - serve Serve project AsyncAPI schema -``` - -To learn more about the commands above, please visit [**AsyncAPI export**](../asyncapi/export.md){.internal-link} and [**AsyncAPI hosting**](../asyncapi/hosting.md){.internal-link}. diff --git a/search/docs/getting-started/config/index.md b/search/docs/getting-started/config/index.md deleted file mode 100644 index 796dcfa..0000000 --- a/search/docs/getting-started/config/index.md +++ /dev/null @@ -1,167 +0,0 @@ -# Settings and Environment Variables - -In many cases, your application may require external settings or configurations, such as a broker connection or database credentials. - -To manage these settings effectively, it's common to provide them through environment variables that can be read by the application.
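To make this concrete, here is a minimal sketch of reading such settings by hand (the `URL` and `QUEUE` variable names are illustrative, chosen to match the examples below):

```python
import os

# Read connection settings from the environment, with local-dev fallbacks
url = os.getenv("URL", "amqp://guest:guest@localhost:5672")
queue = os.getenv("QUEUE", "test-queue")
```

Doing this by hand gives you no validation or type conversion, which is exactly what the Pydantic approach below adds.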
- -## Pydantic `Settings` - -Fortunately, **Pydantic** provides a useful utility for handling settings coming from environment variables with [Pydantic: Settings management](https://docs.pydantic.dev/latest/usage/pydantic_settings/){.external-link target="_blank"}. - -### Install `pydantic-settings` - -First, install the `pydantic-settings` package: - -```console -pip install pydantic-settings -``` - -!!! info - In **Pydantic v1**, this functionality was included with the main package. Now it is distributed as an independent package so that you can choose not to install it if you don't need that functionality. - -### Create the `Settings` Object - -Import `BaseSettings` from Pydantic and create a subclass, similar to what you would do with a Pydantic model. - -Just like with Pydantic models, you declare class attributes with type annotations and can use all the same validation features and tools, including different data types and additional validations with `Field()`. - -=== "Pydantic v2" - ```python linenums='1' hl_lines="1 4" title="config.py" -from pydantic_settings import BaseSettings - - -class Settings(BaseSettings): - url: str = "" - queue: str = "test-queue" - - -settings = Settings() - ``` - -=== "Pydantic v1" - !!! info - In **Pydantic v1** you would import `BaseSettings` directly from `pydantic` instead of from `pydantic_settings`. - - ```python linenums='1' hl_lines="1 4" title="config.py" -from pydantic import BaseSettings - - -class Settings(BaseSettings): - url: str = "" - queue: str = "test-queue" - - -settings = Settings() - ``` - -When you create an instance of that `Settings` class (in this case, in the `settings` object), Pydantic will read the environment variables in a case-insensitive way. For example, an upper-case variable `APP_NAME` will still be read for the attribute `app_name`. - -It will also convert and validate the data, so when you use that `settings` object, you will have data of the types you declared (e.g., `url` will be a `str`). - -### Using the `settings` - -Now you can use the new `settings` object in your application: - -```python linenums='1' hl_lines="3 9 14" title="serve.py" -import os - -from pydantic_settings import BaseSettings - -from faststream import FastStream -from faststream.rabbit import RabbitBroker - - -class Settings(BaseSettings): - url: str - queue: str = "test-queue" - - -settings = Settings(_env_file=os.getenv("ENV", ".env")) - -broker = RabbitBroker(settings.url) -app = FastStream(broker) - - -@broker.subscriber(settings.queue) -async def handler(msg): - ... -``` - -### Running the Application - -You can run the application while passing the configuration parameters as environment variables. For example, you could set a `URL`: - -```console -URL="amqp://guest:guest@localhost:5672" faststream run serve:app -``` - -!!! tip - To set multiple environment variables for a single command, separate them with spaces and put them all before the command. - -## Reading a `.env` File - -If you have many settings that may change frequently, especially in different environments, it might be useful to store them in a file and then read them as if they were environment variables. - -This practice is common enough that it has a name; these environment variables are typically placed in a file named `.env`, commonly referred to as a "dotenv" file. - -!!! tip - In Unix-like systems like Linux and macOS, a file starting with a dot (`.`) is considered a hidden file. - - But a dotenv file doesn't really have to have that exact filename.
Pydantic supports reading from these types of files using an external library. You can learn more at [Pydantic Settings: Dotenv (.env) support](https://docs.pydantic.dev/latest/usage/pydantic_settings/#dotenv-env-support){.external-link target="_blank"}. - -!!! tip - To use this feature, you need to install the `python-dotenv` library. - -### The `.env` File - -You can create a `.env` file with contents like this: - -```bash -URL="amqp://guest:guest@localhost:5672" -QUEUE="test-queue" -``` - -### Reading Settings from `.env` - -Then update your `config.py` as follows: - -```python linenums='1' hl_lines="1 11" -import os - -from pydantic_settings import BaseSettings - - -class Settings(BaseSettings): - url: str - queue: str = "test-queue" - - -settings = Settings(_env_file=os.getenv("ENV", ".env")) -``` - -This way, you can specify different `.env` files directly from your terminal, which can be extremely helpful for various testing and production scenarios. - -!!! note - By default, Pydantic will attempt to find a `.env` file. If it's not present, Pydantic will use the default field values. - -### Choosing the `.env` File at Startup - -Now you can run the application with different `.env` files like so: - -```console -ENV=.local.env faststream run serve:app -``` - -Or, for a production environment: - -```console -ENV=.production.env faststream run serve:app -``` - -Or even for a test environment: - -```console -ENV=.test.env pytest -``` diff --git a/search/docs/getting-started/context/custom.md b/search/docs/getting-started/context/custom.md deleted file mode 100644 index dd99314..0000000 --- a/search/docs/getting-started/context/custom.md +++ /dev/null @@ -1,83 +0,0 @@ -# Context Fields Declaration - -You can also store your own objects in the `Context`. - -## Global - -To declare an application-level context field, you need to call the `context.set_global` method with a key to indicate where the object will be placed in the context.
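As a rough, self-contained sketch of this pattern (the `secret` key, its value, and the startup hook shown here are illustrative; the broker-specific snippets below show the canonical versions):

```python
from faststream import ContextRepo, FastStream
from faststream.kafka import KafkaBroker

broker = KafkaBroker("localhost:9092")
app = FastStream(broker)


@app.on_startup
async def setup(context: ContextRepo):
    # Place an application-wide object into the context under the "secret" key
    context.set_global("secret", "my-secret-value")
```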
- -=== "Kafka" - ```python linenums="1" hl_lines="9-10" - {!> docs_src/getting_started/context/custom_global_context_kafka.py [ln:1-5,16-18] !} - ``` - -=== "RabbitMQ" - ```python linenums="1" hl_lines="9-10" - {!> docs_src/getting_started/context/custom_global_context_rabbit.py [ln:1-5,16-18] !} - ``` - -=== "NATS" - ```python linenums="1" hl_lines="9-10" - {!> docs_src/getting_started/context/custom_global_context_nats.py [ln:1-5,16-18] !} - ``` - -Afterward, you can access your `secret` field in the usual way: - -=== "Kafka" - ```python linenums="1" hl_lines="4" - {!> docs_src/getting_started/context/custom_global_context_kafka.py [ln:8-13] !} - ``` - -=== "RabbitMQ" - ```python linenums="1" hl_lines="4" - {!> docs_src/getting_started/context/custom_global_context_rabbit.py [ln:8-13] !} - ``` - -=== "NATS" - ```python linenums="1" hl_lines="4" - {!> docs_src/getting_started/context/custom_global_context_nats.py [ln:8-13] !} - ``` - -In this case, the field becomes a global context field: it does not depend on the current message handler (unlike `message`) - -To remove a field from the context use the `reset_global` method: - -```python -context.reset_global("my_key") -``` - -## Local - -To set a local context (available only within the message processing scope), use the context manager `scope` - -=== "Kafka" - ```python linenums="1" hl_lines="15 19 21-22" - {!> docs_src/getting_started/context/custom_local_context_kafka.py !} - ``` - -=== "RabbitMQ" - ```python linenums="1" hl_lines="15 19 21-22" - {!> docs_src/getting_started/context/custom_local_context_rabbit.py !} - ``` - -=== "NATS" - ```python linenums="1" hl_lines="15 19 21-22" - {!> docs_src/getting_started/context/custom_local_context_nats.py !} - ``` - -You can also set the context yourself, and it will remain within the current call stack until you clear it. - -=== "Kafka" - ```python linenums="1" hl_lines="1 14 25" - {!> docs_src/getting_started/context/manual_local_context_kafka.py !} - ``` - -=== "RabbitMQ" - ```python linenums="1" hl_lines="1 14 25" - {!> docs_src/getting_started/context/manual_local_context_rabbit.py !} - ``` - -=== "NATS" - ```python linenums="1" hl_lines="1 14 25" - {!> docs_src/getting_started/context/manual_local_context_nats.py !} - ``` diff --git a/search/docs/getting-started/context/existed.md b/search/docs/getting-started/context/existed.md deleted file mode 100644 index 935e7a0..0000000 --- a/search/docs/getting-started/context/existed.md +++ /dev/null @@ -1,82 +0,0 @@ -# Existing Fields - -**Context** already contains some global objects that you can always access: - -* **broker** - the current broker -* **context** - the context itself, in which you can write your own fields -* **logger** - the logger used for your broker (tags messages with *message_id*) -* **message** - the raw message (if you need access to it) - -At the same time, thanks to `contextlib.ContextVar`, **message** is local for you current consumer scope. - -## Access to Context Fields - -By default, the context searches for an object based on the argument name. 
-
-=== "Kafka"
-    ```python linenums="1" hl_lines="1 12-15"
-    {!> docs_src/getting_started/context/existed_context_kafka.py [ln:1-2,10-11,14-23] !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1" hl_lines="1 12-15"
-    {!> docs_src/getting_started/context/existed_context_rabbit.py [ln:1-2,10-11,14-23] !}
-    ```
-
-=== "NATS"
-    ```python linenums="1" hl_lines="1 12-15"
-    {!> docs_src/getting_started/context/existed_context_nats.py [ln:1-2,10-11,14-23] !}
-    ```
-
-## Annotated Aliases
-
-Also, **FastStream** has already created `Annotated` aliases to provide you with comfortable access to existing objects. You can import them directly from `faststream` or your broker-specific modules:
-
-* Shared aliases
-
-```python
-from faststream import Logger, ContextRepo
-```
-
-* *Kafka* aliases
-
-```python
-from faststream.kafka.annotations import (
-    Logger, ContextRepo, KafkaMessage, KafkaBroker, KafkaProducer
-)
-```
-
-* *RabbitMQ* aliases
-
-```python
-from faststream.rabbit.annotations import (
-    Logger, ContextRepo, RabbitMessage, RabbitBroker, RabbitProducer
-)
-```
-
-* *NATS* aliases
-
-```python
-from faststream.nats.annotations import (
-    Logger, ContextRepo, NatsMessage,
-    NatsBroker, NatsProducer, NatsJsProducer,
-    Client, JsClient,
-)
-```
-
-To use them, simply import and use them as subscriber argument annotations.
-
-=== "Kafka"
-    ```python linenums="1" hl_lines="3-8 17-20"
-    {!> docs_src/getting_started/context/existed_context_kafka.py [ln:1-11,26-35] !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1" hl_lines="3-8 17-20"
-    {!> docs_src/getting_started/context/existed_context_rabbit.py [ln:1-11,26-35] !}
-    ```
-
-=== "NATS"
-    ```python linenums="1" hl_lines="3-8 17-20"
-    {!> docs_src/getting_started/context/existed_context_nats.py [ln:1-11,26-35] !}
-    ```
diff --git a/search/docs/getting-started/context/extra.md b/search/docs/getting-started/context/extra.md
deleted file mode 100644
index 3824e51..0000000
--- a/search/docs/getting-started/context/extra.md
+++ /dev/null
@@ -1,60 +0,0 @@
-# Context Extra Options
-
-Additionally, `Context` provides you with some extra capabilities for working with the objects it contains.
-
-## Default Values
-
-For instance, if you attempt to access a field that doesn't exist in the global context, you will receive a `pydantic.ValidationError` exception.
-
-However, you can set default values if needed.
-
-=== "Kafka"
-    ```python linenums="1" hl_lines="3 5"
-    {!> docs_src/getting_started/context/default_arguments_kafka.py [ln:7-11] !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1" hl_lines="3 5"
-    {!> docs_src/getting_started/context/default_arguments_rabbit.py [ln:7-11] !}
-    ```
-
-=== "NATS"
-    ```python linenums="1" hl_lines="3 5"
-    {!> docs_src/getting_started/context/default_arguments_nats.py [ln:7-11] !}
-    ```
-
-## Cast Context Types
-
-By default, context fields are **NOT CAST** to the type specified in their annotation.
-
-=== "Kafka"
-    ```python linenums="1" hl_lines="6 10 12"
-    {!> docs_src/getting_started/context/cast_kafka.py [ln:1-12] !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1" hl_lines="6 10 12"
-    {!> docs_src/getting_started/context/cast_rabbit.py [ln:1-12] !}
-    ```
-
-=== "NATS"
-    ```python linenums="1" hl_lines="6 10 12"
-    {!> docs_src/getting_started/context/cast_nats.py [ln:1-12] !}
-    ```
-
-If you require this functionality, you can enable the appropriate flag, as the snippets below show.
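-
-A minimal sketch of this flag (an illustration: the `cast=True` argument and the `value` field name are assumptions based on the snippets below):
-
-```python
-from faststream import Context
-
-
-@broker.subscriber("test")
-async def handle(
-    value: int = Context(cast=True),  # a context value "1" would be cast to 1
-):
-    assert isinstance(value, int)
-```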
-
-=== "Kafka"
-    ```python linenums="1" hl_lines="3 5"
-    {!> docs_src/getting_started/context/cast_kafka.py [ln:14-18] !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1" hl_lines="3 5"
-    {!> docs_src/getting_started/context/cast_rabbit.py [ln:14-18] !}
-    ```
-
-=== "NATS"
-    ```python linenums="1" hl_lines="3 5"
-    {!> docs_src/getting_started/context/cast_nats.py [ln:14-18] !}
-    ```
diff --git a/search/docs/getting-started/context/fields.md b/search/docs/getting-started/context/fields.md
deleted file mode 100644
index e30ff72..0000000
--- a/search/docs/getting-started/context/fields.md
+++ /dev/null
@@ -1,11 +0,0 @@
----
-comment_1: This way you can get access to context object by its name
-comment_2: This way you can get access to context object specific field
----
-
-# Access by Name
-
-Sometimes, you may need to use a different name for the argument (not the one under which it is stored in the context) or get access to specific parts of the object. To do this, simply specify the name of what you want to access, and the context will provide you with the object.
-
-{% import 'getting_started/context/fields.md' as includes with context %}
-{{ includes }}
diff --git a/search/docs/getting-started/context/index.md b/search/docs/getting-started/context/index.md
deleted file mode 100644
index 67e6d02..0000000
--- a/search/docs/getting-started/context/index.md
+++ /dev/null
@@ -1,66 +0,0 @@
-# Application Context
-
-**FastStream** has its own Dependency Injection container - **Context**, used to store application runtime objects and variables.
-
-With this container, you can access both application scope and message processing scope objects. This functionality is similar to [`Depends`](../dependencies/index.md){.internal-link} usage.
-
-=== "Kafka"
-    ```python linenums="1" hl_lines="1 11"
-    {!> docs_src/getting_started/context/base_kafka.py !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1" hl_lines="1 11"
-    {!> docs_src/getting_started/context/base_rabbit.py !}
-    ```
-
-=== "NATS"
-    ```python linenums="1" hl_lines="1 11"
-    {!> docs_src/getting_started/context/base_nats.py !}
-    ```
-
-But with the [`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated){.external-link target="_blank"} Python feature, it is much closer to `#!python @pytest.fixture`.
-
-=== "Kafka"
-    ```python linenums="1" hl_lines="1 6 15"
-    {!> docs_src/getting_started/context/annotated_kafka.py !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1" hl_lines="1 6 15"
-    {!> docs_src/getting_started/context/annotated_rabbit.py !}
-    ```
-
-=== "NATS"
-    ```python linenums="1" hl_lines="1 6 15"
-    {!> docs_src/getting_started/context/annotated_nats.py !}
-    ```
-
-## Usages
-
-By default, the context is available in the same place as `Depends`:
-
-* at lifespan hooks
-* message subscribers
-* nested dependencies
-
-!!! tip
-    Fields obtained from the `Context` are editable, so editing them in a function means editing them everywhere.
-
-## Compatibility with Regular Functions
-
-To use context in other functions, use the `#!python @apply_types` decorator. In this case, the context of the called function will correspond to the context of the event handler from which it was called.
-
-```python linenums="1" hl_lines="6 9-10"
-from faststream import Context, apply_types
-
-
-@broker.subscriber("test")
-async def handler(body):
-    nested_func(body)
-
-
-@apply_types
-def nested_func(body, logger=Context()):
-    logger.info(body)
-```
-
-In the example above, we did not pass the `logger` argument when calling `nested_func`; it was taken from the context.
diff --git a/search/docs/getting-started/contributing/CONTRIBUTING.md b/search/docs/getting-started/contributing/CONTRIBUTING.md
deleted file mode 100644
index 0eed253..0000000
--- a/search/docs/getting-started/contributing/CONTRIBUTING.md
+++ /dev/null
@@ -1,143 +0,0 @@
-# Development
-
-After cloning the project, you'll need to set up the development environment. Here are the guidelines on how to do this.
-
-## Virtual Environment with `venv`
-
-Create a virtual environment in a directory using Python's `venv` module:
-
-```bash
-python -m venv venv
-```
-
-That will create a `./venv/` directory with Python binaries, allowing you to install packages in an isolated environment.
-
-## Activate the Environment
-
-Activate the new environment with:
-
-```bash
-source ./venv/bin/activate
-```
-
-Ensure you have the latest pip version in your virtual environment:
-
-```bash
-python -m pip install --upgrade pip
-```
-
-## Installing Dependencies
-
-After activating the virtual environment as described above, run:
-
-```bash
-pip install -e ".[dev]"
-```
-
-This will install all the dependencies and your local FastStream in your virtual environment.
-
-### Using Your Local FastStream
-
-If you create a Python file that imports and uses FastStream, and run it with the Python from your local environment, it will use your local FastStream source code.
-
-Whenever you update your local FastStream source code, it will automatically use the latest version when you run your Python file again. This is because it is installed with `-e`.
-
-This way, you don't have to "install" your local version to be able to test every change.
-
-To use your local FastStream CLI, type:
-
-```bash
-python -m faststream ...
-```
-
-## Running Tests
-
-### Pytest
-
-To run tests with your current FastStream application and Python environment, use:
-
-```bash
-pytest tests
-# or
-./scripts/test.sh
-# with coverage output
-./scripts/test-cov.sh
-```
-
-In your project, you'll find some *pytest marks*:
-
-* **slow**
-* **rabbit**
-* **kafka**
-* **nats**
-* **all**
-
-By default, running *pytest* will execute "not slow" tests.
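-
-In other words, the default run is equivalent to explicitly deselecting the slow mark:
-
-```bash
-pytest -m 'not slow'
-```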
-
-To run all tests, use:
-
-```bash
-pytest -m 'all'
-```
-
-If you don't have a local broker instance running, you can run tests without those dependencies:
-
-```bash
-pytest -m 'not rabbit and not kafka and not nats'
-```
-
-To run tests based on RabbitMQ, Kafka, or other dependencies, the following services need to be started as Docker containers:
-
-```yaml
-version: "3"
-services:
-  # nosemgrep: yaml.docker-compose.security.writable-filesystem-service.writable-filesystem-service
-  rabbitmq:
-    image: rabbitmq:alpine
-    ports:
-      - "5672:5672"
-    # https://semgrep.dev/r?q=yaml.docker-compose.security.no-new-privileges.no-new-privileges
-    security_opt:
-      - no-new-privileges:true
-  # nosemgrep: yaml.docker-compose.security.writable-filesystem-service.writable-filesystem-service
-  kafka:
-    image: bitnami/kafka:3.5.0
-    ports:
-      - "9092:9092"
-    environment:
-      KAFKA_ENABLE_KRAFT: "true"
-      KAFKA_CFG_NODE_ID: "1"
-      KAFKA_CFG_PROCESS_ROLES: "broker,controller"
-      KAFKA_CFG_CONTROLLER_LISTENER_NAMES: "CONTROLLER"
-      KAFKA_CFG_LISTENERS: "PLAINTEXT://:9092,CONTROLLER://:9093"
-      KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: "CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT"
-      KAFKA_CFG_ADVERTISED_LISTENERS: "PLAINTEXT://127.0.0.1:9092"
-      KAFKA_BROKER_ID: "1"
-      KAFKA_CFG_CONTROLLER_QUORUM_VOTERS: "1@kafka:9093"
-      ALLOW_PLAINTEXT_LISTENER: "true"
-    # https://semgrep.dev/r?q=yaml.docker-compose.security.no-new-privileges.no-new-privileges
-    security_opt:
-      - no-new-privileges:true
-  # nosemgrep: yaml.docker-compose.security.writable-filesystem-service.writable-filesystem-service
-  nats:
-    image: nats
-    command: -js
-    ports:
-      - 4222:4222
-      - 8222:8222 # management
-    # https://semgrep.dev/r?q=yaml.docker-compose.security.no-new-privileges.no-new-privileges
-    security_opt:
-      - no-new-privileges:true
-```
-
-You can easily start the dependencies using the provided script by running:
-
-```bash
-./scripts/start_test_env.sh
-```
-
-Once you are done with development and running tests, you can stop the dependencies' docker containers by running:
-
-```bash
-./scripts/stop_test_env.sh
-```
diff --git a/search/docs/getting-started/contributing/docs.md b/search/docs/getting-started/contributing/docs.md
deleted file mode 100644
index c12180e..0000000
--- a/search/docs/getting-started/contributing/docs.md
+++ /dev/null
@@ -1,44 +0,0 @@
-# Documentation
-
-## How to help
-
-You will be of invaluable help if you contribute to the documentation.
-
-Such a contribution can be:
-
-* Indications of inaccuracies, errors, typos
-* Suggestions for editing specific sections
-* Making additions
-
-You can report all this in [discussions](https://github.com/airtai/faststream/discussions){.external-link target="_blank"} on GitHub, open an [issue](https://github.com/airtai/faststream/issues){.external-link target="_blank"}, or write about it in our [discord](https://discord.gg/CJWmYpyFbc){.external-link target="_blank"} group.
-
-!!! note
-    Special thanks to those who are ready to help with **developing the documentation**, as well as translating it into **other languages**.
-
-## How to get started
-
-To develop the documentation, you don't even need to install the entire **FastStream** project as a whole.
-
-It is enough to:
-
-1. Clone the project repository
-2. Create a virtual environment
-    ```bash
-    python -m venv venv
-    ```
-3. Activate it
-    ```bash
-    source venv/bin/activate
-    ```
-4. Install documentation dependencies
-    ```bash
-    pip install ".[devdocs]"
-    ```
-5. Go to the `docs/` directory
-6. Start the local documentation server
-    ```bash
-    mkdocs serve
-    ```
-
-Now all changes in the documentation files will be reflected on your local version of the site.
-After making all the changes, you can issue a `PR` with them - and we will gladly accept it!
diff --git a/search/docs/getting-started/dependencies/class.md b/search/docs/getting-started/dependencies/class.md
deleted file mode 100644
index e69de29..0000000
diff --git a/search/docs/getting-started/dependencies/global.md b/search/docs/getting-started/dependencies/global.md
deleted file mode 100644
index e8a2d4d..0000000
--- a/search/docs/getting-started/dependencies/global.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-* Broker-level dependencies
-* Subscriber-level dependencies
diff --git a/search/docs/getting-started/dependencies/index.md b/search/docs/getting-started/dependencies/index.md
deleted file mode 100644
index 647d07e..0000000
--- a/search/docs/getting-started/dependencies/index.md
+++ /dev/null
@@ -1,163 +0,0 @@
----
-nested: A nested dependency is called here
----
-
-# Dependencies
-
-**FastStream** uses the [**FastDepends**](https://lancetnik.github.io/FastDepends/){.external-link target="_blank"} library for dependency management.
-This dependency system is literally borrowed from **FastAPI**, so if you know how to work with that framework, you'll be comfortable with dependencies in **FastStream**.
-
-You can visit the [**FastDepends**](https://lancetnik.github.io/FastDepends/){.external-link target="_blank"} documentation for more details, but the key points and **additions** are covered here.
-
-## Type Casting
-
-The key function in the dependency management and type conversion system in **FastStream** is the decorator `#!python @apply_types` (also known as `#!python @inject` in **FastDepends**).
-
-By default, it applies to all event handlers, unless you disabled this option when creating the broker.
-
-{! includes/getting_started/dependencies/1.md !}
-
-!!! warning
-    Setting the `apply_types=False` flag not only disables type casting but also `Depends` and `Context`.
-
-This flag can be useful if you are using **FastStream** within another framework and you need to use its native dependency system.
-
-## Dependency Injection
-
-To implement dependencies in **FastStream**, a special class called **Depends** is used:
-
-{! includes/getting_started/dependencies/2.md !}
-
-**The first step**: You need to declare a dependency, which can be any `Callable` object.
-
-??? note "Callable"
-    A "Callable" is an object that can be "called". It can be a function, a class, or a class method.
-
-    In other words, if you can write code like `my_object()` - `my_object` is `Callable`.
-
-{! includes/getting_started/dependencies/3.md !}
-
-**The second step**: Declare which dependencies you need using `Depends`:
-
-{! includes/getting_started/dependencies/4.md !}
-
-**The last step**: Just use the result of executing your dependency!
-
-It's easy, isn't it?
-
-!!! tip "Auto `#!python @apply_types`"
-    In the code above, we didn't use this decorator for our dependencies. However, it still applies
-    to all functions used as dependencies. Please keep this in mind.
-
-## Top-level Dependencies
-
-If you don't need a dependency result, you can use the following code:
-
-```python
-@broker.subscriber("test")
-def method(_ = Depends(...)): ...
-```
-
-However, using the special `subscriber` parameter is much more suitable:
-
-```python
-@broker.subscriber("test", dependencies=[Depends(...)])
-def method(): ...
-```
-
-You can also declare broker-level dependencies, which will be applied to all broker's handlers:
-
-```python
-broker = RabbitBroker(dependencies=[Depends(...)])
-```
-
-## Nested Dependencies
-
-Dependencies can also contain other dependencies. This works in a very predictable way: just declare
-`Depends` in the dependent function.
-
-{% import 'getting_started/dependencies/5.md' as includes with context %}
-{{ includes }}
-
-!!! Tip "Caching"
-    In the example above, the `another_dependency` function will be called only **ONCE**!
-    **FastDepends** caches all dependency execution results within **ONE** `#!python @apply_types` call stack.
-    This means that all nested dependencies will receive the cached result of dependency execution.
-    But, between different calls of the main function, these results will be different.
-
-    To prevent this behavior, just use `#!python Depends(..., cache=False)`. In this case, the dependency will be used for each function
-    in the call stack where it is used.
-
-## Use with Regular Functions
-
-You can use the decorator `#!python @apply_types` not only with `#!python @broker.subscriber(...)`, but also with regular functions, both synchronous and asynchronous.
-
-=== "Sync"
-    ```python hl_lines="3-4" linenums="1"
-    from faststream import Depends, apply_types
-
-    def simple_dependency(a: int, b: int = 3):
-        return a + b
-
-    @apply_types
-    def method(a: int, d: int = Depends(simple_dependency)):
-        return a + d
-
-    def test_sync_dependency():
-        assert method("1") == 5
-    ```
-
-=== "Async"
-    ```python hl_lines="5-6 8-9" linenums="1"
-    import asyncio
-    import pytest
-    from faststream import Depends, apply_types
-
-    async def simple_dependency(a: int, b: int = 3):
-        return a + b
-
-    def another_dependency(a: int):
-        return a
-
-    @apply_types
-    async def method(
-        a: int,
-        b: int = Depends(simple_dependency),
-        c: int = Depends(another_dependency),
-    ):
-        return a + b + c
-
-    @pytest.mark.asyncio
-    async def test_async_dependency():
-        assert 6 == await method("1")
-    ```
-
-    !!! tip "Be careful"
-        In asynchronous code, you can use both synchronous and asynchronous dependencies.
-        But in synchronous code, only synchronous dependencies are available to you.
-
-## Casting Dependency Types
-
-**FastDepends**, used by **FastStream**, also casts the dependency's `return` type. This means that the value returned by the dependency will
-be cast to the type twice: as the dependency's `return` value and as the input argument of the main function. This does not incur additional costs if
-these types have the same annotation. Just keep it in mind. Or not... Anyway, I've warned you.
-
-```python linenums="1"
-from faststream import Depends, apply_types
-
-def simple_dependency(a: int, b: int = 3) -> str:
-    return a + b  # 'return' is cast to `str` for the first time
-
-@apply_types
-def method(a: int, d: int = Depends(simple_dependency)):
-    # 'd' is cast to `int` for the second time
-    return a + d
-
-assert method("1") == 5
-```
-
-Also, the result of executing the dependency is cached. If you use this dependency in `N` functions,
-this cached result will be cast to the type `N` times (at the input of each function where it is used).
-
-To avoid problems with this, use [mypy](https://www.mypy-lang.org){.external-link target="_blank"} or just be careful with the annotation
-of types in your project.
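-
-As a rule of thumb, keep the dependency's return annotation identical to the annotation of the argument that receives it; then the repeated cast is a no-op. A minimal sketch (an illustration, not from the original docs):
-
-```python
-from faststream import Depends, apply_types
-
-
-def get_value() -> int:  # the return annotation...
-    return 1
-
-
-@apply_types
-def method(value: int = Depends(get_value)):  # ...matches the argument annotation
-    return value
-```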
diff --git a/search/docs/getting-started/dependencies/sub.md b/search/docs/getting-started/dependencies/sub.md
deleted file mode 100644
index e69de29..0000000
diff --git a/search/docs/getting-started/dependencies/testing.md b/search/docs/getting-started/dependencies/testing.md
deleted file mode 100644
index 3588282..0000000
--- a/search/docs/getting-started/dependencies/testing.md
+++ /dev/null
@@ -1,3 +0,0 @@
-
-
-https://lancetnik.github.io/FastDepends/tutorial/overrides/
diff --git a/search/docs/getting-started/dependencies/yield.md b/search/docs/getting-started/dependencies/yield.md
deleted file mode 100644
index e69de29..0000000
diff --git a/search/docs/getting-started/index.md b/search/docs/getting-started/index.md
deleted file mode 100644
index fe59cdb..0000000
--- a/search/docs/getting-started/index.md
+++ /dev/null
@@ -1,39 +0,0 @@
----
-hide:
-  - toc
-run_docker: To start a new project, we need a test broker container
----
-
-# QUICK START
-
-Install using `pip`:
-
-{% import 'getting_started/index/install.md' as includes with context %}
-{{ includes }}
-
-## Basic Usage
-
-To create a basic application, add the following code to a new file (e.g. `serve.py`):
-
-{! includes/getting_started/index/base.md !}
-
-And just run this command:
-
-```shell
-faststream run serve:app
-```
-
-After running the command, you should see the following output:
-
-``` shell
-INFO - FastStream app starting...
-INFO - test | - `BaseHandler` waiting for messages
-INFO - FastStream app started successfully! To exit, press CTRL+C
-```
-
-Enjoy your new development experience!
-
-??? tip "Don't forget to stop the test broker container"
-    ```bash
-    docker container stop test-mq
-    ```
diff --git a/search/docs/getting-started/integrations/fastapi/index.md b/search/docs/getting-started/integrations/fastapi/index.md
deleted file mode 100644
index 724d94a..0000000
--- a/search/docs/getting-started/integrations/fastapi/index.md
+++ /dev/null
@@ -1,73 +0,0 @@
-# **FastAPI** Plugin
-
-## Handling messages
-
-**FastStream** can be used as a part of **FastAPI**.
-
-Just import the **StreamRouter** you need and declare the message handler in the same way as with a regular **FastStream** application.
-
-!!! tip
-    When used in this way, **FastStream** does not use its own dependency system but integrates into **FastAPI**.
-    That is, you can use `Depends`, `BackgroundTasks` and other original **FastAPI** features as if it were a regular HTTP endpoint, but you can't use `faststream.Context` and `faststream.Depends`.
-
-    Note that the code below uses `fastapi.Depends`, not `faststream.Depends`.
-
-{! includes/getting_started/integrations/fastapi/1.md !}
-
-When processing a message from a broker, the entire message body is placed simultaneously in both the `body` and `path` request parameters. You can access them in any way convenient for you. The message headers are placed in `headers`.
-
-Also, this router can be fully used as an `HttpRouter` (of which it is an inheritor). So, you can
-use it to declare any `get`, `post`, `put` and other HTTP methods. For example, this is done at **line 19**.
-
-!!! warning
-    If your **ASGI** server does not support installing **state** inside **lifespan**, you can disable this behavior as follows:
-
-    ```python
-    router = StreamRouter(..., setup_state=False)
-    ```
-
-    However, after that, you will not be able to access the broker from your application's **state** (but it is still available as `router.broker`).
-
-## Accessing the Broker Object
-
-Inside each router, there is a broker. You can easily access it if you need to send a message to MQ:
-
-{! includes/getting_started/integrations/fastapi/2.md !}
-
-Also, you can use the following `Depends` to access the broker if you want to use it in different parts of your program:
-
-{! includes/getting_started/integrations/fastapi/3.md !}
-
-Or you can access the broker from a **FastAPI** application state (if you don't disable it with `#!python setup_state=False`):
-
-```python
-from fastapi import Request
-
-@app.get("/")
-def main(request: Request):
-    broker = request.state.broker
-```
-
-## `@after_startup`
-
-The `FastStream` application has the `#!python @after_startup` hook, which allows you to perform operations with your message broker after the connection is established. This can be extremely convenient for managing your brokers' objects and/or sending messages. This hook is also available for your **FastAPI StreamRouter**.
-
-{! includes/getting_started/integrations/fastapi/4.md !}
-
-## Documentation
-
-When using **FastStream** as a router for **FastAPI**, the framework automatically registers endpoints for hosting **AsyncAPI** documentation in your application with the following default values:
-
-{! includes/getting_started/integrations/fastapi/5.md !}
-
-This way, you will have three routes to interact with your application's **AsyncAPI** schema:
-
-* `/asyncapi` - the same as the [CLI created page](../../../getting-started/asyncapi/hosting.md){.internal-link}
-* `/asyncapi.json` - download the **JSON** schema representation
-* `/asyncapi.yaml` - download the **YAML** schema representation
-
-## Testing
-
-To test your **FastAPI StreamRouter**, you can still use it with the *TestClient*:
-
-{! includes/getting_started/integrations/fastapi/6.md !}
diff --git a/search/docs/getting-started/integrations/frameworks/index.md b/search/docs/getting-started/integrations/frameworks/index.md
deleted file mode 100644
index 6f06f21..0000000
--- a/search/docs/getting-started/integrations/frameworks/index.md
+++ /dev/null
@@ -1,16 +0,0 @@
----
-# template variables
-fastapi_plugin: If you want to use **FastStream** in conjunction with **FastAPI**, perhaps you should use a special [plugin](../fastapi/index.md){.internal-link}
-no_hook: However, even if such a hook is not provided, you can do it yourself.
---- 
-
-# INTEGRATIONS
-
-**FastStream** brokers are very easy to integrate with any of your applications:
-it is enough to initialize the broker at startup and close it correctly at the end of
-your application.
-
-Most HTTP frameworks have built-in lifecycle hooks for this.
-
-{% import 'getting_started/integrations/http/1.md' as includes with context %}
-{{ includes }}
diff --git a/search/docs/getting-started/lifespan/hooks.md b/search/docs/getting-started/lifespan/hooks.md
deleted file mode 100644
index c1ce42a..0000000
--- a/search/docs/getting-started/lifespan/hooks.md
+++ /dev/null
@@ -1,119 +0,0 @@
-# Lifespan Hooks
-
-## Usage example
-
-Let's imagine that your application uses **pydantic** as your settings manager.
-
-!!! note ""
-    I highly recommend using **pydantic** for these purposes, because this dependency is already used by **FastStream**,
-    so you don't have to install an additional package.
-
-Also, let's imagine that you have several `.env`, `.env.development`, `.env.test`, `.env.production` files with your application settings,
-and you want to switch them at startup without any code changes.
-
-By [passing optional command line arguments](../config/index.md){.internal-link} to your code, **FastStream** allows you to do this easily.
-
-## Lifespan
-
-Let's write some code for our example:
-
-{! includes/getting_started/lifespan/1.md !}
-
-Now this application can be run using the following command to manage the environment:
-
-```bash
-faststream run serve:app --env .env.test
-```
-
-### Details
-
-Now let's look at it in a little more detail.
-
-To begin with, we used a decorator
-
-{! includes/getting_started/lifespan/2.md !}
-
-to declare a function that should run when our application starts.
-
-The next step is to declare the arguments that our function will receive:
-
-{! includes/getting_started/lifespan/3.md !}
-
-In this case, the `env` field will be passed to the `setup` function from the command line arguments.
-
-!!! tip
-    The default lifecycle functions are used with the decorator `#!python @apply_types`,
-    therefore, all [context fields](../context/index.md){.internal-link} and [dependencies](../dependencies/index.md){.internal-link} are available in them.
-
-Then, we initialized the settings of our application using the file passed to us from the command line:
-
-{! includes/getting_started/lifespan/4.md !}
-
-And put these settings in a global context:
-
-{! includes/getting_started/lifespan/5.md !}
-
-??? note
-    Now we can access our settings anywhere in the application right from the context:
-
-    ```python
-    from faststream import Context, apply_types
-
-    @apply_types
-    async def func(settings = Context()): ...
-    ```
-
-In the last step, we initialized our broker: now, when the application starts, it will be ready to receive messages.
-
-{! includes/getting_started/lifespan/6.md !}
-
-## Another example
-
-Now let's imagine that we have a machine learning model that needs to process messages from some broker.
-
-Initialization of such models usually takes a long time. It would be wise to do this at the start of the application, and not when processing each message.
-
-You can initialize your model somewhere at the top of your module/file. However, in this case, the code will run even when the module is merely imported, for example, during testing. It is unlikely that you want to run your model on every test run...
-
-Therefore, it is worth initializing the model in the `#!python @app.on_startup` hook.
-
-Also, we don't want the model to finish its work incorrectly when the application is stopped. To avoid this, we need the `#!python @app.on_shutdown` hook:
-
-{! includes/getting_started/lifespan/7.md !}
-
-## Multiple hooks
-
-If you want to declare multiple lifecycle hooks, they will be used in the order they are registered:
-
-```python linenums="1" hl_lines="6 11"
-from faststream import Context, ContextRepo, FastStream
-
-app = FastStream()
-
-
-@app.on_startup
-async def setup(context: ContextRepo):
-    context.set_global("field", 1)
-
-
-@app.on_startup
-async def setup_later(field: int = Context()):
-    assert field == 1
-```
-
-## Some more details
-
-### Async or not async
-
-In the asynchronous version of the application, both asynchronous and synchronous methods can be used as hooks.
-In the synchronous version, only synchronous methods are available.
-
-### Command line arguments
-
-Command line arguments are available in all `#!python @app.on_startup` hooks. To use them in other parts of the application, put them in the `ContextRepo`.
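-
-For example, a minimal sketch (assuming an `--env` option is passed on the command line, as in the example above):
-
-```python
-from faststream import ContextRepo, FastStream
-
-app = FastStream()
-
-
-@app.on_startup
-async def setup(env: str, context: ContextRepo):
-    # store the CLI argument so any other part of the app can read it
-    context.set_global("env", env)
-```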
-
-### Broker initialization
-
-The `#!python @app.on_startup` hooks are called **BEFORE** the broker is launched by the application. The `#!python @app.after_shutdown` hooks are triggered **AFTER** stopping the broker.
-
-If you want to perform some actions **AFTER** initializing the broker: send messages, initialize objects, etc., you should use the `#!python @app.after_startup` hook.
diff --git a/search/docs/getting-started/lifespan/index.md b/search/docs/getting-started/lifespan/index.md
deleted file mode 100644
index f74e924..0000000
--- a/search/docs/getting-started/lifespan/index.md
+++ /dev/null
@@ -1,11 +0,0 @@
-# Lifespan Events
-
-Sometimes you need to define the logic that should be executed before launching the application.
-This means that the code will be executed once - even before your application starts receiving messages.
-
-Also, you may need to terminate some processes after stopping the application. In this case, your code will also be executed exactly once:
-but after the completion of the main application.
-
-Since this code is executed before the application starts and after it stops, it covers the entire lifecycle *(lifespan)* of the application.
-
-This can be very useful for initializing your application settings at startup, setting up a pool of connections to a database, or running machine learning models.
diff --git a/search/docs/getting-started/lifespan/test.md b/search/docs/getting-started/lifespan/test.md
deleted file mode 100644
index 58887e1..0000000
--- a/search/docs/getting-started/lifespan/test.md
+++ /dev/null
@@ -1,12 +0,0 @@
-# Events Testing
-
-In most cases, you are testing your subscriber/publisher functions, but sometimes you need to trigger some lifespan hooks in your tests too.
-
-For this reason, **FastStream** has a special **TestApp** patcher working as a regular async context manager.
-
-{! includes/getting_started/lifespan/testing.md !}
-
-!!! tip
-    If you are using a connected broker within your lifespan hooks, it's advisable to patch the broker first (before applying the application patch).
-
-    Also, because `FastStream` calls `#!python broker.start()` internally, you need to prevent the `TestClient` from starting the broker with the `#!python connect_only=True` option to respect the original ordering of the lifespan hooks. Without it, all `FastStream` hooks will be called after the broker has started, which can break some `@app.on_startup` logic.
diff --git a/search/docs/getting-started/logging.md b/search/docs/getting-started/logging.md
deleted file mode 100644
index efd23eb..0000000
--- a/search/docs/getting-started/logging.md
+++ /dev/null
@@ -1,208 +0,0 @@
-# Application and Access Logging
-
-**FastStream** uses two previously configured loggers:
-
-* `faststream` - used by `FastStream` app
-* `faststream.access` - used by the broker
-
-## Logging Requests
-
-To log requests, it is strongly recommended to use the `access_logger` of your broker, as it is available from the [Context](../getting-started/context/existed.md){.internal-link} of your application.
-
-```python
-from faststream import Logger
-from faststream.rabbit import RabbitBroker
-
-broker = RabbitBroker()
-
-@broker.subscriber("test")
-async def func(logger: Logger):
-    logger.info("message received")
-```
-
-This approach offers several advantages:
-
-* The logger already contains the request context, including the message ID and broker-based parameters.
-* By replacing the `logger` when initializing the broker, you will automatically replace all loggers inside your functions.
-
-## Logging Levels
-
-If you use the **FastStream CLI**, you can change the current logging level of the entire application directly from the command line.
-
-The `--log-level` flag sets the current logging level for both the broker and the `FastStream` app. This allows you to configure the levels of not only the default loggers but also your custom loggers, if you use them inside **FastStream**.
-
-```console
-faststream run serve:app --log-level debug
-```
-
-If you want to completely disable the default logging of `FastStream`, you can set `logger=None`:
-
-```python
-from faststream import FastStream
-from faststream.rabbit import RabbitBroker
-
-broker = RabbitBroker(logger=None)  # Disables broker logs
-app = FastStream(broker, logger=None)  # Disables application logs
-```
-
-!!! warning
-    Be careful: the `logger` that you get from the context will also have the value `None` if you turn off broker logging.
-
-If you don't want to lose access to the `logger` inside your context but want to disable the default logs of **FastStream**, you can lower the level of the logs that the broker publishes itself.
-
-```python
-import logging
-from faststream.rabbit import RabbitBroker
-
-# Sets the broker logs to the DEBUG level
-broker = RabbitBroker(log_level=logging.DEBUG)
-```
-
-## Formatting Logs
-
-If you are not satisfied with the current format of your application logs, you can change it directly in your broker's constructor.
-
-```python
-from faststream.rabbit import RabbitBroker
-broker = RabbitBroker(log_fmt="%(asctime)s %(levelname)s - %(message)s")
-```
-
-## Logger Access
-
-If you want to override the default loggers' behavior, you can access them directly via `logging`.
-
-```python
-import logging
-logger = logging.getLogger("faststream")
-access_logger = logging.getLogger("faststream.access")
-```
-
-Or you can import them from **FastStream**.
-
-```python
-from faststream.log import access_logger, logger
-```
-
-## Using Your Own Loggers
-
-Since **FastStream** works with the standard `logging.Logger` object, you can initialize an application and a broker
-using your own logger.
-
-```python
-import logging
-from faststream import FastStream
-from faststream.rabbit import RabbitBroker
-
-logger = logging.getLogger("my_logger")
-
-broker = RabbitBroker(logger=logger)
-app = FastStream(broker, logger=logger)
-```
-
-!!! note
-    Doing this doesn't change the **CLI** logs behavior (*multiprocessing* and *hot reload* logs). This was done to keep your log storage clear of unnecessary stuff.
-
-    This logger will be used only for `FastStream` and `StreamBroker` service messages and will be passed to your function through the **Context**.
-
-By doing this, you will lose information about the context of the current request. However, you can retrieve it directly from the context anywhere in your code.
-
-```python
-from faststream import context
-log_context: dict[str, str] = context.get_local("log_context")
-```
-
-This way, all broker handlers can get access to your broker logger right from the context:
-
-```python
-from faststream import Logger
-
-@broker.subscriber(...)
-async def handler(
-    msg,
-    logger: Logger,  # <-- YOUR logger here
-):
-    logger.info(msg)
-```
-
-### Structlog Example
-
-[**Structlog**](https://www.structlog.org/en/stable){.external-link target="_blank"} is a production-ready logging solution for Python. It can be easily integrated with any log storage system, making it suitable for use in production projects.
-
-Here is a quick tutorial on integrating **Structlog** with **FastStream**:
-
-Start with the **Structlog** [guide](https://www.structlog.org/en/stable/logging-best-practices.html#pretty-printing-vs-structured-output){.external-link target="_blank"} example:
-
-```python linenums="1" hl_lines="11 14 20"
-import sys
-import structlog
-
-shared_processors = [
-    structlog.processors.add_log_level,
-    structlog.processors.StackInfoRenderer(),
-    structlog.dev.set_exc_info,
-    structlog.processors.TimeStamper(fmt="iso"),
-]
-
-if sys.stderr.isatty():
-    # terminal session
-    processors = shared_processors + [
-        structlog.dev.ConsoleRenderer()
-    ]
-else:
-    # Docker container session
-    processors = shared_processors + [
-        structlog.processors.dict_tracebacks,
-        structlog.processors.JSONRenderer(),
-    ]
-
-structlog.configure(
-    processors=processors,
-    logger_factory=structlog.PrintLoggerFactory(),
-    cache_logger_on_first_use=False,
-)
-
-logger = structlog.get_logger()
-```
-
-We created a logger that prints messages to the console in a user-friendly format during development and uses **JSON**-formatted logs in production.
-
-To integrate this logger with our **FastStream** application, we just need to access it through context information and pass it to our objects:
-
-```python linenums="1" hl_lines="11 15 20 26-27"
-import logging
-
-import structlog
-
-from faststream import FastStream, context
-from faststream.kafka import KafkaBroker
-
-def merge_contextvars(
-    logger: structlog.types.WrappedLogger,
-    method_name: str,
-    event_dict: structlog.types.EventDict,
-) -> structlog.types.EventDict:
-    event_dict["extra"] = event_dict.get(
-        "extra",
-        context.get("log_context", {}),
-    )
-    return event_dict
-
-shared_processors = [
-    merge_contextvars,
-    ...
-]
-
-...
-
-broker = KafkaBroker(logger=logger, log_level=logging.DEBUG)
-app = FastStream(broker, logger=logger)
-```
-
-And the job is done! Now you have perfectly structured logs using **Structlog**.
-
-```bash
-TIMESTAMP [info     ] FastStream app starting... extra={}
-TIMESTAMP [debug    ] `Handler` waiting for messages extra={'topic': 'topic', 'group_id': 'group', 'message_id': ''}
-TIMESTAMP [debug    ] `Handler` waiting for messages extra={'topic': 'topic', 'group_id': 'group2', 'message_id': ''}
-TIMESTAMP [info     ] FastStream app started successfully! To exit, press CTRL+C extra={'topic': '', 'group_id': '', 'message_id': ''}
-```
diff --git a/search/docs/getting-started/middlewares/index.md b/search/docs/getting-started/middlewares/index.md
deleted file mode 100644
index 45d4037..0000000
--- a/search/docs/getting-started/middlewares/index.md
+++ /dev/null
@@ -1,85 +0,0 @@
-# Middlewares
-
-**Middlewares** are a powerful mechanism that allows you to add additional logic to any stage of the message processing pipeline.
-
-This way, you can greatly extend your **FastStream** application with features such as:
-
-* Integration with any logging/metrics systems
-* Application-level message serialization logic
-* Rich publishing of messages with extra information
-* And many other capabilities
-
-**Middlewares** have several methods to override. You can implement some or all of them and use middlewares at the broker, router, or subscriber level. Thus, middlewares are the most flexible **FastStream** feature.
-
-## Message Receive Wrapper
-
-Unfortunately, this powerful feature has a somewhat complex signature too.
-
-Using middlewares, you can wrap the entire message processing pipeline. In this case, you need to specify the `on_receive` and `after_processed` methods:
-
-``` python
-from faststream import BaseMiddleware
-
-class MyMiddleware(BaseMiddleware):
-    async def on_receive(self):
-        print(f"Received: {self.message}")
-        return await super().on_receive()
-
-    async def after_processed(self, exc_type, exc_val, exec_tb):
-        return await super().after_processed(exc_type, exc_val, exec_tb)
-```
-
-These methods should be overwritten only in broker-level middlewares.
-
-```python
-Broker(middlewares=[MyMiddleware])
-```
-
-In other cases, `on_receive` will be called at every subscriber filter function call.
-
-!!! tip
-    Please always call `#!python super()` methods at the end of your function; this is important for correct error processing.
-
-## Message Consuming Wrapper
-
-Also, using middlewares, you are able to wrap consumer function calls directly.
-
-In this case, you need to specify the `on_consume` and `after_consume` methods:
-
-``` python
-from typing import Optional
-
-from faststream import BaseMiddleware
-from faststream.types import DecodedMessage
-
-class MyMiddleware(BaseMiddleware):
-    async def on_consume(self, msg: DecodedMessage) -> DecodedMessage:
-        return await super().on_consume(msg)
-
-    async def after_consume(self, err: Optional[Exception]) -> None:
-        return await super().after_consume(err)
-```
-
-This way, you can patch the incoming message body right before passing it to your consumer subscriber.
-
-Also, if you have multiple filters for one subscriber, these methods will be called only once, when the filtering is completed successfully.
-
-## Message Publishing Wrapper
-
-Finally, using middlewares, you are able to patch outgoing messages too. For example, you can compress/encode outgoing messages at the application level.
-
-In this case, you need to specify the `on_publish` and `after_publish` methods:
-
-``` python
-from typing import Optional
-
-from faststream import BaseMiddleware
-from faststream.types import SendableMessage
-
-class MyMiddleware(BaseMiddleware):
-    async def on_publish(self, msg: SendableMessage) -> SendableMessage:
-        return await super().on_publish(msg)
-
-    async def after_publish(self, err: Optional[Exception]) -> None:
-        return await super().after_publish(err)
-```
diff --git a/search/docs/getting-started/publishing/broker.md b/search/docs/getting-started/publishing/broker.md
deleted file mode 100644
index 0a3f1d0..0000000
--- a/search/docs/getting-started/publishing/broker.md
+++ /dev/null
@@ -1,20 +0,0 @@
-# Broker Publishing
-
-The easiest way to publish a message is to use a Broker, which allows you to use it as a publisher client in any application.
-
-In the **FastStream** project, this call is not represented in the **AsyncAPI** scheme. You can use it to send rarely published messages, such as startup or shutdown events.
-
-=== "Kafka"
-    ```python linenums="1"
-    {!> docs_src/getting_started/publishing/broker_kafka.py !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1"
-    {!> docs_src/getting_started/publishing/broker_rabbit.py !}
-    ```
-
-=== "NATS"
-    ```python linenums="1"
-    {!> docs_src/getting_started/publishing/broker_nats.py !}
-    ```
diff --git a/search/docs/getting-started/publishing/decorator.md b/search/docs/getting-started/publishing/decorator.md
deleted file mode 100644
index 3239ec1..0000000
--- a/search/docs/getting-started/publishing/decorator.md
+++ /dev/null
@@ -1,34 +0,0 @@
-# Publisher Decorator
-
-The second easiest way to publish messages is by using the Publisher Decorator. This method has an AsyncAPI representation and is suitable for quickly creating applications. However, it doesn't provide all testing features.
-
-It creates a structured DataPipeline unit with an input and output. The order of Subscriber and Publisher decorators doesn't matter, but they can only be used with functions already decorated by a `subscriber`.
-
-It uses the handler function's return type annotation to cast the function's return value before sending, so be accurate with it:
-
-=== "Kafka"
-    ```python linenums="1"
-    {!> docs_src/getting_started/publishing/decorator_kafka.py !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1"
-    {!> docs_src/getting_started/publishing/decorator_rabbit.py !}
-    ```
-
-=== "NATS"
-    ```python linenums="1"
-    {!> docs_src/getting_started/publishing/decorator_nats.py !}
-    ```
-
-It can be used multiple times with one function to broadcast the function's return:
-
-```python
-@broker.subscriber("in")
-@broker.publisher("first-out")
-@broker.publisher("second-out")
-async def handle(msg) -> str:
-    return "Response"
-```
-
-Additionally, it automatically sends a message with the same `correlation_id` as the incoming message. This way, you get the same `correlation_id` for the entire message pipeline process across all services, allowing you to collect a trace.
diff --git a/search/docs/getting-started/publishing/direct.md b/search/docs/getting-started/publishing/direct.md
deleted file mode 100644
index d478289..0000000
--- a/search/docs/getting-started/publishing/direct.md
+++ /dev/null
@@ -1,27 +0,0 @@
-# Publisher Direct Usage
-
-The Publisher Direct Usage is a full-featured way to publish messages. It has AsyncAPI representation and includes testable features. This method creates a reusable Publisher object that can be used directly to publish a message:
-
-=== "Kafka"
-    ```python linenums="1"
-    {!> docs_src/getting_started/publishing/direct_kafka.py !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1"
-    {!> docs_src/getting_started/publishing/direct_rabbit.py !}
-    ```
-
-=== "NATS"
-    ```python linenums="1"
-    {!> docs_src/getting_started/publishing/direct_nats.py !}
-    ```
-
-It is suitable for publishing different messages to different outputs within the same processing function:
-
-```python
-@broker.subscriber("in")
-async def handle(msg):
-    await publisher1.publish("Response-1")
-    await publisher2.publish("Response-2")
-```
diff --git a/search/docs/getting-started/publishing/index.md b/search/docs/getting-started/publishing/index.md
deleted file mode 100644
index 76a833a..0000000
--- a/search/docs/getting-started/publishing/index.md
+++ /dev/null
@@ -1,35 +0,0 @@
-# Publishing Basics
-
-**FastStream** is broker-agnostic and easy to use, even as a client in non-FastStream applications.
-
-It offers several use cases for publishing messages:
-
-* Using `broker.publish`
-* Using a decorator
-* Using a publisher object decorator
-* Using a publisher object directly
-
-**FastStream** allows you to publish any JSON-serializable messages (Python types, Pydantic models, etc.) or raw bytes.
-
-It automatically sets up all required headers, especially the `correlation_id`, which is used to trace message processing pipelines across all services.
- -To publish a message, simply set up the message content and a routing key: - -=== "Kafka" - ```python - async with KafkaBroker() as br: - await br.publish("message", "topic") - ``` - -=== "RabbitMQ" - ```python - async with RabbitBroker() as br: - await br.publish("message", "queue") - ``` - - -=== "NATS" - ```python - async with NatsBroker() as br: - await br.publish("message", "queue") - ``` diff --git a/search/docs/getting-started/publishing/object.md b/search/docs/getting-started/publishing/object.md deleted file mode 100644 index 0f2f9ae..0000000 --- a/search/docs/getting-started/publishing/object.md +++ /dev/null @@ -1,34 +0,0 @@ -# Publisher Object - -The Publisher Object provides a full-featured way to publish messages. It has **AsyncAPI** representation and includes testable features. This method creates a reusable Publisher object. - -It can be used as a function decorator. The order of Subscriber and Publisher decorators doesn't matter, but they can only be used with functions decorated by a `subscriber` decorator. - -It also uses the handler function's return type annotation to cast the function's return value before sending, so be accurate with it: - -=== "Kafka" - ```python linenums="1" - {!> docs_src/getting_started/publishing/object_kafka.py !} - ``` - -=== "RabbitMQ" - ```python linenums="1" - {!> docs_src/getting_started/publishing/object_rabbit.py !} - ``` - -=== "NATS" - ```python linenums="1" - {!> docs_src/getting_started/publishing/object_nats.py !} - ``` - -You can use it multiple times with one function to broadcast the function's return: - -```python -@publisher1 -@publisher2 -@broker.subscriber("in") -async def handle(msg) -> str: - return "Response" -``` - -Additionally, it automatically sends a message with the same `correlation_id` as the incoming message. This way, you get the same correlation_id for the entire message pipeline process across all services, allowing you to collect a trace. diff --git a/search/docs/getting-started/publishing/test.md b/search/docs/getting-started/publishing/test.md deleted file mode 100644 index aae0f45..0000000 --- a/search/docs/getting-started/publishing/test.md +++ /dev/null @@ -1,62 +0,0 @@ -# Publisher Testing - -If you are working with a Publisher object (either decorator or direct), you can check outgoing messages as well. 
There are several testing features available:
-
-* In-memory TestClient
-* Publishing (including error handling)
-* Checking the incoming message body
-* Note about mock clearing after the context exits
-
-## Base application
-
-=== "Decorator"
-=== "Kafka"
-    ```python linenums="1"
-    {!> docs_src/getting_started/publishing/object_kafka.py[ln:7-12] !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1"
-    {!> docs_src/getting_started/publishing/object_rabbit.py[ln:7-12] !}
-    ```
-
-=== "NATS"
-    ```python linenums="1"
-    {!> docs_src/getting_started/publishing/object_nats.py[ln:7-12] !}
-    ```
-
-=== "Direct"
-=== "Kafka"
-    ```python linenums="1"
-    {!> docs_src/getting_started/publishing/direct_kafka.py[ln:7-11] !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1"
-    {!> docs_src/getting_started/publishing/direct_rabbit.py[ln:7-11] !}
-    ```
-
-=== "NATS"
-    ```python linenums="1"
-    {!> docs_src/getting_started/publishing/direct_nats.py[ln:7-11] !}
-    ```
-
-## Testing
-
-=== "Kafka"
-    ```python linenums="1"
-    {!> docs_src/getting_started/publishing/object_kafka_testing.py [ln:1-3,7-12] !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1"
-    {!> docs_src/getting_started/publishing/object_rabbit_testing.py [ln:1-3,7-12] !}
-    ```
-
-=== "NATS"
-    ```python linenums="1"
-    {!> docs_src/getting_started/publishing/object_nats_testing.py [ln:1-3,7-12] !}
-    ```
-
-* Testing with a real broker
-* Waiting for the consumer to be called
diff --git a/search/docs/getting-started/routers/index.md b/search/docs/getting-started/routers/index.md
deleted file mode 100644
index cba596a..0000000
--- a/search/docs/getting-started/routers/index.md
+++ /dev/null
@@ -1,51 +0,0 @@
----
-# template variables
-note_decor: Now you can use the created router to register handlers and publishers as if it were a regular broker
-note_include: Then you can simply include all the handlers declared using the router in your broker
-note_publish: Please note that when publishing a message, you now need to specify the same prefix that you used when creating the router
----
-
-# Broker Router
-
-Sometimes you want to:
-
-* split an application into includable modules
-* separate business logic from your handler registration
-* apply some [decoder](../serialization/index.md)/[middleware](../middlewares/index.md)/[dependencies](../dependencies/global.md) to a group of subscribers
-
-For these reasons, **FastStream** has a special *Broker Router*.
-
-## Router Usage
-
-First, you need to import the *Broker Router* from the same module where you imported the broker.
-
-!!! note ""
-    When creating a *Broker Router*, you can specify a prefix that will be automatically applied to all subscribers and publishers of this router.
-
-{% import 'getting_started/routers/1.md' as includes with context %}
-{{ includes }}
-
-!!! tip
-    Also, when creating a *Broker Router*, you can specify [middleware](../middlewares/index.md), [dependencies](../dependencies/global.md), [parser](../serialization/parser.md) and [decoder](../serialization/decoder.md) to apply them to all subscribers declared via this router.
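-
-For example, a minimal sketch of the whole flow (shown for RabbitMQ; the queue name and prefix are illustrative, and the other brokers work the same way):
-
-```python
-from faststream.rabbit import RabbitBroker, RabbitRouter
-
-broker = RabbitBroker()
-router = RabbitRouter(prefix="prefix_")
-
-
-@router.subscriber("test-queue")  # the final queue name will be "prefix_test-queue"
-async def handle(msg):
-    ...
-
-
-broker.include_router(router)
-```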
-
-## Delay Handler Registration
-
-If you want to separate your application's core logic from **FastStream**'s routing logic, you can write some core functions and use them as *Broker Router* `handlers` later:
-
-=== "Kafka"
-    ```python linenums="1" hl_lines="2 8 13 15"
-    {!> docs_src/getting_started/routers/router_delay_kafka.py [ln:1-15] !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1" hl_lines="2 8 13 15"
-    {!> docs_src/getting_started/routers/router_delay_rabbit.py [ln:1-15] !}
-    ```
-
-=== "NATS"
-    ```python linenums="1" hl_lines="2 8 13 15"
-    {!> docs_src/getting_started/routers/router_delay_nats.py [ln:1-15] !}
-    ```
-
-!!! warning
-    Be careful, this way you won't be able to test your handlers with a [`mock`](../subscription/test.md) object.
diff --git a/search/docs/getting-started/serialization/decoder.md b/search/docs/getting-started/serialization/decoder.md
deleted file mode 100644
index d3aac74..0000000
--- a/search/docs/getting-started/serialization/decoder.md
+++ /dev/null
@@ -1,84 +0,0 @@
-# Custom Decoder
-
-At this stage, the body of a **StreamMessage** is transformed into the format that it will take when it enters your handler function. This is the stage you will need to redefine most often.
-
-## Signature
-
-The original decoder function has a relatively simple signature (this is a simplified version):
-
-=== "Kafka"
-    ``` python
-    from faststream.types import DecodedMessage
-    from faststream.kafka import KafkaMessage
-
-    def decoder(msg: KafkaMessage) -> DecodedMessage:
-        ...
-    ```
-
-=== "RabbitMQ"
-    ``` python
-    from faststream.types import DecodedMessage
-    from faststream.rabbit import RabbitMessage
-
-    def decoder(msg: RabbitMessage) -> DecodedMessage:
-        ...
-    ```
-
-=== "NATS"
-    ``` python
-    from faststream.types import DecodedMessage
-    from faststream.nats import NatsMessage
-
-    def decoder(msg: NatsMessage) -> DecodedMessage:
-        ...
-    ```
-
-Alternatively, you can reuse the original decoder function with the following signature:
-
-=== "Kafka"
-    ``` python
-    from typing import Callable, Awaitable
-    from faststream.types import DecodedMessage
-    from faststream.kafka import KafkaMessage
-
-    async def decoder(
-        msg: KafkaMessage,
-        original_decoder: Callable[[KafkaMessage], Awaitable[DecodedMessage]],
-    ) -> DecodedMessage:
-        return await original_decoder(msg)
-    ```
-
-=== "RabbitMQ"
-    ``` python
-    from typing import Callable, Awaitable
-    from faststream.types import DecodedMessage
-    from faststream.rabbit import RabbitMessage
-
-    async def decoder(
-        msg: RabbitMessage,
-        original_decoder: Callable[[RabbitMessage], Awaitable[DecodedMessage]],
-    ) -> DecodedMessage:
-        return await original_decoder(msg)
-    ```
-
-=== "NATS"
-    ``` python
-    from typing import Callable, Awaitable
-    from faststream.types import DecodedMessage
-    from faststream.nats import NatsMessage
-
-    async def decoder(
-        msg: NatsMessage,
-        original_decoder: Callable[[NatsMessage], Awaitable[DecodedMessage]],
-    ) -> DecodedMessage:
-        return await original_decoder(msg)
-    ```
-
-!!! note
-    The original decoder is always an asynchronous function, so your custom decoder should also be asynchronous.
-
-Afterward, you can set this custom decoder at the broker or subscriber level.
-
-## Example
-
-You can find examples of *Protobuf* and *Msgpack* serialization in the [next article](./examples.md){.internal-link}.
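-
-Meanwhile, a minimal gzip decoder could look like this (an illustrative sketch, assuming the producer gzip-compresses a JSON body; shown for RabbitMQ):
-
-``` python
-import gzip
-import json
-
-from faststream.rabbit import RabbitBroker, RabbitMessage
-
-broker = RabbitBroker()
-
-
-async def decode_message(msg: RabbitMessage):
-    # decompress the raw bytes, then parse the JSON payload
-    return json.loads(gzip.decompress(msg.body))
-
-
-@broker.subscriber("test", decoder=decode_message)
-async def handle(name: str):
-    ...
-```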
diff --git a/search/docs/getting-started/serialization/examples.md b/search/docs/getting-started/serialization/examples.md
deleted file mode 100644
index 9283306..0000000
--- a/search/docs/getting-started/serialization/examples.md
+++ /dev/null
@@ -1,123 +0,0 @@
-# Serialization examples
-
-## Protobuf
-
-In this section, we will explore an example using *Protobuf*. However, this approach is also applicable to other serialization methods.
-
-???- note "Protobuf"
-    *Protobuf* is an alternative message serialization method commonly used in *GRPC*. Its main advantage is that it results in much smaller message sizes[^1] compared to *JSON*, but it requires a message schema (`.proto` files) on both the client and server sides.
-
-To begin, install the necessary dependencies:
-
-```console
-pip install grpcio-tools
-```
-
-Next, let's define the schema for our message:
-
-```proto title="message.proto"
-syntax = "proto3";
-
-message Person {
-    string name = 1;
-    float age = 2;
-}
-```
-
-Now, generate a *Python* class to work with messages in the *Protobuf* format:
-
-```console
-python -m grpc_tools.protoc --python_out=. --pyi_out=. -I . message.proto
-```
-
-This generates two files: `message_pb2.py` and `message_pb2.pyi`. We can use the generated class to serialize our messages:
-
-``` python linenums="1" hl_lines="1 10-13 16 23"
-from message_pb2 import Person
-
-from faststream import FastStream, Logger, NoCast
-from faststream.rabbit import RabbitBroker, RabbitMessage
-
-broker = RabbitBroker()
-app = FastStream(broker)
-
-
-async def decode_message(msg: RabbitMessage) -> Person:
-    decoded = Person()
-    decoded.ParseFromString(msg.body)
-    return decoded
-
-
-@broker.subscriber("test", decoder=decode_message)
-async def consume(body: NoCast[Person], logger: Logger):
-    logger.info(body)
-
-
-@app.after_startup
-async def publish():
-    body = Person(name="John", age=25).SerializeToString()
-    await broker.publish(body, "test")
-```
-
-Note that we used the `NoCast` annotation to exclude the message from the `pydantic` representation of our handler.
-
-``` python
-async def consume(body: NoCast[Person], logger: Logger):
-```
-
-## Msgpack
-
-*Msgpack* is another alternative binary data format. Its main advantage is that it results in smaller message sizes[^2] compared to *JSON*, although slightly larger than *Protobuf*. The key advantage is that it doesn't require a message schema, making it easy to use in most cases.
-
-To get started, install the necessary dependencies:
-
-```console
-pip install msgpack
-```
-
-Since there is no need for a schema, you can easily write a *Msgpack* decoder:
-
-``` python linenums="1" hl_lines="1 10-11 14 21"
-import msgpack
-
-from faststream import FastStream, Logger
-from faststream.rabbit import RabbitBroker, RabbitMessage
-
-broker = RabbitBroker()
-app = FastStream(broker)
-
-
-async def decode_message(msg: RabbitMessage):
-    return msgpack.loads(msg.body)
-
-
-@broker.subscriber("test", decoder=decode_message)
-async def consume(name: str, age: int, logger: Logger):
-    logger.info(f"{name}: {age}")
-
-
-@app.after_startup
-async def publish():
-    body = msgpack.dumps({"name": "John", "age": 25}, use_bin_type=True)
-    await broker.publish(body, "test")
-```
-
-Using *Msgpack* is much simpler than using *Protobuf* schemas. Therefore, if you don't have strict message size limitations, you can use *Msgpack* serialization in most cases.
-
-## Tips
-
-### Data Compression
-
-If you are dealing with very large messages, consider compressing them as well.
You can explore libraries such as [**lz4**](https://github.com/python-lz4/python-lz4){.external-link target="_blank"} or [**zstd**](https://github.com/sergey-dryabzhinsky/python-zstd){.external-link target="_blank"} for compression algorithms.
-
-Compression can significantly reduce message size, especially if there are repeated blocks. However, in the case of small message bodies, data compression may increase the message size. Therefore, you should assess the compression impact based on your specific application requirements.
-
-### Broker-Level Serialization
-
-You can still set a custom `decoder` at the Broker or Router level. However, if you want to automatically encode publishing messages as well, you should explore [Middleware](../middlewares/index.md){.internal-link} for a serialization implementation.
-
-[^1]:
-    For example, a message like `#!json { "name": "John", "age": 25 }` in *JSON* takes **27** bytes, while in *Protobuf*, it takes only **11** bytes. With lists and more complex structures, the savings can be even more significant (up to 20x).
-
-[^2]:
-    A message with *Msgpack* serialization, such as `#!json { "name": "John", "age": 25 }`, takes **16** bytes.
diff --git a/search/docs/getting-started/serialization/index.md b/search/docs/getting-started/serialization/index.md
deleted file mode 100644
index d2ddd23..0000000
--- a/search/docs/getting-started/serialization/index.md
+++ /dev/null
@@ -1,20 +0,0 @@
-# Custom Serialization
-
-By default, **FastStream** uses the *JSON* format to send and receive messages. However, if you need to handle messages in other formats or with additional serialization steps, such as *gzip*, *lz4*, *Avro*, *Protobuf* or *Msgpack*, you can easily modify the serialization logic.
-
-## Serialization Steps
-
-Before the message reaches your subscriber, **FastStream** applies two functions to it sequentially: `parse_message` and `decode_message`. You can modify one or both stages depending on your needs.
-
-### Message Parsing
-
-At this stage, **FastStream** serializes an incoming message from the broker's framework into a general format called **StreamMessage**. During this stage, the message body remains in the form of raw bytes.
-
-!!! warning ""
-    This stage is closely related to the features of the broker used, and in most cases, redefining it is not necessary.
-
-The parser declared at the `broker` level will be applied to all subscribers. The parser declared at the `subscriber` level is applied only to that specific subscriber and overrides the `broker` parser if specified.
-
-### Message Decoding
-
-At this stage, the body of the **StreamMessage** is transformed into a format suitable for processing within your subscriber function. This is the stage you may need to redefine more often.
diff --git a/search/docs/getting-started/serialization/parser.md b/search/docs/getting-started/serialization/parser.md
deleted file mode 100644
index 9819d7e..0000000
--- a/search/docs/getting-started/serialization/parser.md
+++ /dev/null
@@ -1,106 +0,0 @@
-# Custom Parser
-
-At this stage, **FastStream** serializes an incoming message from the broker's framework into a general format called **StreamMessage**. During this stage, the message body remains in the form of raw bytes.
-
-**StreamMessage** is a general representation of a message within **FastStream**. It contains all the information required for message processing within **FastStream**.
It is even used to represent message batches, so the primary reason to customize it is to redefine the metadata associated with **FastStream** messages.
-
-For example, you can specify your own header with the `message_id` semantic. This allows you to inform **FastStream** about this custom header through parser customization.
-
-## Signature
-
-To create a custom message parser, you should write a regular Python function (synchronous or asynchronous) with the following signature:
-
-=== "Kafka"
-    ``` python
-    from aiokafka import ConsumerRecord
-    from faststream.kafka import KafkaMessage
-
-    def parser(msg: ConsumerRecord) -> KafkaMessage:
-        ...
-    ```
-
-=== "RabbitMQ"
-    ``` python
-    from aio_pika import IncomingMessage
-    from faststream.rabbit import RabbitMessage
-
-    def parser(msg: IncomingMessage) -> RabbitMessage:
-        ...
-    ```
-
-=== "NATS"
-    ``` python
-    from nats.aio.msg import Msg
-    from faststream.nats import NatsMessage
-
-    def parser(msg: Msg) -> NatsMessage:
-        ...
-    ```
-
-Alternatively, you can reuse the original parser function with the following signature:
-
-=== "Kafka"
-    ``` python
-    from typing import Awaitable, Callable
-    from aiokafka import ConsumerRecord
-    from faststream.kafka import KafkaMessage
-
-    async def parser(
-        msg: ConsumerRecord,
-        original_parser: Callable[[ConsumerRecord], Awaitable[KafkaMessage]],
-    ) -> KafkaMessage:
-        return await original_parser(msg)
-    ```
-
-=== "RabbitMQ"
-    ``` python
-    from typing import Awaitable, Callable
-    from aio_pika import IncomingMessage
-    from faststream.rabbit import RabbitMessage
-
-    async def parser(
-        msg: IncomingMessage,
-        original_parser: Callable[[IncomingMessage], Awaitable[RabbitMessage]],
-    ) -> RabbitMessage:
-        return await original_parser(msg)
-    ```
-
-=== "NATS"
-    ``` python
-    from typing import Awaitable, Callable
-    from nats.aio.msg import Msg
-    from faststream.nats import NatsMessage
-
-    async def parser(
-        msg: Msg,
-        original_parser: Callable[[Msg], Awaitable[NatsMessage]],
-    ) -> NatsMessage:
-        return await original_parser(msg)
-    ```
-
-The argument naming doesn't matter; the parser will always be passed as the second argument.
-
-!!! note
-    The original parser is always an asynchronous function, so your custom parser should also be asynchronous.
-
-Afterward, you can set this custom parser at the broker or subscriber level.
-
-## Example
-
-As an example, let's redefine `message_id` to a custom header:
-
-
-=== "Kafka"
-    ``` python linenums="1" hl_lines="9-15 18 28"
-    {!> docs_src/getting_started/serialization/parser_kafka.py !}
-    ```
-
-=== "RabbitMQ"
-    ``` python linenums="1" hl_lines="9-15 18 28"
-    {!> docs_src/getting_started/serialization/parser_rabbit.py !}
-    ```
-
-=== "NATS"
-    ``` python linenums="1" hl_lines="9-15 18 28"
-    {!> docs_src/getting_started/serialization/parser_nats.py !}
-    ```
diff --git a/search/docs/getting-started/subscription/annotation.md b/search/docs/getting-started/subscription/annotation.md
deleted file mode 100644
index 2ac5b8a..0000000
--- a/search/docs/getting-started/subscription/annotation.md
+++ /dev/null
@@ -1,61 +0,0 @@
-# Annotation Serialization
-
-## Basic usage
-
-As you already know, **FastStream** serializes your incoming message body according to the function type annotations using [**Pydantic**](https://docs.pydantic.dev){.external-link target="_blank"}.
-
-So, here are some valid use cases:
-
-```python
-@broker.subscriber("test")
-async def handle(msg: str):
-    ...
-
-@broker.subscriber("test")
-async def handle(msg: bytes):
-    ...
-
-@broker.subscriber("test")
-async def handle(msg: int):
-    ...
-```
-
-The same works with other Python primitive types as well (`#!python float`, `#!python bool`, `#!python datetime`, etc.)
-
-!!! note
-    If the incoming message cannot be serialized by the described schema, **FastStream** raises a `pydantic.ValidationError` with a correct log message.
-
-Also, thanks to **Pydantic** (again), **FastStream** is able to serialize (and validate) more complex types like `pydantic.HttpUrl`, `pydantic.PositiveInt`, etc.
-
-## JSON Basic Serialization
-
-But how can we serialize a more complex message, like `#!json { "name": "John", "user_id": 1 }`?
-
-Of course, we can serialize it as a simple `#!python dict`:
-
-```python
-from typing import Any, Dict
-
-@broker.subscriber("test")
-async def handle(msg: Dict[str, Any]):
-    ...
-```
-
-But that doesn't look like proper message validation, does it?
-
-For this reason, **FastStream** supports per-argument message serialization: you can declare multiple arguments with various types, and your message will be unpacked into them:
-
-=== "Kafka"
-    ```python
-    {!> docs_src/getting_started/subscription/annotation_kafka.py [ln:8-11] !}
-    ```
-
-=== "RabbitMQ"
-    ```python
-    {!> docs_src/getting_started/subscription/annotation_rabbit.py [ln:8-11] !}
-    ```
-
-=== "NATS"
-    ```python
-    {!> docs_src/getting_started/subscription/annotation_nats.py [ln:8-11] !}
-    ```
diff --git a/search/docs/getting-started/subscription/filtering.md b/search/docs/getting-started/subscription/filtering.md
deleted file mode 100644
index 661facc..0000000
--- a/search/docs/getting-started/subscription/filtering.md
+++ /dev/null
@@ -1,60 +0,0 @@
-# Application-level Filtering
-
-**FastStream** also allows you to choose how messages are processed based on their headers, body type, or anything else. The `filter` feature enables you to consume various messages with different schemas within a single event stream.
-
-!!! tip
-    Each message must be consumed exactly once (overlapping filters are not allowed).
-
-As an example, let's create a subscriber for both `JSON` and non-`JSON` messages:
-
-=== "Kafka"
-    ```python linenums="1" hl_lines="10 17"
-    {!> docs_src/getting_started/subscription/filter_kafka.py [ln:1-19] !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1" hl_lines="10 17"
-    {!> docs_src/getting_started/subscription/filter_rabbit.py [ln:1-19] !}
-    ```
-
-=== "NATS"
-    ```python linenums="1" hl_lines="10 17"
-    {!> docs_src/getting_started/subscription/filter_nats.py [ln:1-19] !}
-    ```
-
-!!! note
-    A subscriber without a filter is the default subscriber. It consumes messages that have not been consumed yet.
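-
-In essence, the hidden snippets above boil down to something like the following sketch (the **RabbitMQ** flavor and queue name are assumed for illustration; the handler names match the ones referenced below):
-
-```python
-from faststream import FastStream
-from faststream.rabbit import RabbitBroker
-
-broker = RabbitBroker()
-app = FastStream(broker)
-
-
-@broker.subscriber(
-    "test-queue",
-    # consume only messages published with a JSON content type
-    filter=lambda msg: msg.content_type == "application/json",
-)
-async def handle(name: str, user_id: int):
-    ...
-
-
-@broker.subscriber("test-queue")  # no filter: the default subscriber
-async def default_handler(msg: str):
-    ...
-```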
-
-In this case, the following message will be delivered to the `handle` function:
-
-=== "Kafka"
-    ```python hl_lines="2"
-    {!> docs_src/getting_started/subscription/filter_kafka.py [ln:24-27] !}
-    ```
-
-=== "RabbitMQ"
-    ```python hl_lines="2"
-    {!> docs_src/getting_started/subscription/filter_rabbit.py [ln:24-27] !}
-    ```
-
-=== "NATS"
-    ```python hl_lines="2"
-    {!> docs_src/getting_started/subscription/filter_nats.py [ln:24-27] !}
-    ```
-
-And this one will be delivered to the `default_handler`:
-
-=== "Kafka"
-    ```python hl_lines="2"
-    {!> docs_src/getting_started/subscription/filter_kafka.py [ln:29-32] !}
-    ```
-
-=== "RabbitMQ"
-    ```python hl_lines="2"
-    {!> docs_src/getting_started/subscription/filter_rabbit.py [ln:29-32] !}
-    ```
-
-=== "NATS"
-    ```python hl_lines="2"
-    {!> docs_src/getting_started/subscription/filter_nats.py [ln:29-32] !}
-    ```
diff --git a/search/docs/getting-started/subscription/index.md b/search/docs/getting-started/subscription/index.md
deleted file mode 100644
index a155fff..0000000
--- a/search/docs/getting-started/subscription/index.md
+++ /dev/null
@@ -1,138 +0,0 @@
-# Subscription Basics
-
-**FastStream** provides a Message Broker agnostic way to subscribe to event streams.
-
-You don't even need to know about the topics/queues/subjects or other inner objects of the broker you use.
-The basic syntax is the same for all brokers:
-
-=== "Kafka"
-    ```python
-    from faststream.kafka import KafkaBroker
-
-    broker = KafkaBroker()
-
-    @broker.subscriber("test")  # topic name
-    async def handle_msg(msg_body):
-        ...
-    ```
-
-=== "RabbitMQ"
-    ```python
-    from faststream.rabbit import RabbitBroker
-
-    broker = RabbitBroker()
-
-    @broker.subscriber("test")  # queue name
-    async def handle_msg(msg_body):
-        ...
-    ```
-
-=== "NATS"
-    ```python
-    from faststream.nats import NatsBroker
-
-    broker = NatsBroker()
-
-    @broker.subscriber("test")  # subject name
-    async def handle_msg(msg_body):
-        ...
-    ```
-
-!!! tip
-    If you want to use Message Broker specific features, please visit the corresponding broker documentation section.
-    The **Tutorial** section describes the general features.
-
-Synchronous functions are supported as well:
-
-=== "Kafka"
-    ```python
-    from faststream.kafka import KafkaBroker
-
-    broker = KafkaBroker()
-
-    @broker.subscriber("test")  # topic name
-    def handle_msg(msg_body):
-        ...
-    ```
-
-=== "RabbitMQ"
-    ```python
-    from faststream.rabbit import RabbitBroker
-
-    broker = RabbitBroker()
-
-    @broker.subscriber("test")  # queue name
-    def handle_msg(msg_body):
-        ...
-    ```
-
-=== "NATS"
-    ```python
-    from faststream.nats import NatsBroker
-
-    broker = NatsBroker()
-
-    @broker.subscriber("test")  # subject name
-    def handle_msg(msg_body):
-        ...
-    ```
-
-## Message Body Serialization
-
-Generally, **FastStream** uses your function type annotation to serialize the incoming message body with [**Pydantic**](https://docs.pydantic.dev){.external-link target="_blank"}. This is similar to how [**FastAPI**](https://fastapi.tiangolo.com){.external-link target="_blank"} works (if you are familiar with it).
-
-```python
-@broker.subscriber("test")
-async def handle_str(msg_body: str):
-    ...
-```
-
-You can also access some extra features through the function arguments, such as [Depends](../dependencies/index.md){.internal-link} and [Context](../context/existed.md){.internal-link} if required.
-
-However, you can easily disable Pydantic validation by creating a broker with the option `#!python Broker(apply_types=False)` (this also disables the Context and Depends features).
-
-This way, **FastStream** still passes your handler the `#!python json.loads` result, but without Pydantic validation or casting.
-
-=== "Kafka"
-    ```python
-    from faststream.kafka import KafkaBroker
-
-    broker = KafkaBroker(apply_types=False)
-
-    @broker.subscriber("test")
-    async def handle_msg(msg_body: str):  # just an annotation, has no real effect
-        ...
-    ```
-
-=== "RabbitMQ"
-    ```python
-    from faststream.rabbit import RabbitBroker
-
-    broker = RabbitBroker(apply_types=False)
-
-    @broker.subscriber("test")
-    async def handle_msg(msg_body: str):  # just an annotation, has no real effect
-        ...
-    ```
-
-=== "NATS"
-    ```python
-    from faststream.nats import NatsBroker
-
-    broker = NatsBroker(apply_types=False)
-
-    @broker.subscriber("test")
-    async def handle_msg(msg_body: str):  # just an annotation, has no real effect
-        ...
-    ```
-
-## Multiple Subscriptions
-
-You can also subscribe to multiple event streams at the same time with one function. Just wrap it with multiple `#!python @broker.subscriber(...)` decorators (they have no effect on each other).
-
-```python
-@broker.subscriber("first_sub")
-@broker.subscriber("second_sub")
-async def handler(msg):
-    ...
-```
diff --git a/search/docs/getting-started/subscription/pydantic.md b/search/docs/getting-started/subscription/pydantic.md
deleted file mode 100644
index 24ee8fd..0000000
--- a/search/docs/getting-started/subscription/pydantic.md
+++ /dev/null
@@ -1,43 +0,0 @@
-# Pydantic Serialization
-
-## pydantic.Field
-
-In addition, **FastStream** uses your handlers' annotations to collect information about the application schema and generate the [**AsyncAPI**](https://www.asyncapi.com){.external-link target="_blank"} schema.
-
-You can enrich this information with extra details using `pydantic.Field` (such as title, description and examples). Additionally, [**Fields**](https://docs.pydantic.dev/latest/usage/fields/){.external-link target="_blank"} usage allows you to add extra validations to your message schema.
-
-Just use `pydantic.Field` as a function default argument:
-
-=== "Kafka"
-    ```python linenums="1" hl_lines="12-17"
-    {!> docs_src/getting_started/subscription/pydantic_fields_kafka.py !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1" hl_lines="12-17"
-    {!> docs_src/getting_started/subscription/pydantic_fields_rabbit.py !}
-    ```
-
-=== "NATS"
-    ```python linenums="1" hl_lines="12-17"
-    {!> docs_src/getting_started/subscription/pydantic_fields_nats.py !}
-    ```
-
-## pydantic.BaseModel
-
-To make your message schema reusable between different subscribers and publishers, you can declare it as a `pydantic.BaseModel` and use it as a single message annotation:
-
-=== "Kafka"
-    ```python linenums="1" hl_lines="1 10 20"
-    {!> docs_src/getting_started/subscription/pydantic_model_kafka.py !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1" hl_lines="1 10 20"
-    {!> docs_src/getting_started/subscription/pydantic_model_rabbit.py !}
-    ```
-
-=== "NATS"
-    ```python linenums="1" hl_lines="1 10 20"
-    {!> docs_src/getting_started/subscription/pydantic_model_nats.py !}
-    ```
diff --git a/search/docs/getting-started/subscription/test.md b/search/docs/getting-started/subscription/test.md
deleted file mode 100644
index 55f7425..0000000
--- a/search/docs/getting-started/subscription/test.md
+++ /dev/null
@@ -1,152 +0,0 @@
-# Subscriber Testing
-
-Testability is a crucial part of any application, and **FastStream** provides you with the tools to test your code easily.
-
-## Original Application
-
-Let's take a look at the original application we are going to test:
-
-=== "Kafka"
-    ```python linenums="1" title="annotation_kafka.py"
-    {!> docs_src/getting_started/subscription/annotation_kafka.py !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1" title="annotation_rabbit.py"
-    {!> docs_src/getting_started/subscription/annotation_rabbit.py !}
-    ```
-
-=== "NATS"
-    ```python linenums="1" title="annotation_nats.py"
-    {!> docs_src/getting_started/subscription/annotation_nats.py !}
-    ```
-
-It consumes **JSON** messages like `#!json { "name": "username", "user_id": 1 }`.
-
-Of course, you can test your consumer function like a regular one:
-
-```python
-@pytest.mark.asyncio
-async def test_handler():
-    await handle("John", 1)
-```
-
-But if you want to test your function closer to your real runtime, you should use the special **FastStream** test client.
-
-## In-Memory Testing
-
-Deploying a whole service with a Message Broker is a bit too much just for testing purposes, especially in your CI environment. Not to mention the possible loss of messages due to network failures when working with real brokers.
-
-For this reason, **FastStream** has a special `TestClient` to make your broker work in `InMemory` mode.
-
-Just use it like a regular async context manager - all published messages will be routed in-memory (without any external dependencies) and consumed by the correct handler.
-
-=== "Kafka"
-    ```python linenums="1" hl_lines="4 11-12"
-    {!> docs_src/getting_started/subscription/testing_kafka.py [ln:1-12] !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1" hl_lines="4 11-12"
-    {!> docs_src/getting_started/subscription/testing_rabbit.py [ln:1-12] !}
-    ```
-
-=== "NATS"
-    ```python linenums="1" hl_lines="4 11-12"
-    {!> docs_src/getting_started/subscription/testing_nats.py [ln:1-12] !}
-    ```
-
-### Catching Exceptions
-
-This way you can catch any exceptions that occur inside your handler:
-
-=== "Kafka"
-    ```python linenums="1" hl_lines="4"
-    {!> docs_src/getting_started/subscription/testing_kafka.py [ln:18-23] !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1" hl_lines="4"
-    {!> docs_src/getting_started/subscription/testing_rabbit.py [ln:18-23] !}
-    ```
-
-=== "NATS"
-    ```python linenums="1" hl_lines="4"
-    {!> docs_src/getting_started/subscription/testing_nats.py [ln:18-23] !}
-    ```
-
-### Validating Input
-
-Also, your handler has a mock object you can use to validate the incoming messages and call counts:
-
-=== "Kafka"
-    ```python linenums="1" hl_lines="6"
-    {!> docs_src/getting_started/subscription/testing_kafka.py [ln:9-14] !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1" hl_lines="6"
-    {!> docs_src/getting_started/subscription/testing_rabbit.py [ln:9-14] !}
-    ```
-
-=== "NATS"
-    ```python linenums="1" hl_lines="6"
-    {!> docs_src/getting_started/subscription/testing_nats.py [ln:9-14] !}
-    ```
-
-!!! note
-    The handler mock receives the non-serialized **JSON** message body. This way you can validate the incoming message view, not the Python arguments.
-
-    Thus, our example checks not `#!python mock.assert_called_with(name="John", user_id=1)`, but `#!python mock.assert_called_with({ "name": "John", "user_id": 1 })`.
-
-You should be careful with this feature: all mock objects will be cleared when the context manager exits.
-
-=== "Kafka"
-    ```python linenums="1" hl_lines="6 8"
-    {!> docs_src/getting_started/subscription/testing_kafka.py [ln:9-16] !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1" hl_lines="6 8"
-    {!> docs_src/getting_started/subscription/testing_rabbit.py [ln:9-16] !}
-    ```
-
-=== "NATS"
-    ```python linenums="1" hl_lines="6 8"
-    {!> docs_src/getting_started/subscription/testing_nats.py [ln:9-16] !}
-    ```
-
-## Real Broker Testing
-
-If you want to test your application in a real environment, you shouldn't have to rewrite all your tests: just pass the optional `with_real` parameter to your `TestClient` context manager. This way, `TestClient` supports all the testing features but uses an unpatched broker to send and consume messages.
-
-=== "Kafka"
-    ```python linenums="1" hl_lines="4 11 13 20 23"
-    {!> docs_src/getting_started/subscription/real_testing_kafka.py !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1" hl_lines="4 11 13 20 23"
-    {!> docs_src/getting_started/subscription/real_testing_rabbit.py !}
-    ```
-
-=== "NATS"
-    ```python linenums="1" hl_lines="4 11 13 20 23"
-    {!> docs_src/getting_started/subscription/real_testing_nats.py !}
-    ```
-
-!!! tip
-    When you're using a patched broker to test your consumers, the publish method is called synchronously with the consumer one, so you don't need to wait until your message is consumed. But with a real broker, that's not the case.
-
-    For this reason, you have to wait for message consumption manually with the special `#!python handler.wait_call(timeout)` method. Also, inner handler exceptions will be raised in this function, not in `#!python broker.publish(...)`.
-
-### A Little Tip
-
-It can be very helpful to set the `with_real` flag using an environment variable. This way, you will be able to choose the testing mode right from the command line:
-
-```bash
-WITH_REAL=True/False pytest tests/
-```
-
-To learn more about managing your application configuration, visit [this](../config/index.md){.internal-link} page.
diff --git a/search/docs/getting-started/template/index.md b/search/docs/getting-started/template/index.md
deleted file mode 100644
index 8f794b1..0000000
--- a/search/docs/getting-started/template/index.md
+++ /dev/null
@@ -1,207 +0,0 @@
-# FastStream Template
-
-[FastStream Template](https://github.com/airtai/faststream-template/tree/main){.external-link target="_blank"} is a versatile repository that provides a solid foundation for your Python projects. It comes with a basic application, testing infrastructure, linting scripts, and various development tools to kickstart your development process. Whether you're building a new application from scratch or want to enhance an existing one, this template will save you time and help you maintain high code quality.
-
-## Features
-
-* **Basic Application**: [FastStream Template](https://github.com/airtai/faststream-template/tree/main){.external-link target="_blank"} includes a basic Python application as a starting point for your project. You can easily replace it with your own code.
-
-* **Testing Framework**: We've set up [`pytest`](https://pytest.org/){.external-link target="_blank"} for running unit tests. Write your tests in the `tests` directory and use the provided workflow for automated testing.
-
-* **Linting**: Keep your code clean and consistent with linting tools.
The repository includes linting scripts and configurations for [`mypy`](https://mypy.readthedocs.io/en/stable/){.external-link target="_blank"}, [`black`](https://github.com/psf/black){.external-link target="_blank"}, [`ruff`](https://github.com/astral-sh/ruff){.external-link target="_blank"} and [`bandit`](https://bandit.readthedocs.io/en/latest/){.external-link target="_blank"} - -* **Docker Support**: The included Dockerfile allows you to containerize your [`FastStream`](https://github.com/airtai/faststream){.external-link target="_blank"} application. Build and run your application in a containerized environment with ease. - -* **Dependency Management**: All application requirements and development dependencies are specified in the `pyproject.toml` file. This includes not only your project's dependencies but also configurations for various tools like [`pytest`](https://pytest.org/){.external-link target="_blank"}, [`mypy`](https://mypy.readthedocs.io/en/stable/){.external-link target="_blank"}, [`black`](https://github.com/psf/black){.external-link target="_blank"}, [`ruff`](https://github.com/astral-sh/ruff){.external-link target="_blank"}, and [`bandit`](https://bandit.readthedocs.io/en/latest/){.external-link target="_blank"}. - -* **Continuous Integration (CI)**: [FastStream Template](https://github.com/airtai/faststream-template/tree/main){.external-link target="_blank"} comes with three [GitHub Actions](https://github.com/features/actions){.external-link target="_blank"} workflows under the `.github/workflows` directory: - - 1. **Static Analysis and Testing**: This workflow consists of two jobs. The first job runs static analysis tools ([`mypy`](https://mypy.readthedocs.io/en/stable/){.external-link target="_blank"} and [`bandit`](https://bandit.readthedocs.io/en/latest/){.external-link target="_blank"}) to check your code for potential issues. If successful, the second job runs [`pytest`](https://pytest.org/){.external-link target="_blank"} to execute your test suite. - - 2. **Docker Build and Push**: This workflow automates the process of building a [`Docker`](https://www.docker.com/){.external-link target="_blank"} image for your [`FastStream`](https://github.com/airtai/faststream){.external-link target="_blank"} application and pushing it to the [GitHub Container Registry](https://ghcr.io){.external-link target="_blank"}. - - 3. **AsyncAPI Documentation**: The third workflow builds [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation for your [`FastStream`](https://github.com/airtai/faststream){.external-link target="_blank"} application and deploys it to [GitHub Pages](https://pages.github.com/){.external-link target="_blank"}. This is useful for documenting your API and making it accessible to others. - -## Getting Started - -To set up your development environment, follow these steps: - -1. In the [FastStream Template](https://github.com/airtai/faststream-template/tree/main){.external-link target="_blank"} repository, click on **Use this template** -> **Create a new repository**:![use-this-template](https://github.com/airtai/faststream/assets/7011056/6e4e913c-8cd5-4e14-95dc-3bc13b6989be) - In the next screen, fill out details such as repository name, description, etc:![create-repo](https://github.com/airtai/faststream/assets/7011056/77734648-6356-4388-b8c4-279a38fb5f65) - -2. 
Clone this repository to your local machine:
-    ```bash
-    git clone https://github.com/<username>/<repo-name>.git
-    cd <repo-name>
-    ```
-    > **_NOTE:_** Replace `<username>` with your GitHub username and `<repo-name>` with the name of your repository.
-
-3. Install all development requirements using pip:
-    ```bash
-    pip install -e ".[dev]"
-    ```
-
-## Development
-
-The application code is located in the `app/` directory. You can add new features or fix bugs in this directory. However, remember that code changes must be accompanied by corresponding updates to the tests located in the `tests/` directory.
-
-## Running Tests
-
-Once you have updated the tests, you can execute them using [`pytest`](https://pytest.org/){.external-link target="_blank"}:
-
-```bash
-pytest
-```
-
-## Running FastStream Application Locally
-
-To run the [`FastStream`](https://github.com/airtai/faststream){.external-link target="_blank"} application locally, follow these steps:
-
-1. Start the Kafka Docker container locally using the provided script:
-    ```bash
-    ./scripts/start_kafka_broker_locally.sh
-    ```
-
-2. Start the [`FastStream`](https://github.com/airtai/faststream){.external-link target="_blank"} application with the following command:
-    ```bash
-    faststream run app.application:app --workers 1
-    ```
-
-3. You can now send messages to the Kafka topic and test the application. Optionally, if you want to view messages in a topic, you can subscribe to it using the provided script:
-    ```bash
-    ./scripts/subscribe_to_kafka_broker_locally.sh
-    ```
-
-4. To stop the [`FastStream`](https://github.com/airtai/faststream){.external-link target="_blank"} application, press `Ctrl+C`.
-
-5. Finally, stop the Kafka Docker container by running the script:
-    ```bash
-    ./scripts/stop_kafka_broker_locally.sh
-    ```
-
-## Building and Testing Docker Image Locally
-
-If you'd like to build and test the [`Docker`](https://www.docker.com/){.external-link target="_blank"} image locally, follow these steps:
-
-1. Run the provided script to build the [`Docker`](https://www.docker.com/){.external-link target="_blank"} image locally. Use the following command:
-    ```bash
-    ./scripts/build_docker.sh
-    ```
-    This script will build the [`Docker`](https://www.docker.com/){.external-link target="_blank"} image locally with the same name as the one built in `CI`.
-
-2. Before starting the [`Docker`](https://www.docker.com/){.external-link target="_blank"} container, ensure that a Kafka [`Docker`](https://www.docker.com/){.external-link target="_blank"} container is running locally. You can start it using the provided script:
-    ```bash
-    ./scripts/start_kafka_broker_locally.sh
-    ```
-
-3. Once Kafka is up and running, you can start the local [`Docker`](https://www.docker.com/){.external-link target="_blank"} container using the following command:
-    ```bash
-    docker run --rm --name faststream-app --net=host ghcr.io/<username>/<repo-name>:latest
-    ```
-    `--rm`: This flag removes the container once it stops running, ensuring that it doesn't clutter your system with unused containers.
-    `--name faststream-app`: Assigns a name to the running container, in this case, "faststream-app".
-    `--net=host`: This flag allows the [`Docker`](https://www.docker.com/){.external-link target="_blank"} container to share the host's network namespace.
-
-4. To stop the local [`Docker`](https://www.docker.com/){.external-link target="_blank"} container, simply press `Ctrl+C` in your terminal.
-
-5. 
Finally, stop the Kafka [`Docker`](https://www.docker.com/){.external-link target="_blank"} container by running the provided script:
-    ```bash
-    ./scripts/stop_kafka_broker_locally.sh
-    ```
-
-> **_NOTE:_** Replace `<username>` with your GitHub username and `<repo-name>` with the name of your repository in the above commands.
-
-## Code Linting
-
-After making changes to the code, it's essential to ensure it adheres to coding standards. We provide a script to help you with code formatting and linting. Run the following script to automatically fix linting issues:
-
-```bash
-./scripts/lint.sh
-```
-
-## Static Analysis
-
-Static analysis tools [`mypy`](https://mypy.readthedocs.io/en/stable/){.external-link target="_blank"} and [`bandit`](https://bandit.readthedocs.io/en/latest/){.external-link target="_blank"} can help identify potential issues in your code. To run static analysis, use the following script:
-
-```bash
-./scripts/static-analysis.sh
-```
-
-If there are any static analysis errors, resolve them in your code and rerun the script until it passes successfully.
-
-## Viewing AsyncAPI Documentation
-
-The [`FastStream`](https://github.com/airtai/faststream){.external-link target="_blank"} framework supports [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation. To ensure that your changes are reflected in the [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation, follow these steps:
-
-1. Run the following command to view the [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation:
-    ```bash
-    faststream docs serve app.application:app
-    ```
-    This command builds the [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} specification file, generates [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation based on the specification, and serves it at `localhost:8000`.
-
-2. Open your web browser and navigate to `http://localhost:8000` to view the [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation reflecting your changes.
-
-3. To stop the [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation server, press `Ctrl+C`.
-
-## Contributing
-
-Once you have successfully completed all the above steps, you are ready to contribute your changes:
-
-1. Add and commit your changes:
-    ```bash
-    git add .
-    git commit -m "Your commit message"
-    ```
-
-2. Push your changes to GitHub:
-    ```bash
-    git push origin your-branch
-    ```
-
-3. Create a pull request on GitHub.
-
-## Continuous Integration (CI)
-
-This repository is equipped with GitHub Actions that automate static analysis and pytest in the CI pipeline. Even if you forget to perform any of the required steps, CI will catch any issues before merging your changes.
-
-This repository has three workflows, each triggered when code is pushed:
-
-1. **Tests Workflow**: This workflow is named "Tests" and consists of two jobs. The first job runs static analysis tools [`mypy`](https://mypy.readthedocs.io/en/stable/){.external-link target="_blank"} and [`bandit`](https://bandit.readthedocs.io/en/latest/){.external-link target="_blank"} to identify potential issues in the codebase. The second job runs tests using [`pytest`](https://pytest.org/){.external-link target="_blank"} to ensure the functionality of the application. Both jobs run simultaneously to expedite the `CI` process.
-
-2. **Build Docker Image Workflow**: This workflow is named "Build Docker Image" and has one job.
In this job, a [`Docker`](https://www.docker.com/){.external-link target="_blank"} image is built based on the provided Dockerfile. The built image is then pushed to the [**GitHub Container Registry**](https://ghcr.io){.external-link target="_blank"}, making it available for deployment or other purposes.
-
-3. **Deploy FastStream AsyncAPI Docs Workflow**: The final workflow is named "Deploy FastStream AsyncAPI Docs" and also consists of a single job. In this job, the [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation is built from the specification, and the resulting documentation is deployed to [**GitHub Pages**](https://pages.github.com/){.external-link target="_blank"}. This is useful for documenting your API and making it accessible to others.
-
-## Viewing AsyncAPI Documentation Hosted at GitHub Pages
-
-After the **Deploy FastStream AsyncAPI Docs** workflow in `CI` has been successfully completed, the [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation is automatically deployed to [**GitHub Pages**](https://pages.github.com/){.external-link target="_blank"}. This provides a convenient way to access and share the documentation with project stakeholders.
-
-To view the deployed [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation, open your web browser and navigate to the following URL:
-
-```txt
-https://<username>.github.io/<repo-name>/
-```
-
-> **_NOTE:_** Replace `<username>` with your GitHub username and `<repo-name>` with the name of your repository.
-
-You will be directed to the [**GitHub Pages**](https://pages.github.com/){.external-link target="_blank"} site where your [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation is hosted. This hosted documentation allows you to easily share your [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} specifications with others and provides a centralized location for reviewing the [`AsyncAPI`](https://www.asyncapi.com/){.external-link target="_blank"} documentation.
-
-## Deploying Docker Container
-
-Once the **Build Docker Image** workflow in `CI` has successfully completed, the built [`Docker`](https://www.docker.com/){.external-link target="_blank"} image is pushed to the [**GitHub Container Registry**](https://ghcr.io){.external-link target="_blank"}. You can then deploy this image on your server by following these steps:
-
-1. Pull the [`Docker`](https://www.docker.com/){.external-link target="_blank"} image from the [**GitHub Container Registry**](https://ghcr.io){.external-link target="_blank"} to your server using the following command:
-    ```bash
-    docker pull ghcr.io/<username>/<repo-name>:latest
-    ```
-    > **_NOTE:_** Replace `<username>` with your GitHub username and `<repo-name>` with the name of your repository.
-
-2. After successfully pulling the image, start the [`Docker`](https://www.docker.com/){.external-link target="_blank"} container using the following command:
-    ```bash
-    docker run --rm --name faststream-app --env-file /path/to/env-file ghcr.io/<username>/<repo-name>:latest
-    ```
-    `--rm`: This flag removes the container once it stops running, ensuring that it doesn't clutter your system with unused containers.
-    `--name faststream-app`: Assigns a name to the running container, in this case, "faststream-app".
- `--env-file /path/to/env-file`: Specifies the path to an environment file (commonly a `.env` file) that contains environment variables required by your [`FastStream`](https://github.com/airtai/faststream){.external-link target="_blank"} application. Storing secrets and configuration in an environment file is a secure and best practice for handling sensitive information such as Kafka host, port, and authentication details. - -By following these steps, you can easily deploy your [`FastStream`](https://github.com/airtai/faststream){.external-link target="_blank"} application as a [`Docker`](https://www.docker.com/){.external-link target="_blank"} container on your server. Remember to customize the `env-file` and other environment variables as needed to suit your specific application requirements. diff --git a/search/docs/index.md b/search/docs/index.md deleted file mode 100644 index 7969e3e..0000000 --- a/search/docs/index.md +++ /dev/null @@ -1,352 +0,0 @@ ---- -hide: - - navigation - - footer ---- - -# FastStream - -Effortless event stream integration for your services - ---- - -

-
-*Badges: Test Passing, Coverage, Downloads, Package version, Supported Python versions, CodeQL, Dependency Review, License, Code of Conduct, Discord*
-
-
----
-
-## Features
-
-[**FastStream**](https://faststream.airt.ai/) simplifies the process of writing producers and consumers for message queues, handling all the
-parsing, networking and documentation generation automatically.
-
-Making streaming microservices has never been easier. Designed with junior developers in mind, **FastStream** simplifies your work while keeping the door open for more advanced use cases. Here's a look at the core features that make **FastStream** a go-to framework for modern, data-centric microservices.
-
-- **Multiple Brokers**: **FastStream** provides a unified API to work across multiple message brokers (**Kafka**, **RabbitMQ**, **NATS** support)
-
-- [**Pydantic Validation**](#writing-app-code): Leverage [**Pydantic's**](https://docs.pydantic.dev/){.external-link target="_blank"} validation capabilities to serialize and validate incoming messages
-
-- [**Automatic Docs**](#project-documentation): Stay ahead with automatic [**AsyncAPI**](https://www.asyncapi.com/){.external-link target="_blank"} documentation
-
-- **Intuitive**: Fully-typed editor support makes your development experience smooth, catching errors before they reach runtime
-
-- [**Powerful Dependency Injection System**](#dependencies): Manage your service dependencies efficiently with **FastStream**'s built-in DI system
-
-- [**Testable**](#testing-the-service): Supports in-memory tests, making your CI/CD pipeline faster and more reliable
-
-- **Extendable**: Use extensions for lifespans, custom serialization and middlewares
-
-- [**Integrations**](#any-framework): **FastStream** is fully compatible with any HTTP framework you want ([**FastAPI**](#fastapi-plugin) especially)
-
-- [**Built for Automatic Code Generation**](#code-generator): **FastStream** is optimized for automatic code generation using advanced models like GPT and Llama
-
-That's **FastStream** in a nutshell: easy, efficient, and powerful. Whether you're just starting with streaming microservices or looking to scale, **FastStream** has got you covered.
-
----
-
-## History
-
-**FastStream** is a new package based on the ideas and experiences gained from [**FastKafka**](https://github.com/airtai/fastkafka){.external-link target="_blank"} and [**Propan**](https://github.com/lancetnik/propan){.external-link target="_blank"}. By joining our forces, we picked up the best from both packages and created a unified way to write services capable of processing streamed data regardless of the underlying protocol. We'll continue to maintain both packages, but new development will happen in this project. If you are starting a new service, this package is the recommended way to do it.
-
----
-
-## Install
-
-**FastStream** works on **Linux**, **macOS**, **Windows** and most **Unix**-style operating systems.
-You can install it with `pip` as usual:
-
-=== "Kafka"
-    ```sh
-    pip install faststream[kafka]
-    ```
-
-=== "RabbitMQ"
-    ```sh
-    pip install faststream[rabbit]
-    ```
-
-=== "NATS"
-    ```sh
-    pip install faststream[nats]
-    ```
-
-!!! tip ""
-    By default, **FastStream** uses **PydanticV2**, written in **Rust**, but you can downgrade it manually if your platform has no **Rust** support - **FastStream** will work correctly with **PydanticV1** as well.
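-
-    For instance, pinning **PydanticV1** might look like this (an illustrative command, not an officially documented one):
-
-    ```sh
-    # install FastStream while keeping Pydantic below v2 (illustrative)
-    pip install "faststream[kafka]" "pydantic<2"
-    ```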
-
----
-
-## Writing app code
-
-**FastStream** brokers provide convenient function decorators `#!python @broker.subscriber`
-and `#!python @broker.publisher` to allow you to delegate the actual process of:
-
-- consuming and producing data to Event queues, and
-
-- decoding and encoding JSON-encoded messages
-
-These decorators make it easy to specify the processing logic for your consumers and producers, allowing you to focus on the core business logic of your application without worrying about the underlying integration.
-
-Also, **FastStream** uses [**Pydantic**](https://docs.pydantic.dev/){.external-link target="_blank"} to parse input
-JSON-encoded data into Python objects, making it easy to work with structured data in your applications, so you can serialize your input messages just using type annotations.
-
-Here is an example Python app using **FastStream** that consumes data from an incoming data stream and outputs the data to another one:
-
-=== "Kafka"
-    ```python linenums="1" hl_lines="9"
-    {!> docs_src/index/basic_kafka.py!}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1" hl_lines="9"
-    {!> docs_src/index/basic_rabbit.py!}
-    ```
-
-=== "NATS"
-    ```python linenums="1" hl_lines="9"
-    {!> docs_src/index/basic_nats.py!}
-    ```
-
-Also, **Pydantic**'s [`BaseModel`](https://docs.pydantic.dev/usage/models/){.external-link target="_blank"} class allows you
-to define messages using a declarative syntax, making it easy to specify the fields and types of your messages.
-
-=== "Kafka"
-    ```python linenums="1" hl_lines="1 8 14"
-    {!> docs_src/index/pydantic_kafka.py !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1" hl_lines="1 8 14"
-    {!> docs_src/index/pydantic_rabbit.py !}
-    ```
-
-=== "NATS"
-    ```python linenums="1" hl_lines="1 8 14"
-    {!> docs_src/index/pydantic_nats.py !}
-    ```
-
----
-
-## Testing the service
-
-The service can be tested using the `TestBroker` context managers, which, by default, put the Broker into "testing mode".
-
-The Tester will redirect your `subscriber` and `publisher` decorated functions to the InMemory brokers, allowing you to quickly test your app without the need for a running broker and all its dependencies.
-
-Using pytest, the test for our service would look like this:
-
-=== "Kafka"
-    ```python linenums="1" hl_lines="5 10 18-19"
-    # Code above omitted 👆
-
-    {!> docs_src/index/test_kafka.py [ln:3-21] !}
-    ```
-
-=== "RabbitMQ"
-    ```python linenums="1" hl_lines="5 10 18-19"
-    # Code above omitted 👆
-
-    {!> docs_src/index/test_rabbit.py [ln:3-21] !}
-    ```
-
-=== "NATS"
-    ```python linenums="1" hl_lines="5 10 18-19"
-    # Code above omitted 👆
-
-    {!> docs_src/index/test_nats.py [ln:3-21] !}
-    ```
-
-## Running the application
-
-The application can be started using the built-in **FastStream** CLI command.
-
-To run the service, use the **FastStream CLI** command and pass the module (in this case, the file where the app implementation is located) and the app symbol to the command.
-
-``` shell
-faststream run basic:app
-```
-
-After running the command, you should see the following output:
-
-``` shell
-INFO - FastStream app starting...
-INFO - input_data | - `HandleMsg` waiting for messages
-INFO - FastStream app started successfully! To exit press CTRL+C
-```
-
-Also, **FastStream** provides you with a great hot reload feature to improve your Development Experience:
-
-``` shell
-faststream run basic:app --reload
-```
-
-And a multiprocessing horizontal scaling feature as well:
-
-``` shell
-faststream run basic:app --workers 3
-```
-
-You can learn more about **CLI** features [here](./getting-started/cli/index.md){.internal-link}
-
----
-
-## Project Documentation
-
-**FastStream** automatically generates documentation for your project according to the [**AsyncAPI**](https://www.asyncapi.com/){.external-link target="_blank"} specification. You can work with both generated artifacts and place a web view of your documentation on resources available to related teams.
-
-The availability of such documentation significantly simplifies the integration of services: you can immediately see what channels and message formats the application works with. And most importantly, it won't cost anything - **FastStream** has already created the docs for you!
-
-![HTML-page](../assets/img/AsyncAPI-basic-html-short.png)
-
----
-
-## Dependencies
-
-**FastStream** (thanks to [**FastDepends**](https://lancetnik.github.io/FastDepends/){.external-link target="_blank"}) has a dependency management system similar to `pytest fixtures` and `FastAPI Depends` at the same time. Function arguments declare the dependencies you need, and a special decorator delivers them from the global Context object.
-
-```python linenums="1" hl_lines="9-10"
-from faststream import Depends, Logger
-
-async def base_dep(user_id: int) -> bool:
-    return True
-
-@broker.subscriber("in-test")
-async def base_handler(user: str,
-                       logger: Logger,
-                       dep: bool = Depends(base_dep)):
-    assert dep is True
-    logger.info(user)
-```
-
----
-
-## HTTP Frameworks integrations
-
-### Any Framework
-
-You can use **FastStream** `MQBrokers` without a `FastStream` application.
-Just *start* and *stop* them according to your application's lifespan.
-
-{! includes/index/integrations.md !}
-
-### **FastAPI** Plugin
-
-Also, **FastStream** can be used as part of **FastAPI**.
-
-Just import a **StreamRouter** you need and declare the message handler with the same `#!python @router.subscriber(...)` and `#!python @router.publisher(...)` decorators.
-
-!!! tip
-    When used this way, **FastStream** does not utilize its own dependency and serialization system but integrates seamlessly into **FastAPI**.
-    This means you can use `Depends`, `BackgroundTasks` and other **FastAPI** tools as if it were a regular HTTP endpoint.
-
-{! includes/getting_started/integrations/fastapi/1.md !}
-
-!!! note
-    More integration features can be found [here](./getting-started/integrations/fastapi/index.md){.internal-link}
-
----
-
-## Code generator
-
-As evident, **FastStream** is an incredibly user-friendly framework. However, we've taken it a step further and made it even more user-friendly! Introducing [**faststream-gen**](https://faststream-gen.airt.ai){.external-link target="_blank"}, a Python library that harnesses the power of generative AI to effortlessly generate **FastStream** applications. Simply describe your application requirements, and [**faststream-gen**](https://faststream-gen.airt.ai){.external-link target="_blank"} will generate a production-grade **FastStream** project that is ready to deploy in no time.
-
-Save the application description inside `description.txt`:
-
-```
-Create a FastStream application using localhost broker for testing and use the
-default port number.
-
-It should consume messages from the 'input_data' topic, where each message is a
-JSON encoded object containing a single attribute: 'data'.
-
-While consuming from the topic, increment the value of the data attribute by 1.
-
-Finally, send the message to the 'output_data' topic.
-```
-
-and run the following command to create a new **FastStream** project:
-
-``` shell
-faststream_gen -i description.txt
-```
-
-``` shell
-✨ Generating a new FastStream application!
- ✔ Application description validated.
- ✔ FastStream app skeleton code generated (takes around 15 to 45 seconds)...
- ✔ The app and the tests are generated (takes around 30 to 90 seconds)...
- ✔ New FastStream project created.
- ✔ Integration tests were successfully completed.
- Tokens used: 10768
- Total Cost (USD): $0.03284
-✨ All files were successfully generated!
-```
-
-### Tutorial
-
-We also invite you to explore our tutorial, where we will guide you through the process of utilizing the [**faststream-gen**](https://faststream-gen.airt.ai){.external-link target="_blank"} Python library to effortlessly create **FastStream** applications:
-
-- [Cryptocurrency analysis with FastStream](https://faststream-gen.airt.ai/Tutorial/Cryptocurrency_Tutorial/){.external-link target="_blank"}
-
----
-
-## Stay in touch
-
-Please show your support and stay in touch by:
-
-- giving our [GitHub repository](https://github.com/airtai/faststream/){.external-link target="_blank"} a star, and
-
-- joining our [Discord server](https://discord.gg/CJWmYpyFbc){.external-link target="_blank"}
-
-Your support helps us to stay in touch with you and encourages us to
-continue developing and improving the framework. Thank you for your
-support!
-
----
-
-## Contributors
-
-Thanks to all of these amazing people who made the project better!
-
-
diff --git a/search/docs/kafka/Publisher/batch_publisher.md b/search/docs/kafka/Publisher/batch_publisher.md
deleted file mode 100644
index 5697c91..0000000
--- a/search/docs/kafka/Publisher/batch_publisher.md
+++ /dev/null
@@ -1,96 +0,0 @@
-# Publishing in Batches
-
-## General overview
-
-If you need to send your data in batches, the `#!python @broker.publisher(...)` decorator offers a convenient way to achieve this. To enable batch production, you need to perform two crucial steps:
-
-Step 1: When creating your publisher, set the `batch` argument to `True`. This configuration tells the publisher that you intend to send messages in batches.
-
-Step 2: In your producer function, return a tuple containing the messages you want to send as a batch. This action triggers the producer to gather the messages and transmit them as a batch to a Kafka broker.
-
-Let's delve into a detailed example illustrating how to produce messages in batches to the `output_data` topic while consuming from the `input_data_1` topic.
-
-## Code example
-
-First, let's take a look at the whole app creation and then dive deep into the steps for producing in batches. Here is the application code:
-
-```python linenums="1"
-from typing import Tuple
-
-from pydantic import BaseModel, Field, NonNegativeFloat
-
-from faststream import FastStream, Logger
-from faststream.kafka import KafkaBroker
-
-
-class Data(BaseModel):
-    data: NonNegativeFloat = Field(
-        ..., examples=[0.5], description="Float data example"
-    )
-
-
-broker = KafkaBroker("localhost:9092")
-app = FastStream(broker)
-
-
-decrease_and_increase = broker.publisher("output_data", batch=True)
-
-
-@decrease_and_increase
-@broker.subscriber("input_data_1")
-async def on_input_data_1(msg: Data, logger: Logger) -> Tuple[Data, Data]:
-    logger.info(msg)
-    return Data(data=(msg.data * 0.5)), Data(data=(msg.data * 2.0))
-
-
-@broker.subscriber("input_data_2")
-async def on_input_data_2(msg: Data, logger: Logger) -> None:
-    logger.info(msg)
-    await decrease_and_increase.publish(
-        Data(data=(msg.data * 0.5)), Data(data=(msg.data * 2.0))
-    )
-```
-
-Below, we have highlighted key lines of code that demonstrate the steps involved in creating and using a batch publisher:
-
-Step 1: Creation of the Publisher
-
-```python linenums="1"
-decrease_and_increase = broker.publisher("output_data", batch=True)
-```
-
-Step 2: Publishing an Actual Batch of Messages
-
-You can publish a batch by directly calling the publisher with the batch of messages you want to publish, as shown here:
-
-```python linenums="1"
-    await decrease_and_increase.publish(
-        Data(data=(msg.data * 0.5)), Data(data=(msg.data * 2.0))
-    )
-```
-
-Or you can decorate your processing function and return a batch of messages, as shown here:
-
-```python linenums="1"
-@decrease_and_increase
-@broker.subscriber("input_data_1")
-async def on_input_data_1(msg: Data, logger: Logger) -> Tuple[Data, Data]:
-    logger.info(msg)
-    return Data(data=(msg.data * 0.5)), Data(data=(msg.data * 2.0))
-```
-
-The application in the example implements both of these approaches; feel free to use whichever option fits your needs better.
-
-## Why publish in batches?
-
-In this example, we've explored how to leverage the `#!python @broker.publisher(...)` decorator to efficiently publish messages in batches using FastStream and Kafka. By following the two key steps outlined in the previous sections, you can significantly enhance the performance and reliability of your Kafka-based applications.
-
-Publishing messages in batches offers several advantages when working with Kafka:
-
-1. Improved Throughput: Batch publishing allows you to send multiple messages in a single transmission, reducing the overhead associated with individual message delivery. This leads to improved throughput and lower latency in your Kafka applications.
-
-2. Reduced Network and Broker Load: Sending messages in batches reduces the number of network calls and broker interactions. This optimization minimizes the load on the Kafka brokers and network resources, making your Kafka cluster more efficient.
-
-3. Atomicity: Batches ensure that a group of related messages is processed together or not at all. This atomicity can be crucial in scenarios where message processing needs to maintain data consistency and integrity.
-
-4. Enhanced Scalability: With batch publishing, you can efficiently scale your Kafka applications to handle high message volumes. By sending messages in larger chunks, you can make the most of Kafka's parallelism and partitioning capabilities.
diff --git a/search/docs/kafka/Publisher/index.md b/search/docs/kafka/Publisher/index.md
deleted file mode 100644
index 5e34d73..0000000
--- a/search/docs/kafka/Publisher/index.md
+++ /dev/null
@@ -1,126 +0,0 @@
-# Publishing
-
-The **FastStream** KafkaBroker supports all regular [publishing use cases](../../getting-started/publishing/index.md){.internal-link}, and you can use them without any changes.
-
-However, if you wish to further customize the publishing logic, you should take a closer look at specific KafkaBroker parameters.
-
-## Basic Kafka Publishing
-
-The `KafkaBroker` uses the unified `publish` method (from a `producer` object) to send messages.
-
-In this case, you can use Python primitives and `pydantic.BaseModel` to define the content of the message you want to publish to the Kafka broker.
-
-You can specify the topic to send to by its name.
-
-1. Create your KafkaBroker instance:
-
-```python linenums="1"
-broker = KafkaBroker("localhost:9092")
-```
-
-2. Publish a message using the `publish` method:
-
-```python linenums="1"
-    msg = Data(data=0.5)
-
-    await broker.publish(
-        model_to_json(msg),
-        "input_data",
-        headers={"content-type": "application/json"},
-    )
-```
-
-This is the most basic way of using the KafkaBroker to publish a message.
-
-## Creating a publisher object
-
-The simplest way to use a KafkaBroker for publishing has a significant limitation: your publishers won't be documented in the AsyncAPI documentation. This might be acceptable for sending occasional one-off messages. However, if you're building a comprehensive service, it's recommended to create publisher objects. These objects can then be parsed and documented in your service's AsyncAPI documentation. Let's go ahead and create those publisher objects!
-
-1. Create your KafkaBroker instance:
-
-```python linenums="1"
-broker = KafkaBroker("localhost:9092")
-```
-
-2. Create a publisher instance:
-
-```python linenums="1"
-prepared_publisher = broker.publisher("input_data")
-```
-
-3. Publish a message using the `publish` method of the prepared publisher:
-
-```python linenums="1"
-    msg = Data(data=0.5)
-
-    await prepared_publisher.publish(
-        model_to_json(msg),
-        headers={"content-type": "application/json"},
-    )
-```
-
-Now, when you wrap your broker into a FastStream object, the publisher will be exported to the AsyncAPI documentation.
-
-## Decorating your publishing functions
-
-To publish messages effectively in the Kafka context, consider utilizing the Publisher Decorator. This approach offers an AsyncAPI representation and is ideal for rapidly developing applications.
-
-The Publisher Decorator creates a structured DataPipeline unit with both input and output components. The sequence in which you apply the Subscriber and Publisher decorators does not affect their functionality. However, note that these decorators can only be applied to functions that are also decorated with a Subscriber.
-
-This method relies on the return type annotation of the handler function to properly interpret the function's return value before sending it. Hence, it's important to ensure accuracy in defining the return type.
-
-Let's start by examining the entire application that utilizes the Publisher Decorator and then proceed to walk through it step by step.

```python linenums="1"
from pydantic import BaseModel, Field, NonNegativeFloat

from faststream import FastStream
from faststream.kafka import KafkaBroker


class Data(BaseModel):
    data: NonNegativeFloat = Field(
        ..., examples=[0.5], description="Float data example"
    )


broker = KafkaBroker("localhost:9092")
app = FastStream(broker)


to_output_data = broker.publisher("output_data")


@to_output_data
@broker.subscriber("input_data")
async def on_input_data(msg: Data) -> Data:
    return Data(data=msg.data + 1.0)
```

1. **Initialize the KafkaBroker instance:** Start by initializing a KafkaBroker instance with the necessary configuration, including the Kafka broker address.

```python linenums="1"
broker = KafkaBroker("localhost:9092")
```

2. **Prepare your publisher object to use later as a decorator:**

```python linenums="1"
to_output_data = broker.publisher("output_data")
```

3. **Create your processing logic:** Write a function that will consume the incoming messages in the defined format and produce a response to the defined topic.

```python linenums="1"
async def on_input_data(msg: Data) -> Data:
    return Data(data=msg.data + 1.0)
```

4. **Decorate your processing function:** To connect your processing function to the desired Kafka topics, you need to decorate it with the `#!python @broker.subscriber` and `#!python @broker.publisher` decorators. Now, after you start your application, your processing function will be called whenever a new message is available in the subscribed topic, and the function's return value will be produced to the topic defined in the publisher decorator.

```python linenums="1"
@to_output_data
@broker.subscriber("input_data")
async def on_input_data(msg: Data) -> Data:
    return Data(data=msg.data + 1.0)
```
diff --git a/search/docs/kafka/Publisher/using_a_key.md b/search/docs/kafka/Publisher/using_a_key.md
deleted file mode 100644
index 73faef6..0000000
--- a/search/docs/kafka/Publisher/using_a_key.md
+++ /dev/null
@@ -1,61 +0,0 @@
# Using a Partition Key

Partition keys are a crucial concept in Apache Kafka, enabling you to determine the appropriate partition for a message. This ensures that related messages are kept together in the same partition, which can be invaluable for maintaining order or grouping related messages for efficient processing. Additionally, Kafka utilizes partitioning to distribute load across multiple brokers and scale horizontally, while replicating data across brokers provides fault tolerance.

You can specify your partition keys when utilizing the `@KafkaBroker.publisher(...)` decorator in FastStream. This guide will walk you through the process of using partition keys effectively.

## Publishing with a Partition Key

To publish a message to a Kafka topic using a partition key, follow these steps:

### Step 1: Define the Publisher

In your FastStream application, define the publisher using the `@KafkaBroker.publisher(...)` decorator. This decorator allows you to configure various aspects of message publishing, including the partition key.

```python linenums="1"
to_output_data = broker.publisher("output_data")
```

### Step 2: Pass the Key

When you're ready to publish a message with a specific key, simply include the `key` parameter in the `publish` function call. This key parameter is used to determine the appropriate partition for the message.

```python linenums="1"
    await to_output_data.publish(Data(data=msg.data + 1.0), key=b"key")
```

## Example Application

Let's examine a complete application example that consumes messages from the **input_data** topic and publishes them with a specified key to the **output_data** topic. This example will illustrate how to incorporate partition keys into your Kafka-based applications:

```python linenums="1"
from pydantic import BaseModel, Field, NonNegativeFloat

from faststream import Context, FastStream, Logger
from faststream.kafka import KafkaBroker


class Data(BaseModel):
    data: NonNegativeFloat = Field(
        ..., examples=[0.5], description="Float data example"
    )


broker = KafkaBroker("localhost:9092")
app = FastStream(broker)


to_output_data = broker.publisher("output_data")


@broker.subscriber("input_data")
async def on_input_data(
    msg: Data, logger: Logger, key: bytes = Context("message.raw_message.key")
) -> None:
    logger.info(f"on_input_data({msg=})")
    await to_output_data.publish(Data(data=msg.data + 1.0), key=b"key")
```

As you can see, the primary difference from standard publishing is the inclusion of the `key` parameter in the `publish` call. This key parameter is essential for controlling how Kafka partitions and processes your messages.

In summary, using partition keys in Apache Kafka is a fundamental practice for optimizing message distribution, maintaining order, and achieving efficient processing. It is a key technique for ensuring that your Kafka-based applications scale gracefully and handle large volumes of data effectively.
diff --git a/search/docs/kafka/Subscriber/batch_subscriber.md b/search/docs/kafka/Subscriber/batch_subscriber.md
deleted file mode 100644
index 3a207fd..0000000
--- a/search/docs/kafka/Subscriber/batch_subscriber.md
+++ /dev/null
@@ -1,58 +0,0 @@
# Batch Subscriber

If you want to consume data in batches, the `@broker.subscriber(...)` decorator makes it possible. By defining your consumed `msg` object as a list of messages and setting the `batch` parameter to `True`, the subscriber will call your consuming function with a batch of messages consumed from a single partition. Let's walk through how to achieve this.

## Using the Subscriber with Batching

To consume messages in batches, follow these steps:

### Step 1: Define Your Subscriber

In your FastStream application, define the subscriber using the `@broker.subscriber(...)` decorator. Ensure that you configure the `msg` object as a list and set the `batch` parameter to `True`. This configuration tells the subscriber to handle message consumption in batches.

```python linenums="1"
@broker.subscriber("test_batch", batch=True)
```

### Step 2: Implement Your Consuming Function

Create a consuming function that accepts the list of messages. The `@broker.subscriber(...)` decorator will take care of collecting and grouping messages into batches based on the partition.

```python linenums="1"
@broker.subscriber("test_batch", batch=True)
async def handle_batch(msg: List[HelloWorld], logger: Logger):
    logger.info(msg)
```

## Example of Consuming in Batches

Let's illustrate how to consume messages in batches from the **test_batch** topic with a practical example:

```python linenums="1"
from typing import List

from pydantic import BaseModel, Field

from faststream import FastStream, Logger
from faststream.kafka import KafkaBroker

broker = KafkaBroker("localhost:9092")
app = FastStream(broker)


class HelloWorld(BaseModel):
    msg: str = Field(
        ...,
        examples=["Hello"],
        description="Demo hello world message",
    )


@broker.subscriber("test_batch", batch=True)
async def handle_batch(msg: List[HelloWorld], logger: Logger):
    logger.info(msg)
```

In this example, the subscriber is configured to process messages in batches, and the consuming function is designed to handle these batches efficiently.

Consuming messages in batches is a valuable technique when you need to optimize the processing of high volumes of data in your Kafka-based applications. It allows for more efficient resource utilization and can enhance the overall performance of your data pipelines.
diff --git a/search/docs/kafka/Subscriber/index.md b/search/docs/kafka/Subscriber/index.md
deleted file mode 100644
index bbeaf90..0000000
--- a/search/docs/kafka/Subscriber/index.md
+++ /dev/null
@@ -1,78 +0,0 @@
# Basic Subscriber

To start consuming from a Kafka topic, just decorate your consuming function with a `#!python @broker.subscriber(...)` decorator, passing a string as a topic key.

In the following example, we will create a simple FastStream app that will consume `HelloWorld` messages from a **hello_world** topic.

The full app code looks like this:

```python linenums="1"
from pydantic import BaseModel, Field

from faststream import FastStream, Logger
from faststream.kafka import KafkaBroker


class HelloWorld(BaseModel):
    msg: str = Field(
        ...,
        examples=["Hello"],
        description="Demo hello world message",
    )


broker = KafkaBroker("localhost:9092")
app = FastStream(broker)


@broker.subscriber("hello_world")
async def on_hello_world(msg: HelloWorld, logger: Logger):
    logger.info(msg)
```

## Import FastStream and KafkaBroker

To use the `#!python @broker.subscriber(...)` decorator, we first need to import `FastStream` to create our app and `KafkaBroker` to create our broker.

```python linenums="1"
from faststream import FastStream, Logger
from faststream.kafka import KafkaBroker
```

## Define the HelloWorld Message Structure

Next, you need to define the structure of the messages you want to consume from the topic using Pydantic. For the guide, we'll stick to something basic, but you are free to define any complex message structure you wish in your project.

```python linenums="1"
class HelloWorld(BaseModel):
    msg: str = Field(
        ...,
        examples=["Hello"],
        description="Demo hello world message",
    )
```

## Create a KafkaBroker

Next, we will create a `KafkaBroker` object and wrap it into the `FastStream` object so that we can start our app using CLI later.

```python linenums="1"
broker = KafkaBroker("localhost:9092")
app = FastStream(broker)
```

## Create a Function that will Consume Messages from a Kafka hello_world Topic

Let's create a consumer function that will consume `HelloWorld` messages from the **hello_world** topic and log them.

```python linenums="1"
@broker.subscriber("hello_world")
async def on_hello_world(msg: HelloWorld, logger: Logger):
    logger.info(msg)
```

The function decorated with the `#!python @broker.subscriber(...)` decorator will be called when a message is produced to Kafka.

The message will then be injected into the typed `msg` argument of the function, and its type will be used to parse the message.

In this example case, when the message is sent to a **hello_world** topic, it will be parsed into a `HelloWorld` class, and the `on_hello_world` function will be called with the parsed class as the `msg` argument value.
diff --git a/search/docs/kafka/ack.md b/search/docs/kafka/ack.md
deleted file mode 100644
index 46ad0b7..0000000
--- a/search/docs/kafka/ack.md
+++ /dev/null
@@ -1,88 +0,0 @@
# Consuming Acknowledgements

As you may know, a *Kafka* consumer should commit the topic offset when consuming a message.

The default behavior, also implemented as such in **FastStream**, automatically commits (*acks*) the topic offset on message consumption. This is the *at most once* consuming strategy.

However, if you wish to use the *at least once* strategy, you should commit the offset *AFTER* the message is processed correctly. To accomplish that, set a consumer group and disable the `auto_commit` option like this:

```python
@broker.subscriber(
    "test", group_id="group", auto_commit=False
)
async def base_handler(body: str):
    ...
```

This way, upon successful return of the processing function, the message processed will be acknowledged. In the case of an exception being raised, the message will not be acknowledged.

However, there are situations where you might want to use a different acknowledgement logic.

## Manual Acknowledgement

If you want to acknowledge a message manually, you can get direct access to the message object via the [Context](../getting-started/context/existed.md){.internal-link} and acknowledge the message by calling the `ack` method:

```python
from faststream.kafka.annotations import KafkaMessage


@broker.subscriber(
    "test", group_id="group", auto_commit=False
)
async def base_handler(body: str, msg: KafkaMessage):
    await msg.ack()
    # or
    await msg.nack()
```

!!! tip
    You can use the `nack` method to prevent the offset commit, so the message can be consumed by another consumer within the same group.

**FastStream** will see that the message was already acknowledged and will do nothing at the end of the process.

## Interrupt Process

If you wish to interrupt the processing of a message at any call stack level and acknowledge the message, you can achieve that by raising `faststream.exceptions.AckMessage`.

``` python linenums="1" hl_lines="2 18"
from faststream import FastStream
from faststream.exceptions import AckMessage
from faststream.kafka import KafkaBroker

broker = KafkaBroker("localhost:9092")
app = FastStream(broker)


@broker.subscriber(
    "test-topic", group_id="test-group", auto_commit=False
)
async def handle(body):
    smth_processing(body)


def smth_processing(body):
    if True:
        raise AckMessage()


@app.after_startup
async def test_publishing():
    await broker.publish("Hello!", "test-topic")
```

This way, **FastStream** interrupts the current message processing and acknowledges it immediately. Similarly, you can raise `NackMessage` as well to prevent the message from being committed.
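
As an illustration of that second case, here is a minimal sketch of raising `NackMessage` when processing fails, so the offset is not committed and the message stays available for the group; the topic name and the `risky_processing` helper are hypothetical:

```python
from faststream.exceptions import NackMessage


def risky_processing(body: str) -> None:
    ...  # hypothetical processing step that may raise


@broker.subscriber(
    "unstable-topic", group_id="group", auto_commit=False
)
async def handle_unstable(body: str):
    try:
        risky_processing(body)
    except ValueError:
        # skip the offset commit so the message can be
        # consumed again within the same group
        raise NackMessage()
```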
diff --git a/search/docs/kafka/index.md b/search/docs/kafka/index.md
deleted file mode 100644
index a879655..0000000
--- a/search/docs/kafka/index.md
+++ /dev/null
@@ -1,61 +0,0 @@
# Kafka Routing

## Kafka Overview

### What is Kafka?

[Kafka](https://kafka.apache.org/){.external-link target="_blank"} is an open-source distributed streaming platform developed by the Apache Software Foundation. It is designed to handle high-throughput, fault-tolerant, real-time data streaming. Kafka is widely used for building real-time data pipelines and streaming applications.

### Key Kafka Concepts

#### 1. Publish-Subscribe Model

Kafka is built around the publish-subscribe messaging model. In this model, data is published to topics, and multiple consumers can subscribe to these topics to receive the data. This decouples the producers of data from the consumers, allowing for flexibility and scalability.

#### 2. Topics

A **topic** in Kafka is a logical channel or category to which messages are published by producers and from which messages are consumed by consumers. Topics are used to organize and categorize data streams. Each topic can have multiple **partitions**, which enable Kafka to distribute data and provide parallelism for both producers and consumers.

## Kafka Topics

### Understanding Kafka Topics

Topics are fundamental to Kafka and serve as the central point of data distribution. Here are some key points about topics:

- Topics allow you to logically group and categorize messages.
- Each message sent to Kafka is associated with a specific topic.
- Topics can have one or more partitions to enable parallel processing and scaling.
- Consumers subscribe to topics to receive messages.

### FastStream KafkaBroker

The FastStream KafkaBroker is a key component of the FastStream framework that enables seamless integration with Apache Kafka. With the KafkaBroker, developers can easily connect to Kafka brokers, produce messages to Kafka topics, and consume messages from Kafka topics within their FastStream applications.

### Establishing a Connection

To connect to Kafka using the FastStream KafkaBroker module, follow these steps:

1. **Initialize the KafkaBroker instance:** Start by initializing a KafkaBroker instance with the necessary configuration, including the Kafka broker address.

2. **Create your processing logic:** Write a function that will consume the incoming messages in the defined format and produce a response to the defined topic.

3. **Decorate your processing function:** To connect your processing function to the desired Kafka topics, you need to decorate it with the `#!python @broker.subscriber` and `#!python @broker.publisher` decorators. Now, after you start your application, your processing function will be called whenever a new message is available in the subscribed topic, and the function's return value will be produced to the topic defined in the publisher decorator.

Here's a simplified code example demonstrating how to establish a connection to Kafka using FastStream's KafkaBroker module:

```python linenums="1"
from faststream import FastStream
from faststream.kafka import KafkaBroker

broker = KafkaBroker("localhost:9092")
app = FastStream(broker)

@broker.subscriber("in-topic")
@broker.publisher("out-topic")
async def handle_msg(user: str, user_id: int) -> str:
    return f"User: {user_id} - {user} registered"
```

This minimal example illustrates how FastStream simplifies the process of connecting to Kafka and performing basic message processing from the **in-topic** to the **out-topic**. Depending on your specific use case and requirements, you can further customize your Kafka integration with FastStream to build robust and efficient streaming applications.

For more advanced configuration options and detailed usage instructions, please refer to the FastStream Kafka documentation and the [official Kafka documentation](https://kafka.apache.org/){.external-link target="_blank"}.
diff --git a/search/docs/kafka/message.md b/search/docs/kafka/message.md
deleted file mode 100644
index babf867..0000000
--- a/search/docs/kafka/message.md
+++ /dev/null
@@ -1,53 +0,0 @@
# Access to Message Information

As you may know, **FastStream** serializes a message body and provides you access to it through function arguments. However, there are times when you need to access additional message attributes such as offsets, headers, or other metadata.

## Message Access

You can easily access this information by referring to the message object in the [Context](../getting-started/context/existed.md)!

This object serves as a unified **FastStream** wrapper around the native broker library message (for example, `aiokafka.ConsumerRecord` in the case of *Kafka*). It contains most of the required information, including:

* `#!python body: bytes`
* `#!python checksum: int`
* `#!python headers: Sequence[Tuple[str, bytes]]`
* `#!python key: Optional[aiokafka.structs.KT]`
* `#!python offset: int`
* `#!python partition: int`
* `#!python serialized_key_size: int`
* `#!python serialized_value_size: int`
* `#!python timestamp: int`
* `#!python timestamp_type: int`
* `#!python topic: str`
* `#!python value: Optional[aiokafka.structs.VT]`

For example, if you would like to access the headers of an incoming message, you would do so like this:

```python hl_lines="1 6"
from faststream.kafka.annotations import KafkaMessage

@broker.subscriber("test")
async def base_handler(
    body: str,
    msg: KafkaMessage,
):
    print(msg.headers)
```

## Message Fields Access

In most cases, you don't need all message fields; you only need to know a part of them.
You can use the [Context Fields access](../getting-started/context/fields.md) feature for this.

For example, you can get access to the `headers` like this:

```python hl_lines="6"
from faststream import Context

@broker.subscriber("test")
async def base_handler(
    body: str,
    headers: str = Context("message.headers"),
):
    print(headers)
```
diff --git a/search/docs/nats/examples/direct.md b/search/docs/nats/examples/direct.md
deleted file mode 100644
index 856f66f..0000000
--- a/search/docs/nats/examples/direct.md
+++ /dev/null
@@ -1,96 +0,0 @@
# Direct

The **Direct** Subject is the basic way to route messages in *NATS*. Its essence is very simple:
a `subject` sends messages to all consumers subscribed to it.
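
For instance, without any queue group, every subscription receives its own copy of the message. Here is a minimal sketch (the subject and handler names are illustrative):

```python
from faststream import FastStream, Logger
from faststream.nats import NatsBroker

broker = NatsBroker()
app = FastStream(broker)


@broker.subscriber("demo-subj")
async def first(logger: Logger):
    logger.info("first consumer")


@broker.subscriber("demo-subj")
async def second(logger: Logger):
    logger.info("second consumer")


@app.after_startup
async def send():
    # with no queue group, BOTH handlers receive this message
    await broker.publish("", "demo-subj")
```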

## Scaling

If one `subject` is being listened to by several consumers with the same `queue group`, the message will go to a random consumer each time.

Thus, *NATS* can independently balance the load on queue consumers. You can increase the processing speed of the message flow from the queue by simply launching additional instances of the consumer service. You don't need to make changes to the current infrastructure configuration: *NATS* will take care of how to distribute messages between your services.

## Example

The **Direct** Subject is the type used in **FastStream** by default: you can simply declare it as follows

```python
@broker.subscriber("test_subject")
async def handler():
    ...
```

Full example:

```python linenums="1"
from faststream import FastStream, Logger
from faststream.nats import NatsBroker

broker = NatsBroker()
app = FastStream(broker)

@broker.subscriber("test-subj-1", "workers")
async def base_handler1(logger: Logger):
    logger.info("base_handler1")

@broker.subscriber("test-subj-1", "workers")
async def base_handler2(logger: Logger):
    logger.info("base_handler2")

@broker.subscriber("test-subj-2", "workers")
async def base_handler3(logger: Logger):
    logger.info("base_handler3")

@app.after_startup
async def send_messages():
    await broker.publish("", "test-subj-1")  # handlers: 1 or 2
    await broker.publish("", "test-subj-1")  # handlers: 1 or 2
    await broker.publish("", "test-subj-2")  # handlers: 3
```

### Consumer Announcement

To begin with, we have declared several consumers for two `subjects`: `test-subj-1` and `test-subj-2`:

```python linenums="7" hl_lines="1 5 9"
@broker.subscriber("test-subj-1", "workers")
async def base_handler1(logger: Logger):
    logger.info("base_handler1")

@broker.subscriber("test-subj-1", "workers")
async def base_handler2(logger: Logger):
    logger.info("base_handler2")

@broker.subscriber("test-subj-2", "workers")
async def base_handler3(logger: Logger):
    logger.info("base_handler3")
```

!!! note
    Note that all consumers are subscribed using the same `queue_group`. Within the same service, this does not make sense, since messages will come to these handlers in turn.
    Here, we emulate the work of several consumers and load balancing between them.

### Message Distribution

Now the distribution of messages between these consumers will look like this:

```python
    await broker.publish("", "test-subj-1")  # handlers: 1 or 2
```

Message `1` will be sent to `handler1` or `handler2` because they are listening to one `subject` within one `queue group`.

---

```python
    await broker.publish("", "test-subj-1")  # handlers: 1 or 2
```

Message `2` will be sent similarly to message `1`.

---

```python
    await broker.publish("", "test-subj-2")  # handlers: 3
```

Message `3` will be sent to `handler3` because it is the only one listening to `test-subj-2`.
diff --git a/search/docs/nats/examples/pattern.md b/search/docs/nats/examples/pattern.md
deleted file mode 100644
index 4802aca..0000000
--- a/search/docs/nats/examples/pattern.md
+++ /dev/null
@@ -1,87 +0,0 @@
# Pattern

[**Pattern**](https://docs.nats.io/nats-concepts/subjects#wildcards){.external-link target="_blank"} Subject is a powerful *NATS* routing engine. This type of `subject` routes messages to consumers based on the *pattern* specified when they connect to the `subject` and a message key.
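
In *NATS*, subjects are dot-separated tokens, and patterns use two wildcards: `*` matches exactly one token, while `>` matches one or more trailing tokens. A minimal sketch to make the difference concrete (the subject names are illustrative):

```python
from faststream import FastStream, Logger
from faststream.nats import NatsBroker

broker = NatsBroker()
app = FastStream(broker)


@broker.subscriber("logs.*")
async def one_level(logger: Logger):
    # matches "logs.info" or "logs.error", but NOT "logs.app.error"
    logger.info("one token after 'logs.'")


@broker.subscriber("logs.>")
async def any_depth(logger: Logger):
    # matches "logs.info" and "logs.app.error" alike
    logger.info("one or more tokens after 'logs.'")
```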

## Scaling

If one `subject` is being listened to by several consumers with the same `queue group`, the message will go to a random consumer each time.

Thus, *NATS* can independently balance the load on queue consumers. You can increase the processing speed of the message flow from the queue by simply launching additional instances of the consumer service. You don't need to make changes to the current infrastructure configuration: *NATS* will take care of how to distribute messages between your services.

## Example

```python linenums="1"
from faststream import FastStream, Logger
from faststream.nats import NatsBroker

broker = NatsBroker()
app = FastStream(broker)

@broker.subscriber("*.info", "workers")
async def base_handler1(logger: Logger):
    logger.info("base_handler1")

@broker.subscriber("*.info", "workers")
async def base_handler2(logger: Logger):
    logger.info("base_handler2")

@broker.subscriber("*.error", "workers")
async def base_handler3(logger: Logger):
    logger.info("base_handler3")

@app.after_startup
async def send_messages():
    await broker.publish("", "logs.info")  # handlers: 1 or 2
    await broker.publish("", "logs.info")  # handlers: 1 or 2
    await broker.publish("", "logs.error")  # handlers: 3
```

### Consumer Announcement

To begin with, we have announced several consumers for two `subjects`: `*.info` and `*.error`:

```python linenums="7" hl_lines="1 5 9"
@broker.subscriber("*.info", "workers")
async def base_handler1(logger: Logger):
    logger.info("base_handler1")

@broker.subscriber("*.info", "workers")
async def base_handler2(logger: Logger):
    logger.info("base_handler2")

@broker.subscriber("*.error", "workers")
async def base_handler3(logger: Logger):
    logger.info("base_handler3")
```

At the same time, in the `subject` of our consumers, we specify the *pattern* that will be processed by these consumers.

!!! note
    Note that all consumers are subscribed using the same `queue_group`. Within the same service, this does not make sense, since messages will come to these handlers in turn.
    Here, we emulate the work of several consumers and load balancing between them.

### Message Distribution

Now the distribution of messages between these consumers will look like this:

```python
    await broker.publish("", "logs.info")  # handlers: 1 or 2
```

Message `1` will be sent to `handler1` or `handler2` because they listen to the same `subject` template within the same `queue group`.

---

```python
    await broker.publish("", "logs.info")  # handlers: 1 or 2
```

Message `2` will be sent similarly to message `1`.

---

```python
    await broker.publish("", "logs.error")  # handlers: 3
```

Message `3` will be sent to `handler3` because it is the only one listening to the pattern `*.error`.
diff --git a/search/docs/nats/index.md b/search/docs/nats/index.md
deleted file mode 100644
index 5fb8de0..0000000
--- a/search/docs/nats/index.md
+++ /dev/null
@@ -1,35 +0,0 @@
# NATS

!!! note ""
    **FastStream** *NATS* support is implemented on top of [**nats-py**](https://github.com/nats-io/nats.py){.external-link target="_blank"}. You can always get access to its objects if you need to use some low-level methods not represented in **FastStream**.

## Advantages and Disadvantages

*NATS* is an easy-to-use, high-performance message broker written in *Golang*.
If your application does not require complex routing logic but needs to cope with high loads, scale easily, and avoid large hardware costs, *NATS* will be an excellent choice for you.

Also, *NATS* has zero-cost creation of new entities (to be honest, all `subjects` are just routing fields), so it can be used as an **RPC** over **MQ** tool.

!!! note
    More information about *NATS* can be found on the [official website](https://nats.io){.external-link target="_blank"}.

However, *NATS* has disadvantages that you should be aware of:

* Messages are not persistent. If a message is published while your consumer is disconnected, it will be lost.
* There are no complex routing mechanisms.
* There are no mechanisms for confirming receipt and processing of messages from the consumer.

## NATS JetStream

These shortcomings are addressed by the persistent layer, [**JetStream**](https://docs.nats.io/nats-concepts/jetstream){.external-link target="_blank"}. If you need strict guarantees for the delivery and processing of messages, at a small cost in speed and resource consumption, you can use **NatsJS**.

Also, **NatsJS** supports some high-level features like *Key-Value* and *Object* storages (with subscription to changes on them) and provides you with rich abilities to build your logic on top of it.

## Routing Rules

*NATS* does not have the ability to configure complex routing rules. The only entity in *NATS* is the `subject`, which can be subscribed to either directly by name or by a wildcard pattern.

Both examples are discussed [a little further](./examples/direct.md){.internal-link}.

In order to support the ability to scale consumers horizontally, *NATS* supports the `queue group` functionality:
a message sent to a `subject` will be processed by a random consumer from the `queue group` subscribed to this `subject`.
This approach allows you to increase the processing speed of a `subject` by *N* times when starting *N* consumers with one group.
diff --git a/search/docs/nats/jetstream/ack.md b/search/docs/nats/jetstream/ack.md
deleted file mode 100644
index f74b0d0..0000000
--- a/search/docs/nats/jetstream/ack.md
+++ /dev/null
@@ -1,78 +0,0 @@
# Consuming Acknowledgements

As you may know, *NATS* employs a rather extensive [Acknowledgement](https://docs.nats.io/using-nats/developer/develop_jetstream#acknowledging-messages){.external-link target="_blank"} policy.

In most cases, **FastStream** automatically acknowledges (*acks*) messages on your behalf. When your function executes correctly, including sending all responses, a message will be acknowledged (and rejected in case of an exception).

However, there are situations where you might want to use different acknowledgement logic.

## Retries

If you prefer to use a *nack* instead of a *reject* when there's an error in message processing, you can specify the `retry` flag in the `#!python @broker.subscriber(...)` method, which is responsible for error handling logic.

By default, this flag is set to `False`, indicating that if an error occurs during message processing, the message can still be retrieved from the queue:

```python
@broker.subscriber("test", retry=False)  # don't handle exceptions
async def base_handler(body: str):
    ...
```

If this flag is set to `True`, the message will be *nack*ed and placed back in the queue each time an error occurs.
In this scenario, the message can be processed by another consumer (if there are several of them) or by the same one:

```python
@broker.subscriber("test", retry=True)  # try again indefinitely
async def base_handler(body: str):
    ...
```

!!! tip
    For more complex error handling cases, you can use [tenacity](https://tenacity.readthedocs.io/en/latest/){.external-link target="_blank"}.

## Manual Acknowledgement

If you want to acknowledge a message manually, you can get direct access to the message object via the [Context](../../getting-started/context/existed.md){.internal-link} and call the corresponding method:

```python
from faststream.nats.annotations import NatsMessage

@broker.subscriber("test")
async def base_handler(body: str, msg: NatsMessage):
    await msg.ack()
    # or
    await msg.nack()
    # or
    await msg.reject()
```

**FastStream** will see that the message was already acknowledged and will do nothing at the end of the process.

## Interrupt Process

If you want to interrupt message processing at any call stack level, you can raise `faststream.exceptions.AckMessage`:

``` python linenums="1" hl_lines="2 16"
from faststream import FastStream
from faststream.exceptions import AckMessage
from faststream.nats import NatsBroker

broker = NatsBroker("nats://localhost:4222")
app = FastStream(broker)


@broker.subscriber("test-subject", stream="test-stream")
async def handle(body):
    smth_processing(body)


def smth_processing(body):
    if True:
        raise AckMessage()


@app.after_startup
async def test_publishing():
    await broker.publish("Hello!", "test-subject")
```

This way, **FastStream** interrupts the current message processing and acknowledges it immediately. You can also raise `NackMessage` and `RejectMessage`.
diff --git a/search/docs/nats/jetstream/index.md b/search/docs/nats/jetstream/index.md
deleted file mode 100644
index ddec9ff..0000000
--- a/search/docs/nats/jetstream/index.md
+++ /dev/null
@@ -1,53 +0,0 @@
# NATS JetStream

The default *NATS* usage is suitable for scenarios where:

* The `publisher` and `consumer` are always online.
* The system can tolerate message loss.

If you need stricter guarantees, like:

* The availability of a message processing confirmation mechanism (`ack`/`nack`).
* Message persistence (messages will accumulate in the queue when the `consumer` is offline).

You should use the **NATS JetStream** extension.

In fact, the **JetStream** extension is the same as *NATS*, with the addition of a persistent layer above the file system. Therefore, all interfaces for publishing and consuming messages are similar to regular *NATS* usage.

However, the **JetStream** layer has many configuration possibilities, from the policy of deleting old messages to the maximum stored messages number limit. You can find out more about all **JetStream** features in the official [documentation](https://docs.nats.io/using-nats/developer/develop_jetstream){.external-link target="_blank"}.

!!! tip ""
    If you have worked with other message brokers, then you should know that the logic of **JS** is closer to **Kafka** than to **RabbitMQ**: messages, after confirmation, are not deleted from the queue but remain there until the queue is full, at which point it will start deleting old messages (or in accordance with other logic that you can configure yourself).

    When connecting a `consumer` (and, especially, when reconnecting), you must determine for yourself according to what logic it will consume messages: from the subject beginning, starting with some message, starting from some time, only new ones, etc. Don't be surprised if, when a connection is restored, your `consumer` starts to process all previously received messages again: you haven't defined that rule.

Also, **NATS JetStream** has built-in `key-value` (similar to **Redis**) and `object` (similar to **Minio**) storages, which, in addition to the interface for *put/get*, have the ability to subscribe to events, which can be extremely useful in various scenarios.

**FastStream** does not provide access to this functionality directly, but it is covered by the [nats-py](https://github.com/nats-io/nats.py){.external-link target="_blank"} library it uses. You can access the **JS** object from the application context:

```python linenums="1" hl_lines="2 7 11-12 21"
from faststream import FastStream, Logger
from faststream.nats import JStream, NatsBroker

broker = NatsBroker()
app = FastStream(broker)

stream = JStream(name="stream")

@broker.subscriber(
    "js-subject",
    stream=stream,
    deliver_policy="new",
)
async def handler(msg: str, logger: Logger):
    logger.info(msg)

@app.after_startup
async def test_send():
    await broker.publish("Hi!", "js-subject")
    # publish with stream verification
    await broker.publish("Hi!", "js-subject", stream="stream")
```

!!! tip
    Using a `JStream` object, **FastStream** tries to create/update the stream with the object's settings. To prevent this behavior and *just get an already created stream*, please use the `#!python JStream(..., declare=False)` option.
diff --git a/search/docs/nats/jetstream/key-value.md b/search/docs/nats/jetstream/key-value.md
deleted file mode 100644
index 3de9762..0000000
--- a/search/docs/nats/jetstream/key-value.md
+++ /dev/null
@@ -1,107 +0,0 @@
# Key-Value Storage

## Overview

[*Key-Value*](https://docs.nats.io/nats-concepts/jetstream/key-value-store){.external-link target="_blank"} storage is just a high-level interface on top of *NatsJS*.

It is a regular *JetStream*, where the *KV key* is a subject.

`Put`/`Update` an object to *KV* by *key* - it's like publishing a new message to the corresponding subject in the stream.

Thus, the `Get` command returns not only the current *key* value but the latest one together with its offset. Additionally, you can ask for a specific value based on its offset in the *KV* stream.

This interface provides you with rich abilities: use it like a regular *KV* storage (ignoring offsets), subscribe to *KV key* changes, or ask for an old *KV value* revision. So you can use this feature in your application in many different ways. You can find some examples on the *NATS* developers' official [YouTube channel](https://youtube.com/@NATS_io?si=DWHvNFjsLruxg5OZ){.external-link target="_blank"}.

## FastStream Details

**FastStream** has no native interfaces for this *NatsJS* functionality (yet), but it allows you to get access to the inner `JetStream` object to create it manually.

First of all, you need to create a *Key-Value* storage object and pass it into the context:

```python linenums="1" hl_lines="14-15"
from faststream import Context, FastStream, Logger
from faststream.nats import NatsBroker
from faststream.nats.annotations import ContextRepo

broker = NatsBroker()
app = FastStream(broker)


@app.on_startup
async def setup_broker(context: ContextRepo):
    await broker.connect()

    kv = await broker.stream.create_key_value(bucket="bucket")
    context.set_global("kv", kv)
```

!!! tip
    We placed this code in the `#!python @app.on_startup` hook because `#!python @app.after_startup` will be triggered **AFTER** your handlers start consuming messages. So, if you need to have access to any custom context objects, you should set them up in the `#!python @app.on_startup` hook.

    Also, we call the `#!python await broker.connect()` method manually to establish the connection to be able to create a storage.

---

Next, we are ready to use this object right in our handlers.

Let's create an annotated object to shorten context object access:

```python linenums="1" hl_lines="5"
from nats.js.kv import KeyValue as KV
from typing_extensions import Annotated

KeyValue = Annotated[KV, Context("kv")]
```

And just use it in a handler:

```python linenums="1" hl_lines="4 7-8"
from faststream import Logger


@broker.subscriber("subject")
async def handler(msg: str, kv: KeyValue, logger: Logger):
    logger.info(msg)
    kv_data = await kv.get("key")
    assert kv_data.value == b"Hello!"
```

Finally, let's test our code behavior by putting something into the KV storage and sending a message:

```python linenums="1" hl_lines="3-4"
@app.after_startup
async def test_send(kv: KeyValue):
    await kv.put("key", b"Hello!")
    await broker.publish("Hi!", "subject")
```

??? example "Full listing"
    ```python linenums="1"
    from nats.js.kv import KeyValue as KV
    from typing_extensions import Annotated

    from faststream import Context, FastStream, Logger
    from faststream.nats import NatsBroker
    from faststream.nats.annotations import ContextRepo

    KeyValue = Annotated[KV, Context("kv")]

    broker = NatsBroker()
    app = FastStream(broker)


    @broker.subscriber("subject")
    async def handler(msg: str, kv: KeyValue, logger: Logger):
        logger.info(msg)
        kv_data = await kv.get("key")
        assert kv_data.value == b"Hello!"


    @app.on_startup
    async def setup_broker(context: ContextRepo):
        await broker.connect()

        kv = await broker.stream.create_key_value(bucket="bucket")
        context.set_global("kv", kv)


    @app.after_startup
    async def test_send(kv: KeyValue):
        await kv.put("key", b"Hello!")
        await broker.publish("Hi!", "subject")
    ```
diff --git a/search/docs/nats/jetstream/object.md b/search/docs/nats/jetstream/object.md
deleted file mode 100644
index 7e24a7a..0000000
--- a/search/docs/nats/jetstream/object.md
+++ /dev/null
@@ -1,111 +0,0 @@
# Object Storage

Object storage is almost identical to the [*Key-Value*](./key-value.md) storage concept, so you can reuse that guide.

## Overview

[*Object Storage*](https://docs.nats.io/nats-concepts/jetstream/obj_store){.external-link target="_blank"} is just a high-level interface on top of *NatsJS*.

It is a regular *JetStream*, where the *Object key* is a subject.

The main difference between the *KV* and *Object* storages is that in the *Object* storage, you can store files greater than **1MB** (a limitation of *KV*).
It has no limit on the maximum object size and stores objects in chunks (each message is an object chunk), so you can literally *stream* huge objects through *NATS*.

## FastStream Details

**FastStream** has no native interfaces for this *NatsJS* functionality (yet), but it allows you to access the inner `JetStream` object to create it manually.

First of all, you need to create an *Object* storage object and pass it into the context:

```python linenums="1" hl_lines="14-15"
from faststream import Context, FastStream
from faststream.nats import NatsBroker
from faststream.nats.annotations import ContextRepo

broker = NatsBroker()
app = FastStream(broker)


@app.on_startup
async def setup_broker(context: ContextRepo):
    await broker.connect()

    os = await broker.stream.create_object_store("bucket")
    context.set_global("OS", os)
```

!!! tip
    We placed this code in the `#!python @app.on_startup` hook because `#!python @app.after_startup` will be triggered **AFTER** your handlers start consuming messages. So, if you need to have access to any custom context objects, you should set them up in the `#!python @app.on_startup` hook.

    Also, we call the `#!python await broker.connect()` method manually to establish the connection to be able to create a storage.

---

Next, we are ready to use this object right in our handlers.

Let's create an Annotated object to shorten `Context` object access:

```python linenums="1" hl_lines="5"
from nats.js.object_store import ObjectStore as OS
from typing_extensions import Annotated

ObjectStorage = Annotated[OS, Context("OS")]
```

And just use it in a handler:

```python linenums="1" hl_lines="8 10-11"
from io import BytesIO

from faststream import Logger


@broker.subscriber("subject")
async def handler(msg: str, os: ObjectStorage, logger: Logger):
    logger.info(msg)
    obj = await os.get("file")
    assert obj.data == b"File mock"
```

Finally, let's test our code behavior by putting something into the *Object storage* and sending a message:

```python linenums="1" hl_lines="3-4"
@app.after_startup
async def test_send(os: ObjectStorage):
    await os.put("file", BytesIO(b"File mock"))
    await broker.publish("Hi!", "subject")
```

!!! tip
    [`BytesIO`](https://docs.python.org/3/library/io.html#binary-i-o){.external-link target="_blank"} is a *Readable* object used to emulate a file opened for reading.
example "Full listing" - ```python linenums="1" -from io import BytesIO - -from nats.js.object_store import ObjectStore as OS -from typing_extensions import Annotated - -from faststream import Logger -from faststream import Context, FastStream -from faststream.nats import NatsBroker -from faststream.nats.annotations import ContextRepo - -ObjectStorage = Annotated[OS, Context("OS")] - -broker = NatsBroker() -app = FastStream(broker) - - -@broker.subscriber("subject") -async def handler(msg: str, os: ObjectStorage, logger: Logger): - logger.info(msg) - obj = await os.get("file") - assert obj.data == b"File mock" - - -@app.on_startup -async def setup_broker(context: ContextRepo): - await broker.connect() - - os = await broker.stream.create_object_store("bucket") - context.set_global("OS", os) - - -@app.after_startup -async def test_send(os: ObjectStorage): - await os.put("file", BytesIO(b"File mock")) - await broker.publish("Hi!", "subject") - ``` diff --git a/search/docs/nats/message.md b/search/docs/nats/message.md deleted file mode 100644 index 36fd983..0000000 --- a/search/docs/nats/message.md +++ /dev/null @@ -1,104 +0,0 @@ -# Access to Message Information - -As you know, **FastStream** serializes a message body and provides you access to it through function arguments. But sometimes you want access to message_id, headers, or other meta-information. - -## Message Access - -You can get it in a simple way: just acces the message object in the [Context](../getting-started/context/existed.md){.internal-link}! - -It contains the required information such as: - -* `#!python body: bytes` -* `#!python decoded_body: Any` -* `#!python content_type: str` -* `#!python reply_to: str` -* `#!python headers: dict[str, Any]` -* `#!python message_id: str` -* `#!python correlation_id: str` - -It is a **FastStream** wrapper around a native broker library message (`nats.aio.msg.Msg` in the *NATS*' case), you can access with `raw_message`. - -```python hl_lines="1 6" -from faststream.nats.annotations import NatsMessage - -@broker.subscriber("test") -async def base_handler( - body: str, - msg: NatsMessage, -): - print(msg.correlation_id) -``` - -Also, if you can't find the information you reqiure, you can get access directly to the wrapped `nats.aio.msg.Msg`, which contains complete message information. - -```python hl_lines="6" -from nats.aio.msg import Msg -from faststream.nats.annotations import NatsMessage - -@broker.subscriber("test") -async def base_handler(body: str, msg: NatsMessage): - raw: Msg = msg.raw_message - print(raw) -``` - -## Message Fields Access - -But in most cases, you don't need all message fields; you need to access some of them. You can use [Context Fields access](../getting-started/context/fields.md){.internal-link} feature for this reason. - -For example, you can get access to the `correlation_id` like this: - -```python hl_lines="6" -from faststream import Context - -@broker.subscriber("test") -async def base_handler( - body: str, - cor_id: str = Context("message.correlation_id"), -): - print(cor_id) -``` - -Or even directly from the raw message: - -```python hl_lines="6" -from faststream import Context - -@broker.subscriber("test") -async def base_handler( - body: str, - cor_id: str = Context("message.raw_message.correlation_id"), -): - print(cor_id) -``` - -But this code is too long to reuse everywhere. 
In this case, you can use the Python [`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated){.external-link target="_blank"} feature:

=== "python 3.9+"
    ```python hl_lines="4 9"
    from typing import Annotated
    from faststream import Context

    CorrelationId = Annotated[str, Context("message.correlation_id")]

    @broker.subscriber("test")
    async def base_handler(
        body: str,
        cor_id: CorrelationId,
    ):
        print(cor_id)
    ```

=== "python 3.6+"
    ```python hl_lines="4 9"
    from typing_extensions import Annotated
    from faststream import Context

    CorrelationId = Annotated[str, Context("message.correlation_id")]

    @broker.subscriber("test")
    async def base_handler(
        body: str,
        cor_id: CorrelationId,
    ):
        print(cor_id)
    ```
diff --git a/search/docs/nats/publishing/index.md b/search/docs/nats/publishing/index.md
deleted file mode 100644
index 898bf2b..0000000
--- a/search/docs/nats/publishing/index.md
+++ /dev/null
@@ -1,40 +0,0 @@
# Publishing

The **FastStream** `NatsBroker` supports all regular [publishing use cases](../../getting-started/publishing/index.md){.internal-link}. You can use them without any changes.

However, if you wish to further customize the publishing logic, you should take a deeper look at specific `NatsBroker` parameters.

## NATS Publishing

`NatsBroker` also uses the unified `publish` method (from a `publisher` object) to send messages.

``` python
import asyncio
from faststream.nats import NatsBroker

async def pub():
    async with NatsBroker() as broker:
        await broker.publish(
            "Hi!",
            subject="test",
        )

asyncio.run(pub())
```

## Basic Arguments

The `publish` method accepts the following arguments:

* `#!python message = ""` - the message to send.
* `#!python subject: str` - the *subject* where the message will be sent.

## Message Parameters

* `#!python headers: dict[str, str] | None = None` - headers of the message being sent (used by consumers).
* `#!python correlation_id: str | None = None` - message id, which helps to match the original message with its reply (generated automatically).

## NatsJS Parameters

* `#!python stream: str | None = None` - validate that the subject is in the stream.
* `#!python timeout: float | None = None` - wait for the NATS server response.
diff --git a/search/docs/nats/rpc.md b/search/docs/nats/rpc.md
deleted file mode 100644
index 33a9fe8..0000000
--- a/search/docs/nats/rpc.md
+++ /dev/null
@@ -1,45 +0,0 @@
# RPC over NATS

Because **NATS** has zero cost for creating new subjects, we can easily set up a new subject consumer just for one response message. This way, your request message will be published to one subject, and the response message will be consumed from another (temporary) subject, which allows you to use the regular **FastStream RPC** syntax in the **NATS** case too.

!!! tip
    **FastStream RPC** over **NATS** works in both the *NATS-Core* and *NATS-JS* cases, but in the *NATS-JS* case, you have to specify the expected `stream` as a publish argument.

## Blocking Request

**FastStream** provides you with the ability to send a blocking RPC request over *NATS* in a very simple way.

Just send a message like a regular one and get a response synchronously.

It is very close to the common **requests** syntax:

``` python hl_lines="1 4"
msg = await broker.publish(
    "Hi!",
    subject="test",
    rpc=True,
)
```

Also, you have two extra options to control this behavior:

* `#!python rpc_timeout: Optional[float] = 30.0` - controls how long you wait for a response.
* `#!python raise_timeout: bool = False` - by default, a timed-out request returns `None`, but if you need to raise a `TimeoutException` directly, you can specify this option.

## Reply-To

Also, if you want to create a permanent request-reply data flow, you should probably create a permanent subject to consume responses.

So, if you have such a subject, you can specify it with the `reply_to` argument. This way, **FastStream** will send a response to this subject automatically.

``` python hl_lines="1 8"
@broker.subscriber("response-subject")
async def consume_responses(msg):
    ...

msg = await broker.publish(
    "Hi!",
    subject="test",
    reply_to="response-subject",
)
```
diff --git a/search/docs/rabbit/ack.md b/search/docs/rabbit/ack.md
deleted file mode 100644
index c37b8c3..0000000
--- a/search/docs/rabbit/ack.md
+++ /dev/null
@@ -1,90 +0,0 @@
# Consuming Acknowledgements

As you may know, *RabbitMQ* employs a rather extensive [Acknowledgement](https://www.rabbitmq.com/confirms.html){.external-link target="_blank"} policy.

In most cases, **FastStream** automatically acknowledges (*acks*) messages on your behalf. When your function executes correctly, including sending all responses, a message will be acknowledged (and rejected in case of an exception).

However, there are situations where you might want to use a different acknowledgement logic.

## Retries

If you prefer to use a *nack* instead of a *reject* when there's an error in message processing, you can specify the `retry` flag in the `#!python @broker.subscriber(...)` method, which is responsible for error handling logic.

By default, this flag is set to `False`, indicating that if an error occurs during message processing, the message can still be retrieved from the queue:

```python
@broker.subscriber("test", retry=False)  # don't handle exceptions
async def base_handler(body: str):
    ...
```

If this flag is set to `True`, the message will be *nack*ed and placed back in the queue each time an error occurs. In this scenario, the message can be processed by another consumer (if there are several of them) or by the same one:

```python
@broker.subscriber("test", retry=True)  # try again indefinitely
async def base_handler(body: str):
    ...
```

If the `retry` flag is set to an `int`, the message will be placed back in the queue, and the number of retries will be limited to this number:

```python
@broker.subscriber("test", retry=3)  # make up to 3 attempts
async def base_handler(body: str):
    ...
```

!!! bug
    At the moment, attempts are counted only by the current consumer. If the message goes to another consumer, it will have its own counter.
    Subsequently, this logic will be reworked.

!!! tip
    For more complex error handling cases, you can use [tenacity](https://tenacity.readthedocs.io/en/latest/){.external-link target="_blank"}.

## Manual acknowledgement

If you want to acknowledge a message manually, you can get direct access to the message object via the [Context](../getting-started/context/existed.md){.internal-link} and call the corresponding method:

```python
from faststream.rabbit.annotations import RabbitMessage

@broker.subscriber("test")
async def base_handler(body: str, msg: RabbitMessage):
    await msg.ack()
    # or
    await msg.nack()
    # or
    await msg.reject()
```

**FastStream** will see that the message was already acknowledged and will do nothing at the end of the process.

## Interrupt Process

If you want to interrupt message processing at any call stack level, you can raise `faststream.exceptions.AckMessage`:

``` python linenums="1" hl_lines="2 16"
from faststream import FastStream
from faststream.exceptions import AckMessage
from faststream.rabbit import RabbitBroker

broker = RabbitBroker("amqp://guest:guest@localhost:5672/")
app = FastStream(broker)


@broker.subscriber("test-queue")
async def handle(body):
    smth_processing(body)


def smth_processing(body):
    if True:
        raise AckMessage()


@app.after_startup
async def test_publishing():
    await broker.publish("Hello!", "test-queue")
```

This way, **FastStream** interrupts the current message processing and acknowledges it immediately. You can also raise `NackMessage` and `RejectMessage`.
diff --git a/search/docs/rabbit/declare.md b/search/docs/rabbit/declare.md
deleted file mode 100644
index 469e08a..0000000
--- a/search/docs/rabbit/declare.md
+++ /dev/null
@@ -1,40 +0,0 @@
# RabbitMQ Queue/Exchange Declaration

**FastStream** declares and validates all exchanges and queues using *publisher* and *subscriber* *RabbitMQ* objects, but sometimes you need to declare them manually.

**RabbitBroker** provides a way to achieve this easily.

``` python linenums="1" hl_lines="15-20 22-27"
from faststream import FastStream
from faststream.rabbit import (
    ExchangeType,
    RabbitBroker,
    RabbitExchange,
    RabbitQueue,
)

broker = RabbitBroker()
app = FastStream(broker)


@app.after_startup
async def declare_smth():
    await broker.declare_exchange(
        RabbitExchange(
            name="some-exchange",
            type=ExchangeType.FANOUT,
        )
    )

    await broker.declare_queue(
        RabbitQueue(
            name="some-queue",
            durable=True,
        )
    )
```

These methods require just one argument (`RabbitQueue`/`RabbitExchange`) containing information about your required *RabbitMQ* objects. They declare/validate *RabbitMQ* objects and return low-level **aio-pika** robust objects to interact with.

!!! tip
    Also, these methods are idempotent, so you can call them with the same arguments multiple times, but the objects will be created only once; the next time, the method will return the already stored object. This way, you can get access to any queue/exchange created automatically.
diff --git a/search/docs/rabbit/examples/direct.md b/search/docs/rabbit/examples/direct.md
deleted file mode 100644
index c2a1fb0..0000000
--- a/search/docs/rabbit/examples/direct.md
+++ /dev/null
@@ -1,129 +0,0 @@
# Direct Exchange

The **Direct** Exchange is the basic way to route messages in *RabbitMQ*. Its core is very simple: the `exchange` sends messages to those queues whose `routing_key` matches the `routing_key` of the message being sent.

!!! note
    The **Default** Exchange, to which all queues in *RabbitMQ* are subscribed, has the **Direct** type by default.

## Scaling

If several consumers are listening to the same queue, messages will be distributed to one of them (round-robin). This behavior is common for all types of `exchange` because it refers to the queue itself. The type of `exchange` affects which queues the message gets into.
Thus, *RabbitMQ* can independently balance the load on queue consumers. You can increase the processing speed of the message flow from the queue by launching additional instances of a consumer service. You don't need to make changes to the current infrastructure configuration: RabbitMQ will take care of how to distribute messages between your services.

## Example

!!! tip
    The **Direct** Exchange is the type used in **FastStream** by default. You can simply declare it as follows:

    ```python
    @broker.subscriber("test_queue", "test_exchange")
    async def handler():
        ...
    ```

The argument `auto_delete=True` in this and subsequent examples is used only to clear the state of *RabbitMQ* after the example runs.

```python linenums="1"
from faststream import FastStream, Logger
from faststream.rabbit import RabbitBroker, RabbitExchange, RabbitQueue

broker = RabbitBroker()
app = FastStream(broker)

exch = RabbitExchange("exchange", auto_delete=True)

queue_1 = RabbitQueue("test-q-1", auto_delete=True)
queue_2 = RabbitQueue("test-q-2", auto_delete=True)


@broker.subscriber(queue_1, exch)
async def base_handler1(logger: Logger):
    logger.info("base_handler1")


@broker.subscriber(queue_1, exch)  # another service
async def base_handler2(logger: Logger):
    logger.info("base_handler2")


@broker.subscriber(queue_2, exch)
async def base_handler3(logger: Logger):
    logger.info("base_handler3")


@app.after_startup
async def send_messages():
    await broker.publish(queue="test-q-1", exchange=exch)  # handlers: 1
    await broker.publish(queue="test-q-1", exchange=exch)  # handlers: 2
    await broker.publish(queue="test-q-1", exchange=exch)  # handlers: 1
    await broker.publish(queue="test-q-2", exchange=exch)  # handlers: 3
```

### Consumer Announcement

First, we announce our **Direct** exchange and several queues that will listen to it:

```python linenums="7"
exch = RabbitExchange("exchange", auto_delete=True)

queue_1 = RabbitQueue("test-q-1", auto_delete=True)
queue_2 = RabbitQueue("test-q-2", auto_delete=True)
```

Then we sign up several consumers using the declared queues to the `exchange` we created:

```python linenums="13" hl_lines="1 6 11"
@broker.subscriber(queue_1, exch)
async def base_handler1(logger: Logger):
    logger.info("base_handler1")


@broker.subscriber(queue_1, exch)  # another service
async def base_handler2(logger: Logger):
    logger.info("base_handler2")


@broker.subscriber(queue_2, exch)
async def base_handler3(logger: Logger):
    logger.info("base_handler3")
```

!!! note
    `handler1` and `handler2` are subscribed to the same `exchange` using the same queue:
    within a single service, this does not make sense, since messages will come to these handlers in turn.
    Here we emulate the work of several consumers and load balancing between them.

### Message Distribution

Now, the distribution of messages between these consumers will look like this:

```python linenums="30"
    await broker.publish(queue="test-q-1", exchange=exch)  # handlers: 1
```

Message `1` will be sent to `handler1` because it listens to the `exchange` using a queue with the routing key `test-q-1`.

---

```python linenums="31"
    await broker.publish(queue="test-q-1", exchange=exch)  # handlers: 2
```

Message `2` will be sent to `handler2` because it listens to the `exchange` using the same queue, but `handler1` is busy.
-
----
-
-```python linenums="32"
-    await broker.publish(queue="test-q-1", exchange=exch)  # handlers: 1
-```
-
-Message `3` will be sent to `handler1` again because it is currently free.
-
----
-
-```python linenums="33"
-    await broker.publish(queue="test-q-2", exchange=exch)  # handlers: 3
-```
-
-Message `4` will be sent to `handler3` because it is the only one listening to the `exchange` using a queue with the routing key `test-q-2`.
diff --git a/search/docs/rabbit/examples/fanout.md b/search/docs/rabbit/examples/fanout.md
deleted file mode 100644
index 6cb9de1..0000000
--- a/search/docs/rabbit/examples/fanout.md
+++ /dev/null
@@ -1,93 +0,0 @@
-# Fanout Exchange
-
-The **Fanout** Exchange is an even simpler, but slightly less popular, way of routing in *RabbitMQ*. This type of `exchange` sends messages to all queues subscribed to it, ignoring any arguments of the message.
-
-At the same time, if several consumers listen to the same queue, messages will also be distributed among them.
-
-## Example
-
-```python linenums="1"
-from faststream import FastStream, Logger
-from faststream.rabbit import ExchangeType, RabbitBroker, RabbitExchange, RabbitQueue
-
-broker = RabbitBroker()
-app = FastStream(broker)
-
-exch = RabbitExchange("exchange", auto_delete=True, type=ExchangeType.FANOUT)
-
-queue_1 = RabbitQueue("test-q-1", auto_delete=True)
-queue_2 = RabbitQueue("test-q-2", auto_delete=True)
-
-
-@broker.subscriber(queue_1, exch)
-async def base_handler1(logger: Logger):
-    logger.info("base_handler1")
-
-
-@broker.subscriber(queue_1, exch)  # another service
-async def base_handler2(logger: Logger):
-    logger.info("base_handler2")
-
-
-@broker.subscriber(queue_2, exch)
-async def base_handler3(logger: Logger):
-    logger.info("base_handler3")
-
-
-@app.after_startup
-async def send_messages():
-    await broker.publish(exchange=exch)  # handlers: 1, 2, 3
-    await broker.publish(exchange=exch)  # handlers: 1, 2, 3
-    await broker.publish(exchange=exch)  # handlers: 1, 2, 3
-    await broker.publish(exchange=exch)  # handlers: 1, 2, 3
-```
-
-### Consumer Announcement
-
-First, we declare our **Fanout** exchange and several queues that will listen to it:
-
-```python linenums="7" hl_lines="1"
-exch = RabbitExchange("exchange", auto_delete=True, type=ExchangeType.FANOUT)
-
-queue_1 = RabbitQueue("test-q-1", auto_delete=True)
-queue_2 = RabbitQueue("test-q-2", auto_delete=True)
-```
-
-Then we subscribe several consumers to the `exchange` we created, using the queues declared above:
-
-```python linenums="13" hl_lines="1 6 11"
-@broker.subscriber(queue_1, exch)
-async def base_handler1(logger: Logger):
-    logger.info("base_handler1")
-
-
-@broker.subscriber(queue_1, exch)  # another service
-async def base_handler2(logger: Logger):
-    logger.info("base_handler2")
-
-
-@broker.subscriber(queue_2, exch)
-async def base_handler3(logger: Logger):
-    logger.info("base_handler3")
-```
-
-!!! note
-    `handler1` and `handler2` are subscribed to the same `exchange` using the same queue:
-    within a single service, this does not make sense, since messages will come to these handlers in turn.
-    Here we emulate the work of several consumers and load balancing between them.
-
-### Message Distribution
-
-Now, all messages will be sent to all subscribers because their queues are bound to the same **FANOUT** exchange:
-
-```python linenums="30"
-    await broker.publish(exchange=exch)  # handlers: 1, 2, 3
-    await broker.publish(exchange=exch)  # handlers: 1, 2, 3
-    await broker.publish(exchange=exch)  # handlers: 1, 2, 3
-    await broker.publish(exchange=exch)  # handlers: 1, 2, 3
```
-
----
-
-!!! note
-    When sending messages to a **Fanout** exchange, it makes no sense to specify the `queue` or `routing_key` arguments, because they will be ignored.
diff --git a/search/docs/rabbit/examples/headers.md b/search/docs/rabbit/examples/headers.md
deleted file mode 100644
index 017bcb7..0000000
--- a/search/docs/rabbit/examples/headers.md
+++ /dev/null
@@ -1,181 +0,0 @@
-# Header Exchange
-
-The **Header** Exchange is the most complex and flexible way to route messages in *RabbitMQ*. This `exchange` type sends messages to queues by matching the queue binding arguments with message headers.
-
-At the same time, if several consumers are subscribed to the queue, messages will also be distributed among them.
-
-## Example
-
-```python linenums="1"
-from faststream import FastStream, Logger
-from faststream.rabbit import ExchangeType, RabbitBroker, RabbitExchange, RabbitQueue
-
-broker = RabbitBroker()
-app = FastStream(broker)
-
-exch = RabbitExchange("exchange", auto_delete=True, type=ExchangeType.HEADERS)
-
-queue_1 = RabbitQueue(
-    "test-queue-1",
-    auto_delete=True,
-    bind_arguments={"key": 1},
-)
-queue_2 = RabbitQueue(
-    "test-queue-2",
-    auto_delete=True,
-    bind_arguments={"key": 2, "key2": 2, "x-match": "any"},
-)
-queue_3 = RabbitQueue(
-    "test-queue-3",
-    auto_delete=True,
-    bind_arguments={"key": 2, "key2": 2, "x-match": "all"},
-)
-
-
-@broker.subscriber(queue_1, exch)
-async def base_handler1(logger: Logger):
-    logger.info("base_handler1")
-
-
-@broker.subscriber(queue_1, exch)  # another service
-async def base_handler2(logger: Logger):
-    logger.info("base_handler2")
-
-
-@broker.subscriber(queue_2, exch)
-async def base_handler3(logger: Logger):
-    logger.info("base_handler3")
-
-
-@broker.subscriber(queue_3, exch)
-async def base_handler4(logger: Logger):
-    logger.info("base_handler4")
-
-
-@app.after_startup
-async def send_messages():
-    await broker.publish(exchange=exch, headers={"key": 1})  # handlers: 1
-    await broker.publish(exchange=exch, headers={"key": 1})  # handlers: 2
-    await broker.publish(exchange=exch, headers={"key": 1})  # handlers: 1
-    await broker.publish(exchange=exch, headers={"key": 2})  # handlers: 3
-    await broker.publish(exchange=exch, headers={"key2": 2})  # handlers: 3
-    await broker.publish(
-        exchange=exch, headers={"key": 2, "key2": 2.0}
-    )  # handlers: 3, 4
-```
-
-### Consumer Announcement
-
-First, we declare our **Header** exchange and several queues that will listen to it:
-
-```python linenums="7" hl_lines="1 6 11 16"
-exch = RabbitExchange("exchange", auto_delete=True, type=ExchangeType.HEADERS)
-
-queue_1 = RabbitQueue(
-    "test-queue-1",
-    auto_delete=True,
-    bind_arguments={"key": 1},
-)
-queue_2 = RabbitQueue(
-    "test-queue-2",
-    auto_delete=True,
-    bind_arguments={"key": 2, "key2": 2, "x-match": "any"},
-)
-queue_3 = RabbitQueue(
-    "test-queue-3",
-    auto_delete=True,
-    bind_arguments={"key": 2, "key2": 2, "x-match": "all"},
-)
-```
-
-The `x-match` argument indicates whether the binding arguments should match the message headers in whole (`"all"`, the default) or in part (`"any"`).
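-
-As a minimal, purely illustrative sketch (the queue and header names here are hypothetical, not part of the example above), the two matching modes compare like this:
-
-```python
-# "any": a message matches if AT LEAST ONE bound header matches
-any_queue = RabbitQueue(
-    "q-any",
-    auto_delete=True,
-    bind_arguments={"env": "prod", "region": "eu", "x-match": "any"},
-)
-
-# "all": a message matches only if EVERY bound header matches
-all_queue = RabbitQueue(
-    "q-all",
-    auto_delete=True,
-    bind_arguments={"env": "prod", "region": "eu", "x-match": "all"},
-)
-```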
-
-Then we subscribe several consumers to the `exchange` we created, using the queues declared above:
-
-```python linenums="26" hl_lines="1 6 11 16"
-@broker.subscriber(queue_1, exch)
-async def base_handler1(logger: Logger):
-    logger.info("base_handler1")
-
-
-@broker.subscriber(queue_1, exch)  # another service
-async def base_handler2(logger: Logger):
-    logger.info("base_handler2")
-
-
-@broker.subscriber(queue_2, exch)
-async def base_handler3(logger: Logger):
-    logger.info("base_handler3")
-
-
-@broker.subscriber(queue_3, exch)
-async def base_handler4(logger: Logger):
-    logger.info("base_handler4")
-```
-
-!!! note
-    `handler1` and `handler2` are subscribed to the same `exchange` using the same queue:
-    within a single service, this does not make sense, since messages will come to these handlers in turn.
-    Here we emulate the work of several consumers and load balancing between them.
-
-### Message Distribution
-
-Now, the distribution of messages between these consumers will look like this:
-
-```python linenums="48"
-    await broker.publish(exchange=exch, headers={"key": 1})  # handlers: 1
-```
-
-Message `1` will be sent to `handler1` because it listens to a queue whose `key` binding argument matches the `key` header of the message.
-
----
-
-```python linenums="49"
-    await broker.publish(exchange=exch, headers={"key": 1})  # handlers: 2
-```
-
-Message `2` will be sent to `handler2` because it listens to the `exchange` using the same queue, but `handler1` is busy.
-
----
-
-```python linenums="50"
-    await broker.publish(exchange=exch, headers={"key": 1})  # handlers: 1
-```
-
-Message `3` will be sent to `handler1` again because it is currently free.
-
----
-
-```python linenums="51"
-    await broker.publish(exchange=exch, headers={"key": 2})  # handlers: 3
-```
-
-Message `4` will be sent to `handler3` because it listens to a queue whose `key` binding argument matches the `key` header of the message.
-
----
-
-```python linenums="52"
-    await broker.publish(exchange=exch, headers={"key2": 2})  # handlers: 3
-```
-
-Message `5` will be sent to `handler3` because it listens to a queue whose `key2` binding argument matches the `key2` header of the message.
-
----
-
-```python linenums="53"
-    await broker.publish(
-        exchange=exch, headers={"key": 2, "key2": 2.0}
-    )  # handlers: 3, 4
```
-
-Message `6` will be sent to `handler3` and `handler4` because the message headers completely match the queues' binding arguments.
-
----
-
-!!! note
-    When sending messages to a **Header** exchange, it makes no sense to specify the `queue` or `routing_key` arguments, because they will be ignored.
-
-!!! warning
-    For exceptionally complex routing, you can bind an `exchange` to another `exchange`. In this case, all the same rules apply as for queues subscribed to an `exchange`. The only difference is that the bound `exchange` can further distribute messages according to its own rules.
-
-    So, for example, you can combine the Topic and Header exchange types.
diff --git a/search/docs/rabbit/examples/index.md b/search/docs/rabbit/examples/index.md
deleted file mode 100644
index 167f06f..0000000
--- a/search/docs/rabbit/examples/index.md
+++ /dev/null
@@ -1,36 +0,0 @@
-# Basic Subscriber
-
-If you know nothing about *RabbitMQ* and how it works, you will still be able to use **FastStream RabbitBroker**.
-
-Just use the `#!python @broker.subscriber(...)` method with a string as a routing key.
-
-```python linenums="1"
-from faststream import FastStream
-from faststream.rabbit import RabbitBroker
-
-broker = RabbitBroker()
-app = FastStream(broker)
-
-
-@broker.subscriber("routing_key")  # handle messages by routing key
-async def handle(msg):
-    print(msg)
-
-
-@app.after_startup
-async def test_publish():
-    await broker.publish(
-        "message",
-        "routing_key",  # publish message with routing key
-    )
-```
-
-This is the principle all **FastStream** brokers work by: you don't need to learn them in-depth if you just want to *send a message*.
-
-## RabbitMQ Details
-
-If you are already familiar with *RabbitMQ*, you may also want to understand the inner workings of the example above.
-
-In this case, **FastStream** either creates or validates a queue with the specified **routing_key** and binds it to the default *RabbitMQ* exchange.
-
-If you want to specify a *queue*-*exchange* pair with additional arguments, **FastStream** provides you with the ability to do so. You can use the special `RabbitQueue` and `RabbitExchange` objects to configure RabbitMQ queues, exchanges, and binding properties. For examples of using the various types of exchanges, please refer to the following articles.
diff --git a/search/docs/rabbit/examples/stream.md b/search/docs/rabbit/examples/stream.md
deleted file mode 100644
index 697879e..0000000
--- a/search/docs/rabbit/examples/stream.md
+++ /dev/null
@@ -1,36 +0,0 @@
-# RabbitMQ Streams
-
-*RabbitMQ* has a [Streams](https://www.rabbitmq.com/streams.html){.external-link target="_blank"} feature, which is closely related to *Kafka* topics.
-
-The main difference from regular *RabbitMQ* queues is that messages are not deleted after being consumed.
-
-And **FastStream** supports this feature as well!
-
-```python linenums="1" hl_lines="4 10-12 17"
-from faststream import FastStream, Logger
-from faststream.rabbit import RabbitBroker, RabbitQueue
-
-broker = RabbitBroker(max_consumers=10)
-app = FastStream(broker)
-
-queue = RabbitQueue(
-    name="test-stream",
-    durable=True,
-    arguments={
-        "x-queue-type": "stream",
-    },
-)
-
-
-@broker.subscriber(
-    queue,
-    consume_args={"x-stream-offset": "first"},
-)
-async def handle(msg, logger: Logger):
-    logger.info(msg)
-
-
-@app.after_startup
-async def test():
-    await broker.publish("Hi!", queue)
-```
diff --git a/search/docs/rabbit/examples/topic.md b/search/docs/rabbit/examples/topic.md
deleted file mode 100644
index 7b32df7..0000000
--- a/search/docs/rabbit/examples/topic.md
+++ /dev/null
@@ -1,113 +0,0 @@
-# Topic Exchange
-
-The **Topic** Exchange is a powerful *RabbitMQ* routing tool. This type of `exchange` sends messages to queues according to the *pattern* specified when the queue is bound to the `exchange` and the `routing_key` of the message itself.
-
-At the same time, if several consumers are subscribed to the queue, messages will be distributed among them.
-
-## Example
-
-```python linenums="1"
-from faststream import FastStream, Logger
-from faststream.rabbit import ExchangeType, RabbitBroker, RabbitExchange, RabbitQueue
-
-broker = RabbitBroker()
-app = FastStream(broker)
-
-exch = RabbitExchange("exchange", auto_delete=True, type=ExchangeType.TOPIC)
-
-queue_1 = RabbitQueue("test-queue-1", auto_delete=True, routing_key="*.info")
-queue_2 = RabbitQueue("test-queue-2", auto_delete=True, routing_key="*.debug")
-
-
-@broker.subscriber(queue_1, exch)
-async def base_handler1(logger: Logger):
-    logger.info("base_handler1")
-
-
-@broker.subscriber(queue_1, exch)  # another service
-async def base_handler2(logger: Logger):
-    logger.info("base_handler2")
-
-
-@broker.subscriber(queue_2, exch)
-async def base_handler3(logger: Logger):
-    logger.info("base_handler3")
-
-
-@app.after_startup
-async def send_messages():
-    await broker.publish(routing_key="logs.info", exchange=exch)  # handlers: 1
-    await broker.publish(routing_key="logs.info", exchange=exch)  # handlers: 2
-    await broker.publish(routing_key="logs.info", exchange=exch)  # handlers: 1
-    await broker.publish(routing_key="logs.debug", exchange=exch)  # handlers: 3
-```
-
-### Consumer Announcement
-
-First, we declare our **Topic** exchange and several queues that will listen to it:
-
-```python linenums="7" hl_lines="1 3-4"
-exch = RabbitExchange("exchange", auto_delete=True, type=ExchangeType.TOPIC)
-
-queue_1 = RabbitQueue("test-queue-1", auto_delete=True, routing_key="*.info")
-queue_2 = RabbitQueue("test-queue-2", auto_delete=True, routing_key="*.debug")
```
-
-Note that in the `routing_key` of each queue, we specify the *pattern* of routing keys that the queue will process. The wildcard semantics are illustrated after the walkthrough below.
-
-Then we subscribe several consumers to the `exchange` we created, using the queues declared above:
-
-```python linenums="13" hl_lines="1 6 11"
-@broker.subscriber(queue_1, exch)
-async def base_handler1(logger: Logger):
-    logger.info("base_handler1")
-
-
-@broker.subscriber(queue_1, exch)  # another service
-async def base_handler2(logger: Logger):
-    logger.info("base_handler2")
-
-
-@broker.subscriber(queue_2, exch)
-async def base_handler3(logger: Logger):
-    logger.info("base_handler3")
```
-
-!!! note
-    `handler1` and `handler2` are subscribed to the same `exchange` using the same queue:
-    within a single service, this does not make sense, since messages will come to these handlers in turn.
-    Here we emulate the work of several consumers and load balancing between them.
-
-### Message Distribution
-
-Now, the distribution of messages between these consumers will look like this:
-
-```python linenums="30"
-    await broker.publish(routing_key="logs.info", exchange=exch)  # handlers: 1
```
-
-Message `1` will be sent to `handler1` because it listens to the `exchange` using a queue with the routing key pattern `*.info`.
-
----
-
-```python linenums="31"
-    await broker.publish(routing_key="logs.info", exchange=exch)  # handlers: 2
```
-
-Message `2` will be sent to `handler2` because it listens to the `exchange` using the same queue, but `handler1` is busy.
-
----
-
-```python linenums="32"
-    await broker.publish(routing_key="logs.info", exchange=exch)  # handlers: 1
```
-
-Message `3` will be sent to `handler1` again because it is currently free.
-
----
-
-```python linenums="33"
-    await broker.publish(routing_key="logs.debug", exchange=exch)  # handlers: 3
```
-
-Message `4` will be sent to `handler3` because it is the only one listening to the `exchange` using a queue with the routing key pattern `*.debug`.
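-
-!!! tip
-    The patterns above use the standard AMQP topic wildcards: `*` matches exactly one dot-separated word, while `#` matches zero or more words. As a minimal, purely illustrative sketch (the queue name here is hypothetical), a single `#` binding could catch every routing key starting with `logs`:
-
-    ```python
-    # receives "logs.info", "logs.debug", "logs.a.b", and even bare "logs"
-    queue_all = RabbitQueue("test-queue-all", auto_delete=True, routing_key="logs.#")
-    ```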
diff --git a/search/docs/rabbit/index.md b/search/docs/rabbit/index.md
deleted file mode 100644
index 92abf6f..0000000
--- a/search/docs/rabbit/index.md
+++ /dev/null
@@ -1,69 +0,0 @@
-# Rabbit Routing
-
-!!! note ""
-    **FastStream** *RabbitMQ* support is implemented on top of [**aio-pika**](https://aio-pika.readthedocs.io/en/latest/){.external-link target="_blank"}. You can always access its objects if you need to use some low-level methods not represented in **FastStream**.
-
-## Advantages
-
-The advantage of *RabbitMQ* is the ability to configure flexible and complex message routing scenarios.
-
-*RabbitMQ* covers the whole range of routing: from one queue with one consumer to a queue fed from several sources, including message prioritization.
-
-!!! note
-    For more information about *RabbitMQ*, please visit the [official documentation](https://www.rabbitmq.com/tutorials/amqp-concepts.html){.external-link target="_blank"}
-
-It lets you acknowledge messages as successfully processed, mark them as failed, remove them from the queue (unlike **Kafka**, processed messages cannot be re-received), lock a message for the duration of its processing, and monitor its current status.
-
-Having to keep track of the current status of every message is one cause of *RabbitMQ*'s performance issues. With really large message volumes, *RabbitMQ* starts to degrade. However, if this is a one-time influx, the consumers will drain the queue and the "health" of *RabbitMQ* will remain stable.
-
-If your scenario does not involve processing millions of messages but does require complex routing logic, *RabbitMQ* will be the right choice.
-
-## Basic Concepts
-
-If you want to fully understand how *RabbitMQ* works, you should visit its official website. There you will find high-level explanations of the basic concepts and usage examples.
-
-### Entities
-
-*RabbitMQ* works with three main entities:
-
-* `Exchange` - the point that receives messages from a *publisher*
-* `Queue` - the point that pushes messages to a *consumer*
-* `Binding` - the relationship between a *queue* and an *exchange*, or between two *exchanges*
-
-### Routing Rules
-
-The rules for delivering messages to consumers depend on the **type of exchange** and the **binding** parameters. All the main options are discussed in the [examples](examples/direct.md){.internal-link}.
-
-In general, the message path looks like this:
-
-1. The *publisher* sends a message to an `exchange`, specifying the `routing_key` and headers according to which routing will take place.
-2. The `exchange`, depending on the message parameters, determines which of the subscribed `bindings` to send the message to.
-3. The `binding` delivers the message to a `queue` or another `exchange` (in the latter case, that exchange forwards it further by its own rules).
-4. The `queue`, after receiving a message, sends it to one of its subscribed consumers (**PUSH API**).
-
-!!! tip
-    By default, all queues have a `binding` to the `default exchange` (**Direct** type) with a **routing key** corresponding to their name.
-    In **FastStream**, queues are connected to this `exchange`, and messages are sent through it by default unless another `exchange` is explicitly specified.
-
-    !!! warning ""
-        Connecting the queue to any other `exchange` will still leave it subscribed to the `default exchange`. Be careful with this.
-
-At this stage, the message gets into your application, and you start processing it.
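-
-For illustration, here is a minimal sketch of the default routing described above (the queue name is hypothetical): publishing with just a queue name sends the message through the `default exchange`, which routes it by the routing key equal to the queue name.
-
-```python
-import asyncio
-
-from faststream.rabbit import RabbitBroker
-
-
-async def main():
-    async with RabbitBroker() as broker:
-        # no exchange specified: the message goes to the default exchange
-        # and is routed to the "logs" queue by its name
-        await broker.publish("Hello!", "logs")
-
-asyncio.run(main())
```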
-
-### Message Statuses
-
-*RabbitMQ* requires confirmation of message processing: only then is the message removed from the queue.
-
-Confirmation can be either positive (`Acknowledgment - ack`) if the message was successfully processed, or negative (`Negative Acknowledgment - nack`) if the message was processed with an error.
-
-In the error case, the message can also be dropped from the queue entirely (`reject`); otherwise, after a negative confirmation, it will be requeued for processing again.
-
-In most cases, **FastStream** performs all the necessary actions by itself. However, if you want to manage the message lifecycle directly, you can access the message object itself and call the appropriate methods. This can be useful if you want to implement an "at most once" policy and need to confirm the consumption of the message before it is actually processed.
-
-## **FastStream** Specific
-
-**FastStream** omits the ability to create `bindings` directly, since in most cases you do not need to subscribe one queue to several `exchanges` or subscribe `exchanges` to each other. On the contrary, this practice leads to an over-complicated message routing scheme, which makes the entire service infrastructure difficult to maintain and develop further.
-
-**FastStream** suggests you adhere to an `exchange:queue` scheme of `1:N`, which greatly simplifies the interaction between your services. It is better to create an additional queue for a new `exchange` than to subscribe to an existing one.
-
-However, if you want to reduce the number of entities in your *RabbitMQ* and thereby optimize its performance (or you know exactly what you are doing), **FastStream** leaves you the option to create `bindings` directly. In all other cases, the binding parameters are an integral part of the **RabbitQueue** and **RabbitExchange** entities in **FastStream**.
diff --git a/search/docs/rabbit/message.md b/search/docs/rabbit/message.md
deleted file mode 100644
index 97180cd..0000000
--- a/search/docs/rabbit/message.md
+++ /dev/null
@@ -1,104 +0,0 @@
-# Access to Message Information
-
-As you know, **FastStream** serializes the message body and gives you access to it through function arguments. But sometimes you need access to the message_id, headers, or other meta-information.
-
-## Message Access
-
-You can get it in a simple way: just access the message object in the [Context](../getting-started/context/existed.md){.internal-link}!
-
-This message contains the required information, such as:
-
-* `#!python body: bytes`
-* `#!python decoded_body: Any`
-* `#!python content_type: str`
-* `#!python reply_to: str`
-* `#!python headers: dict[str, Any]`
-* `#!python message_id: str`
-* `#!python correlation_id: str`
-
-Since it is a **FastStream** wrapper around a native broker library message (`aio_pika.IncomingMessage` in the *RabbitMQ* case), you can also access the original message via `raw_message`.
-
-```python hl_lines="1 6"
-from faststream.rabbit.annotations import RabbitMessage
-
-@broker.subscriber("test")
-async def base_handler(
-    body: str,
-    msg: RabbitMessage,
-):
-    print(msg.correlation_id)
```
-
-If you can't find the information you require, you can access the wrapped `aio_pika.IncomingMessage` directly; it contains the complete message information.
-
-```python hl_lines="6"
-from aio_pika import IncomingMessage
-from faststream.rabbit.annotations import RabbitMessage
-
-@broker.subscriber("test")
-async def base_handler(body: str, msg: RabbitMessage):
-    raw: IncomingMessage = msg.raw_message
-    print(raw)
```
-
-## Message Fields Access
-
-In most cases, though, you don't need all the message fields; you only need some of them. You can use the [Context Fields access](../getting-started/context/fields.md){.internal-link} feature for this purpose.
-
-For example, you can access the `correlation_id` like this:
-
-```python hl_lines="6"
-from faststream import Context
-
-@broker.subscriber("test")
-async def base_handler(
-    body: str,
-    cor_id: str = Context("message.correlation_id"),
-):
-    print(cor_id)
```
-
-Or even directly from the raw message:
-
-```python hl_lines="6"
-from faststream import Context
-
-@broker.subscriber("test")
-async def base_handler(
-    body: str,
-    cor_id: str = Context("message.raw_message.correlation_id"),
-):
-    print(cor_id)
```
-
-But this code is too long to be reused everywhere. In this case, you can use the Python [`Annotated`](https://docs.python.org/3/library/typing.html#typing.Annotated){.external-link target="_blank"} feature:
-
-=== "python 3.9+"
-    ```python hl_lines="4 9"
-    from typing import Annotated
-    from faststream import Context
-
-    CorrelationId = Annotated[str, Context("message.correlation_id")]
-
-    @broker.subscriber("test")
-    async def base_handler(
-        body: str,
-        cor_id: CorrelationId,
-    ):
-        print(cor_id)
-    ```
-
-=== "python 3.6+"
-    ```python hl_lines="4 9"
-    from typing_extensions import Annotated
-    from faststream import Context
-
-    CorrelationId = Annotated[str, Context("message.correlation_id")]
-
-    @broker.subscriber("test")
-    async def base_handler(
-        body: str,
-        cor_id: CorrelationId,
-    ):
-        print(cor_id)
-    ```
diff --git a/search/docs/rabbit/publishing.md b/search/docs/rabbit/publishing.md
deleted file mode 100644
index f703695..0000000
--- a/search/docs/rabbit/publishing.md
+++ /dev/null
@@ -1,83 +0,0 @@
-# Publishing
-
-**FastStream** `RabbitBroker` supports all the regular [publishing use cases](../getting-started/publishing/index.md){.internal-link}; you can use them without any changes.
-
-However, if you wish to customize the publishing logic further, you should take a deeper look at the specific `RabbitBroker` parameters.
-
-## Rabbit Publishing
-
-`RabbitBroker` also uses the unified `publish` method (from a `publisher` object) to send messages.
-
-However, in this case, an object of the `aio_pika.Message` class can (if necessary) be used as a message, in addition to Python primitives and `pydantic.BaseModel`.
-
-You can specify the queue (used as the routing key) and, optionally, the exchange to send to by their names.
-
-``` python
-import asyncio
-from faststream.rabbit import RabbitBroker
-
-async def pub():
-    async with RabbitBroker() as broker:
-        await broker.publish(
-            "Hi!",
-            queue="test",
-            exchange="test"
-        )
-
-asyncio.run(pub())
```
-
-If you don't specify any exchange, the message will be sent to the default one.
-
-You can also use the special **RabbitQueue** and **RabbitExchange** objects as the `queue` and `exchange` arguments:
-
-``` python
-from faststream.rabbit import RabbitExchange, RabbitQueue
-
-await broker.publish(
-    "Hi!",
-    queue=RabbitQueue("test"),
-    exchange=RabbitExchange("test")
-)
```
-
-If you specify an exchange that doesn't exist, `RabbitBroker` will create it and then publish the message to it.
-
-!!! tip
-    Be careful with this: if you have already created an **Exchange** with specific parameters and then try to send a message to it by exchange name, the broker will try to create the exchange again, and an **Exchange** parameters conflict will occur.
-
-    If you are trying to send a message to a specific **Exchange**, sending it with a defined **RabbitExchange** object is the preferred way.
-
-## Basic Arguments
-
-The `publish` method takes the following arguments:
-
-* `#!python message = ""` - the message to send
-* `#!python exchange: str | RabbitExchange | None = None` - the exchange the message will be sent to; if not specified, the *default* one is used
-* `#!python queue: str | RabbitQueue = ""` - the queue the message will be sent to (since most queues use their name as the routing key, this is a human-readable version of `routing_key`)
-* `#!python routing_key: str = ""` - the message routing key; if not specified, the `queue` argument will be used
-
-## Message Parameters
-
-You can read more about all the available flags in the [RabbitMQ documentation](https://www.rabbitmq.com/consumers.html){.external-link target="_blank"}.
-
-* `#!python headers: dict[str, Any] | None = None` - message headers (used by consumers)
-* `#!python content_type: str | None = None` - the content_type of the message being sent (set automatically, used by consumers)
-* `#!python content_encoding: str | None = None` - the encoding of the message (used by consumers)
-* `#!python persist: bool = False` - whether to restore the message on a *RabbitMQ* reboot
-* `#!python priority: int | None = None` - the priority of the message
-* `#!python correlation_id: str | None = None` - a message ID that helps match the original message with its reply (generated automatically)
-* `#!python message_id: str | None = None` - the message ID (generated automatically)
-* `#!python timestamp: int | float | timedelta | datetime | None = None` - the message sending time (set automatically)
-* `#!python expiration: int | float | timedelta | datetime | None = None` - the message lifetime (in seconds)
-* `#!python type: str | None = None` - the type of message (used by consumers)
-* `#!python user_id: str | None = None` - the ID of the *RabbitMQ* user who sent the message
-* `#!python app_id: str | None = None` - the ID of the application that sent the message (used by consumers)
-
-## Send Flags
-
-Arguments for sending a message:
-
-* `#!python mandatory: bool = True` - the client waits for confirmation that the message will be placed in some queue (if there are no suitable queues, it is returned to the sender)
-* `#!python immediate: bool = False` - the client expects a consumer to be ready to take the message to work "right now" (if there is no such consumer, it is returned to the sender)
-* `#!python timeout: int | float | None = None` - how long to wait for a send confirmation from *RabbitMQ*
diff --git a/search/docs/rabbit/rpc.md b/search/docs/rabbit/rpc.md
deleted file mode 100644
index 57704a3..0000000
--- a/search/docs/rabbit/rpc.md
+++ /dev/null
@@ -1,42 +0,0 @@
-# RPC over RMQ
-
-## Blocking Request
-
-**FastStream** provides you with the ability to send a blocking RPC request over *RabbitMQ* in a very simple way.
-
-It uses the [**Direct Reply-To**](https://www.rabbitmq.com/direct-reply-to.html){.external-link target="_blank"} *RabbitMQ* feature, so you don't need to create any queues to consume a response.
-
-Just send a message like a regular one and get the response synchronously.
-
-It is very close to the familiar **requests** syntax:
-
-``` python hl_lines="1 4"
-msg = await broker.publish(
-    "Hi!",
-    queue="test",
-    rpc=True,
-)
```
-
-You also have two extra options to control this behavior:
-
-* `#!python rpc_timeout: Optional[float] = 30.0` - controls how long to wait for a response
-* `#!python raise_timeout: bool = False` - by default, a timed-out request returns `None`, but if you need to raise a `TimeoutException` directly, you can set this option
-
-## Reply-To
-
-If you want to set up a permanent request-reply data flow, you should probably create a permanent queue to consume the responses.
-
-If you have one, you can specify it with the `reply_to` argument. This way, **FastStream** will send the response to that queue automatically.
-
-``` python hl_lines="1 8"
-@broker.subscriber("response-queue")
-async def consume_responses(msg):
-    ...
-
-msg = await broker.publish(
-    "Hi!",
-    queue="test",
-    reply_to="response-queue",
-)
```
diff --git a/search/docs/release.md b/search/docs/release.md
deleted file mode 100644
index c094e07..0000000
--- a/search/docs/release.md
+++ /dev/null
@@ -1,36 +0,0 @@
----
-hide:
-  - navigation
-  - footer
----
-
-# Release Notes
-
-**FastStream** is a new package based on the ideas and experiences gained from [FastKafka](https://github.com/airtai/fastkafka){.external-link target="_blank"} and [Propan](https://github.com/lancetnik/propan){.external-link target="_blank"}. By joining forces, we picked the best from both packages and created a unified way to write services capable of processing streamed data regardless of the underlying protocol. We'll continue to maintain both packages, but new development will happen in this project. If you are starting a new service, this package is the recommended way to do it.
-
-## Features
-
-[**FastStream**](https://faststream.airt.ai/) simplifies the process of writing producers and consumers for message queues, handling all the
-parsing, networking, and documentation generation automatically.
-
-Making streaming microservices has never been easier. Designed with junior developers in mind, **FastStream** simplifies your work while keeping the door open for more advanced use cases. Here's a look at the core features that make **FastStream** a go-to framework for modern, data-centric microservices.
-
-- **Multiple Brokers**: **FastStream** provides a unified API to work across multiple message brokers (**Kafka** and **RabbitMQ** are supported)
-
-- [**Pydantic Validation**](#writing-app-code): Leverage [**Pydantic's**](https://docs.pydantic.dev/){.external-link target="_blank"} validation capabilities to serialize and validate incoming messages
-
-- [**Automatic Docs**](#project-documentation): Stay ahead with automatic [AsyncAPI](https://www.asyncapi.com/){.external-link target="_blank"} documentation.
-
-- **Intuitive**: Full-typed editor support makes your development experience smooth, catching errors before they reach runtime
-
-- [**Powerful Dependency Injection System**](#dependencies): Manage your service dependencies efficiently with **FastStream**'s built-in DI system.
- -- [**Testable**](#testing-the-service): supports in-memory tests, making your CI/CD pipeline faster and more reliable - -- **Extendable**: use extensions for lifespans, custom serialization and middlewares - -- [**Integrations**](#any-framework): **FastStream** is fully compatible with any HTTP framework you want ([**FastAPI**](#fastapi-plugin) especially) - -- **Built for Automatic Code Generation**: **FastStream** is optimized for automatic code generation using advanced models like GPT and Llama - -That's **FastStream** in a nutshell—easy, efficient, and powerful. Whether you're just starting with streaming microservices or looking to scale, **FastStream** has got you covered. From 056f8fe786ed8361cd97299af8fa996d6dadf5b2 Mon Sep 17 00:00:00 2001 From: harishmohanraj Date: Mon, 9 Oct 2023 12:46:54 +0000 Subject: [PATCH 4/5] Remove unused type: ignore comment --- faststream_gen/_code_generator/chat.py | 2 +- faststream_gen/_code_generator/helper.py | 2 +- nbs/Chat.ipynb | 2 +- nbs/Helper.ipynb | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/faststream_gen/_code_generator/chat.py b/faststream_gen/_code_generator/chat.py index 5f0098a..8c0599e 100644 --- a/faststream_gen/_code_generator/chat.py +++ b/faststream_gen/_code_generator/chat.py @@ -98,7 +98,7 @@ def _get_relevant_document(query: str) -> str: The content of the most relevant document as a string. """ db_path = get_root_data_path() / "docs" - db = FAISS.load_local(db_path, OpenAIEmbeddings()) # type: ignore + db = FAISS.load_local(db_path, OpenAIEmbeddings()) results = db.max_marginal_relevance_search(query, k=1, fetch_k=3) results_str = "\n".join([result.page_content for result in results]) return results_str diff --git a/faststream_gen/_code_generator/helper.py b/faststream_gen/_code_generator/helper.py index 74cebbc..edbe6d9 100644 --- a/faststream_gen/_code_generator/helper.py +++ b/faststream_gen/_code_generator/helper.py @@ -197,7 +197,7 @@ def get_relevant_prompt_examples(query: str) -> Dict[str, str]: The dictionary of the most relevant examples for each step. 
""" db_path = get_root_data_path() / "examples" - db = FAISS.load_local(db_path, OpenAIEmbeddings()) # type: ignore + db = FAISS.load_local(db_path, OpenAIEmbeddings()) results = db.similarity_search(query, k=3, fetch_k=5) results_page_content = [r.page_content for r in results] prompt_examples = _format_examples(results_page_content) diff --git a/nbs/Chat.ipynb b/nbs/Chat.ipynb index a667c70..2bb1694 100644 --- a/nbs/Chat.ipynb +++ b/nbs/Chat.ipynb @@ -230,7 +230,7 @@ " The content of the most relevant document as a string.\n", " \"\"\"\n", " db_path = get_root_data_path() / \"docs\"\n", - " db = FAISS.load_local(db_path, OpenAIEmbeddings()) # type: ignore\n", + " db = FAISS.load_local(db_path, OpenAIEmbeddings())\n", " results = db.max_marginal_relevance_search(query, k=1, fetch_k=3)\n", " results_str = \"\\n\".join([result.page_content for result in results])\n", " return results_str" diff --git a/nbs/Helper.ipynb b/nbs/Helper.ipynb index 73551da..cbb031a 100644 --- a/nbs/Helper.ipynb +++ b/nbs/Helper.ipynb @@ -594,7 +594,7 @@ " The dictionary of the most relevant examples for each step.\n", " \"\"\"\n", " db_path = get_root_data_path() / \"examples\"\n", - " db = FAISS.load_local(db_path, OpenAIEmbeddings()) # type: ignore\n", + " db = FAISS.load_local(db_path, OpenAIEmbeddings())\n", " results = db.similarity_search(query, k=3, fetch_k=5)\n", " results_page_content = [r.page_content for r in results]\n", " prompt_examples = _format_examples(results_page_content)\n", From b474b01c60c11333de01b00224c26517135cdcd1 Mon Sep 17 00:00:00 2001 From: harishmohanraj Date: Mon, 9 Oct 2023 12:50:59 +0000 Subject: [PATCH 5/5] Update mypy version --- settings.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/settings.ini b/settings.ini index 4e7661a..9143a10 100644 --- a/settings.ini +++ b/settings.ini @@ -58,7 +58,7 @@ dev_requirements = \ pytest==7.4.2 \ nbqa==1.7.0 \ black==23.7.0 \ - mypy==1.4.1 \ + mypy==1.5.1 \ isort==5.12.0 \ pre-commit==3.3.3 \ detect-secrets==1.4.0