diff --git a/docs/examples/code/fill_and_submit_web_form_crawler.py b/docs/examples/code/fill_and_submit_web_form_crawler.py
index 7cd61be87..e2f74c6e5 100644
--- a/docs/examples/code/fill_and_submit_web_form_crawler.py
+++ b/docs/examples/code/fill_and_submit_web_form_crawler.py
@@ -18,7 +18,7 @@ async def request_handler(context: HttpCrawlingContext) -> None:
     request = Request.from_url(
         url='https://httpbin.org/post',
         method='POST',
-        data={
+        payload={
             'custname': 'John Doe',
             'custtel': '1234567890',
             'custemail': 'johndoe@example.com',
diff --git a/docs/examples/code/fill_and_submit_web_form_request.py b/docs/examples/code/fill_and_submit_web_form_request.py
index 379eaec26..60a40860f 100644
--- a/docs/examples/code/fill_and_submit_web_form_request.py
+++ b/docs/examples/code/fill_and_submit_web_form_request.py
@@ -4,7 +4,7 @@
 request = Request.from_url(
     url='https://httpbin.org/post',
     method='POST',
-    data={
+    payload={
         'custname': 'John Doe',
         'custtel': '1234567890',
         'custemail': 'johndoe@example.com',
diff --git a/docs/examples/fill_and_submit_web_form.mdx b/docs/examples/fill_and_submit_web_form.mdx
index 49a8c7d2e..8498bb301 100644
--- a/docs/examples/fill_and_submit_web_form.mdx
+++ b/docs/examples/fill_and_submit_web_form.mdx
@@ -46,7 +46,7 @@ Now, let's create a POST request with the form fields and their values using the
 
 {RequestExample}
 
-Alternatively, you can send form data as URL parameters using the `query_params` argument. It depends on the form and how it is implemented. However, sending the data as a POST request body using the `data` parameter is generally a better approach.
+Alternatively, you can send form data as URL parameters using the `query_params` argument. It depends on the form and how it is implemented. However, sending the data as a POST request body using the `payload` parameter is generally a better approach.
 
 ## Implementing the crawler
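
For context, here is a minimal sketch of how the renamed argument is used, assuming the `Request.from_url` API and the `from crawlee import Request` import shown in the example files above, and the `query_params` argument mentioned in the docs text; it is illustrative only, not part of the diff.

```python
from crawlee import Request  # import path assumed from the example files in this diff

# After this change, form fields go in the `payload` argument (previously `data`),
# which sends them in the body of the POST request.
post_request = Request.from_url(
    url='https://httpbin.org/post',
    method='POST',
    payload={
        'custname': 'John Doe',
        'custtel': '1234567890',
        'custemail': 'johndoe@example.com',
    },
)

# Alternative mentioned in the docs text: send the fields as URL query parameters
# via `query_params` (exact behavior assumed from the prose, not shown in the diff).
get_request = Request.from_url(
    url='https://httpbin.org/get',
    query_params={
        'custname': 'John Doe',
        'custtel': '1234567890',
    },
)
```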