👺 v1.0.0: formatting tweaks + dev env enhancements #7

Merged · 17 commits · Jan 22, 2024
3 changes: 3 additions & 0 deletions .flake8
@@ -0,0 +1,3 @@
+[flake8]
+max-line-length = 88
+extend-ignore = E203
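These two settings track Black's formatting: Black wraps lines at 88 characters, and it puts spaces around the colon in slices whose bounds are expressions, which flake8's E203 check would otherwise flag as whitespace before punctuation. A minimal Python illustration of why E203 is ignored:

```python
# Black formats slices with complex bounds with a space before ":",
# which E203 ("whitespace before ':'") would report as an error.
items = list(range(10))
offset = 2
window = items[offset + 1 : len(items) - 1]  # Black-style slice; E203 ignored
```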
6 changes: 6 additions & 0 deletions .github/workflows/ci.yml
@@ -27,6 +27,12 @@ jobs:
       - name: Check code formatting with black
         run: |
           pipenv run black --check .
+
+      - name: Check imports with isort
+        run: pipenv run isort . --check-only
+
+      - name: Lint with flake8
+        run: pipenv run flake8 src/

       - name: Run pytest
         run: |
12 changes: 9 additions & 3 deletions .pre-commit-config.yaml
@@ -1,6 +1,12 @@
 repos:
 - repo: https://github.com/psf/black
-  rev: stable
+  rev: 23.12.1
   hooks:
-  - id: black
-    language_version: python3
+  - id: black
+    language_version: python3
+- repo: https://github.com/pycqa/isort
+  rev: 5.13.2
+  hooks:
+  - id: isort
+    language_version: python3
+    args: ["--profile", "black"]
6 changes: 4 additions & 2 deletions Pipfile
@@ -17,13 +17,15 @@ black = "*"
 pytest = "*"
 tomli = "*"
 exceptiongroup = "*"
-scrapy-sentry-errors = {file = "."}
 pre-commit = "*"
+scrapy-sentry-errors = {file = ".", editable = true}
+flake8 = "*"
+isort = "*"

 [scripts]
 black = "black ."
 setup = "bash scripts/dev-setup.sh"
 example = "bash scripts/example.sh"
 build = "bash scripts/build.sh"
-deploy = "bash scripts/deploy.sh"
+publish = "bash scripts/publish.sh"
 test = "pytest"
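Marking the local package `editable = true` installs it in development mode, so changes under `src/` take effect without reinstalling. A quick sanity check, assuming the installed package imports as `scrapy_sentry_errors` (matching the `src/scrapy_sentry_errors/` layout in this PR):

```python
# With an editable install, the module resolves into the working tree
# rather than a copied site-packages build.
import scrapy_sentry_errors.extensions as ext

print(ext.__file__)  # expected to point at src/scrapy_sentry_errors/extensions.py
```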
47 changes: 45 additions & 2 deletions Pipfile.lock

Some generated files are not rendered by default.

6 changes: 3 additions & 3 deletions README.md
@@ -6,7 +6,7 @@ A simple Scrapy extension that logs spider errors to your Sentry account, helpin

 ## 📋 Requirements

-- Python 3.8+
+- Python 3.8 or higher
 - A [Sentry](http://www.getsentry.com/) account.
 - The [DSN](https://docs.sentry.io/product/sentry-basics/concepts/dsn-explainer/) for your Sentry project.

@@ -53,7 +53,7 @@ Run the tests with:
 pipenv run test
 ```

-## Deployment
+## 📦 Publishing

 1. Bump the version number in `pyproject.toml`

@@ -64,7 +64,7 @@ pipenv run build

 3. Publish the build to PyPI with:
 ```
-pipenv run deploy
+pipenv run publish
 ```

 ## 🤝 Contributing
5 changes: 2 additions & 3 deletions example_project/example_project/middlewares.py
@@ -3,10 +3,9 @@
 # See documentation in:
 # https://docs.scrapy.org/en/latest/topics/spider-middleware.html

-from scrapy import signals
-
 # useful for handling different item types with a single interface
-from itemadapter import is_item, ItemAdapter
+from itemadapter import ItemAdapter, is_item
+from scrapy import signals


 class ExampleProjectSpiderMiddleware:
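The import shuffle here is what `isort` with the Black profile produces: sections ordered standard library, third-party, then first-party; within a section, plain `import x` statements come before `from x import y` lines, and from-imports are alphabetized (hence `itemadapter` now precedes `scrapy`). A small sketch of the convention, using modules from this PR:

```python
# Standard-library section first
import logging
from io import StringIO

# Third-party section: straight imports precede from-imports,
# and from-imports sort alphabetically by module name
import sentry_sdk
from itemadapter import ItemAdapter, is_item
from scrapy import signals
```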
6 changes: 3 additions & 3 deletions pyproject.toml
@@ -1,12 +1,12 @@
 [project]
 name = "scrapy-sentry-errors"
-version = "1.0.0-beta.2"
+version = "1.0.0"
 description = "Scrapy extension that logs errors to Sentry"
 authors = [
     {name = "City Bureau", email = "[email protected]"}
 ]
 dependencies = [
-    'scrapy>=2.0',
+    'scrapy>=2.5',
     'sentry-sdk>=1.0.0',
 ]
 requires-python = ">=3.8"

@@ -17,7 +17,7 @@ classifiers=[
     'Programming Language :: Python :: 3',
     'License :: OSI Approved :: BSD License',
     'Operating System :: OS Independent',
-    'Development Status :: 4 - Beta',
+    'Development Status :: 5 - Production/Stable',
     'Intended Audience :: Developers',
     'Environment :: Console',
     'Topic :: Software Development :: Libraries :: Application Frameworks',
File renamed without changes.
29 changes: 19 additions & 10 deletions src/scrapy_sentry_errors/extensions.py
@@ -1,11 +1,12 @@
 import logging
 from io import StringIO
-from typing import Optional, Dict, Any
-
-from scrapy import signals
-from scrapy.exceptions import NotConfigured
+from typing import Any, Dict, Optional

 import sentry_sdk
+from scrapy import signals
+from scrapy.crawler import Crawler
+from scrapy.exceptions import CloseSpider
+from twisted.python.failure import Failure


 class Errors(object):

@@ -34,8 +35,8 @@ def get_client(

     @classmethod
     def from_crawler(
-        cls, crawler: "scrapy.crawler.Crawler", dsn: Optional[str] = None
-    ) -> "Errors":
+        cls, crawler: Crawler, dsn: Optional[str] = None
+    ) -> "Errors":  # noqa
         """
         Create an instance of Errors from a Scrapy crawler.

@@ -47,16 +48,22 @@ def from_crawler(
             Errors: The Errors instance.

         Raises:
-            NotConfigured: If no SENTRY_DSN is configured.
+            CloseSpider: If no SENTRY_DSN is configured.
         """
         dsn = crawler.settings.get("SENTRY_DSN")
         if dsn is None:
-            raise NotConfigured("No SENTRY_DSN configured")
+            logging.log(logging.ERROR, "SENTRY_DSN is not configured")
+            raise CloseSpider(
+                reason="SENTRY_DSN must be configured to enable \
+                scrapy-sentry-errors extension"
+            )

         extension = cls(dsn=dsn)
         crawler.signals.connect(extension.spider_error, signal=signals.spider_error)
+        logging.log(logging.INFO, "Scrapy integration active")
         return extension

-    def spider_error(self, failure: "twisted.python.failure.Failure") -> None:
+    def spider_error(self, failure: Failure) -> None:
         """
         Handle spider errors by capturing exceptions and logging them to Sentry.

@@ -66,4 +73,6 @@ def spider_error(self, failure: "twisted.python.failure.Failure") -> None:
         traceback = StringIO()
         failure.printTraceback(file=traceback)
         self.client.capture_exception(failure.value)
-        logging.log(logging.WARNING, "Sentry Exception captured")
+        logging.log(
+            logging.INFO, "Exception captured by scrapy-sentry-errors extension"
+        )
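For context on how this extension is wired up: a Scrapy project enables it through its settings, providing the DSN that the `from_crawler` hook reads. A minimal sketch, assuming the class is exposed at `scrapy_sentry_errors.extensions.Errors` (the module path in this diff) and using a placeholder DSN:

```python
# settings.py (sketch) -- enable the extension for a Scrapy project
SENTRY_DSN = "https://examplePublicKey@o0.ingest.sentry.io/0"  # placeholder

EXTENSIONS = {
    # Module path assumed from src/scrapy_sentry_errors/extensions.py
    "scrapy_sentry_errors.extensions.Errors": 10,
}
```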
6 changes: 4 additions & 2 deletions tests/test_extensions.py
@@ -1,6 +1,8 @@
 from unittest.mock import MagicMock, patch

 import pytest
-from scrapy.exceptions import NotConfigured
+from scrapy.exceptions import CloseSpider
+
 from src.scrapy_sentry_errors.extensions import Errors


@@ -18,7 +20,7 @@ def test_initialization_with_valid_dsn(crawler_mock):

 def test_initialization_fails_without_dsn(crawler_mock):
     crawler_mock.settings = {"SENTRY_DSN": None}
-    with pytest.raises(NotConfigured):
+    with pytest.raises(CloseSpider):
         Errors.from_crawler(crawler_mock)

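The `crawler_mock` fixture sits outside the changed lines, so it is not shown in this diff. A hypothetical reconstruction consistent with how the tests use it (`settings` is a plain dict read via `.get()`, and signal connections are absorbed by the mock):

```python
import pytest
from unittest.mock import MagicMock


@pytest.fixture
def crawler_mock():
    # Hypothetical sketch; the real fixture lives outside this diff.
    crawler = MagicMock()
    crawler.settings = {"SENTRY_DSN": "https://examplePublicKey@o0.ingest.sentry.io/0"}
    return crawler
```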