autofixes for linting #366

Merged 1 commit on Nov 13, 2024
6 changes: 3 additions & 3 deletions alphadia/calibration/property.py
@@ -172,7 +172,7 @@ def fit(self, dataframe: pd.DataFrame, plot: bool = False, **kwargs):
self.function.fit(input_values, target_value)
self.is_fitted = True
except Exception as e:
logging.error(f"Could not fit estimator {self.name}: {e}")
logging.exception(f"Could not fit estimator {self.name}: {e}")
return

if plot is True:
@@ -200,13 +200,13 @@ def predict(self, dataframe, inplace=True):
logging.warning(
f"{self.name} prediction was skipped as it has not been fitted yet"
)
return
return None

if not set(self.input_columns).issubset(dataframe.columns):
logging.warning(
f"{self.name} calibration was skipped as input column {self.input_columns} not found in dataframe"
)
return
return None

input_values = dataframe[self.input_columns].values

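
The switch from `logging.error` to `logging.exception` matters because `logging.exception` is meant to be called from an `except` block: it logs at ERROR level and also records the traceback of the active exception. A minimal standalone sketch of the difference (not alphadia code):

```python
import logging

logging.basicConfig(level=logging.INFO)


def fit_estimator():
    try:
        raise ValueError("singular matrix")  # stand-in for a failing fit
    except Exception as e:
        # logging.error(f"...: {e}") would log only the message;
        # logging.exception additionally attaches the full traceback.
        logging.exception(f"Could not fit estimator: {e}")


fit_estimator()
```
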
2 changes: 0 additions & 2 deletions alphadia/data/alpharaw.py
@@ -318,8 +318,6 @@ def filter_spectra(self, **kwargs):
This function is implemented in the sub-class.
"""

pass

def jitclass(self):
return AlphaRawJIT(
self.cycle,
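
Dropping the trailing `pass` is safe here because a docstring is itself a complete function body, so the extra statement was redundant. A small illustration with a hypothetical class (not from alphadia):

```python
class BaseReader:
    def filter_spectra(self, **kwargs):
        """Filter spectra. This function is implemented in the sub-class."""
        # No `pass` needed: the docstring alone is a valid body,
        # and the method still returns None when called.


BaseReader().filter_spectra()  # returns None, no error
```
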
9 changes: 1 addition & 8 deletions alphadia/fdrexperimental.py
@@ -8,9 +8,8 @@
# third party imports
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
from sklearn import model_selection
from torch import nn, optim
from torchmetrics.classification import BinaryAUROC
from tqdm import tqdm

@@ -30,7 +29,6 @@ class Classifier(ABC):
@abstractmethod
def fitted(self):
"""Return whether the classifier has been fitted."""
pass

@abstractmethod
def fit(self, x: np.array, y: np.array):
@@ -46,7 +44,6 @@ def fit(self, x: np.array, y: np.array):
Target values of shape (n_samples,) or (n_samples, n_classes).

"""
pass

@abstractmethod
def predict(self, x: np.array):
@@ -65,7 +62,6 @@ def predict(self, x: np.array):
Predicted class of shape (n_samples,).

"""
pass

@abstractmethod
def predict_proba(self, x: np.array):
@@ -84,7 +80,6 @@ def predict_proba(self, x: np.array):
Predicted class probabilities of shape (n_samples, n_classes).

"""
pass

@abstractmethod
def to_state_dict(self):
@@ -97,7 +92,6 @@ def to_state_dict(self):
state_dict : dict
State dict of the classifier.
"""
pass

@abstractmethod
def from_state_dict(self, state_dict: dict):
@@ -111,7 +105,6 @@ def from_state_dict(self, state_dict: dict):
State dict of the classifier.

"""
pass


class BinaryClassifier(Classifier):
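
Besides removing the docstring-only `pass` statements (same pattern as in `alpharaw.py`), this file consolidates the torch imports. The rewrite is behavior-preserving: `from torch import nn, optim` binds the same module objects as the aliased `import torch.nn as nn` form that ruff flags. A quick check, assuming torch is installed:

```python
import torch
import torch.nn as legacy_nn
from torch import nn, optim

# Both spellings refer to the same submodule objects.
assert nn is legacy_nn
assert optim is torch.optim
```
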
1 change: 0 additions & 1 deletion alphadia/libtransform.py
@@ -32,7 +32,6 @@ class ProcessingStep:
def __init__(self) -> None:
"""Base class for processing steps. Each implementation must implement the `validate` and `forward` method.
Processing steps can be chained together in a ProcessingPipeline."""
pass

def __call__(self, *args: typing.Any) -> typing.Any:
"""Run the processing step on the input object."""
20 changes: 10 additions & 10 deletions alphadia/numba/fft.py
@@ -47,13 +47,13 @@ def rfft2(x: np.array, s: None | tuple = None) -> np.array:
@overload(rfft2, fastmath=True)
def _(x, s=None):
if not isinstance(x, nb.types.Array):
return
return None

if x.ndim != 2:
return
return None

if x.dtype != nb.types.float32:
return
return None

def funcx_impl(x, s=None):
s, axes = ndshape_and_axes(x, s, (-2, -1))
@@ -98,13 +98,13 @@ def irfft2(x: np.array, s: None | tuple = None) -> np.array:
@overload(irfft2, fastmath=True)
def _(x, s=None):
if not isinstance(x, nb.types.Array):
return
return None

if x.ndim != 2:
return
return None

if x.dtype != nb.types.complex64:
return
return None

def funcx_impl(x, s=None):
s, axes = ndshape_and_axes(x, s, (-2, -1))
@@ -161,16 +161,16 @@ def convolve_fourier(dense, kernel):
@overload(convolve_fourier, fastmath=True)
def _(dense, kernel):
if not isinstance(dense, nb.types.Array):
return
return None

if not isinstance(kernel, nb.types.Array):
return
return None

if kernel.ndim != 2:
return
return None

if dense.ndim < 2:
return
return None

if dense.ndim == 2:

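
The `return` → `return None` changes in these `@overload` functions are purely stylistic: in numba's overload protocol, returning `None` signals that the overload does not apply to the given argument types, while returning an implementation function accepts them. A reduced sketch of the same pattern, using a hypothetical `double` function rather than the FFT helpers:

```python
import numba as nb
import numpy as np
from numba.extending import overload


def double(x):
    """Pure-Python fallback."""
    return x * 2


@overload(double)
def _(x):
    if not isinstance(x, nb.types.Array):
        return None  # explicit None: these types are not handled, numba keeps looking

    def impl(x):
        return x * 2

    return impl  # returning an implementation accepts the argument types


@nb.njit
def run(x):
    return double(x)


print(run(np.arange(3)))  # [0 2 4]
```
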
4 changes: 3 additions & 1 deletion alphadia/numba/fragments.py
@@ -331,7 +331,9 @@ def get_ion_group_mapping(

score_group_intensity = np.zeros((len(ion_mz)), dtype=np.float32)

for precursor, mz, intensity in zip(ion_precursor, ion_mz, ion_intensity): # noqa: B905 ('strict' not supported by numba yet
for precursor, mz, intensity in zip(
ion_precursor, ion_mz, ion_intensity
): # ('strict' not supported by numba yet
# score_group_idx = precursor_group[precursor]

if len(grouped_mz) == 0 or np.abs(grouped_mz[-1] - mz) > EPSILON:
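
The `# noqa: B905` marker becomes unnecessary once B905 is ignored project-wide in `pyproject.toml` (see below); the plain comment stays because numba's `zip` does not accept the `strict=` keyword that the rule asks for. A short sketch of that constraint with a hypothetical jitted function:

```python
import numba as nb
import numpy as np


@nb.njit
def paired_sum(a, b):
    total = 0.0
    # In plain Python, B905 would suggest zip(a, b, strict=True);
    # inside numba-compiled code the keyword is not supported, so the bare form is used.
    for x, y in zip(a, b):
        total += x * y
    return total


print(paired_sum(np.arange(3.0), np.ones(3)))  # 3.0
```
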
4 changes: 2 additions & 2 deletions alphadia/outputtransform.py
@@ -116,7 +116,7 @@ def accumulate_frag_df(
raw_name, df = next(df_iterable, (None, None))
if df is None:
logger.warning(f"no frag file found for {raw_name}")
return
return None

df = prepare_df(df, self.psm_df, column=self.column)

@@ -886,7 +886,7 @@ def build_library(

if len(psm_df) == 0:
logger.warning("No precursors found, skipping library building")
return
return None

libbuilder = libtransform.MbrLibraryBuilder(
fdr=0.01,
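
Spelling out `return None` on the early-exit paths addresses ruff's flake8-return (`RET`) checks: when a function returns a value on some paths, the other paths should state the `None` explicitly. Roughly the shape being fixed, with a hypothetical loader rather than the alphadia methods:

```python
import pandas as pd


def load_frag_df(path: str) -> pd.DataFrame | None:
    """Hypothetical example of a function that returns a DataFrame or None."""
    if not path.endswith(".parquet"):
        # Bare `return` and `return None` behave identically at runtime;
        # the explicit form makes the None branch visible to readers and linters.
        return None
    return pd.read_parquet(path)
```
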
2 changes: 0 additions & 2 deletions alphadia/peakgroup/kernel.py
@@ -177,7 +177,6 @@ def get_dense_matrix(self, verbose: bool = True):
mobility_resolution = np.mean(np.diff(self.dia_data.mobility_values[::-1]))

if verbose:
pass
logger.info(
f"Duty cycle consists of {rt_datapoints} frames, {rt_resolution:.2f} seconds cycle time"
)
@@ -189,7 +188,6 @@
mobility_sigma = self.determine_mobility_sigma(mobility_resolution)

if verbose:
pass
logger.info(
f"FWHM in RT is {self.fwhm_rt:.2f} seconds, sigma is {rt_sigma:.2f}"
)
4 changes: 2 additions & 2 deletions alphadia/peakgroup/search.py
@@ -688,7 +688,7 @@ def build_candidates(
cycle_limits_list = np.zeros((peak_cycle_list.shape[0], 2), dtype="int32")

for candidate_rank, (scan_relative, cycle_relative) in enumerate(
zip(peak_scan_list, peak_cycle_list) # noqa: B905 ('strict' not supported by numba yet)
zip(peak_scan_list, peak_cycle_list) # ('strict' not supported by numba yet)
):
scan_limits_relative, cycle_limits_relative = numeric.symetric_limits_2d(
score,
@@ -738,7 +738,7 @@ def build_candidates(
peak_score_list,
scan_limits_list,
cycle_limits_list,
): # noqa: B905 ('strict' not supported by numba yet)
): # ('strict' not supported by numba yet)
# does not work anymore

scan_limits_absolute = numeric.wrap1(
8 changes: 4 additions & 4 deletions alphadia/peakgroup/utils.py
@@ -18,16 +18,16 @@ def assemble_isotope_mz(mono_mz, charge, isotope_intensity):
@overload(assemble_isotope_mz)
def _(mono_mz, charge, isotope_intensity):
if not isinstance(mono_mz, nb.types.Float):
return
return None

if not isinstance(charge, nb.types.Integer):
return
return None

if not isinstance(isotope_intensity, nb.types.Array):
return
return None

if isotope_intensity.ndim != 1:
return
return None

def funcx_impl(mono_mz, charge, isotope_intensity):
offset = np.arange(len(isotope_intensity)) * 1.0033548350700006 / charge
6 changes: 3 additions & 3 deletions alphadia/planning.py
@@ -70,9 +70,9 @@ def __init__(
reporting.init_logging(self.output_folder)

logger.progress(" _ _ ___ ___ _ ")
logger.progress(" __ _| |_ __| |_ __ _| \_ _| /_\ ")
logger.progress(" / _` | | '_ \ ' \\/ _` | |) | | / _ \ ")
logger.progress(" \__,_|_| .__/_||_\__,_|___/___/_/ \_\\")
logger.progress(r" __ _| |_ __| |_ __ _| \_ _| /_\ ")
logger.progress(" / _` | | '_ \\ ' \\/ _` | |) | | / _ \\ ")
logger.progress(" \\__,_|_| .__/_||_\\__,_|___/___/_/ \\_\\")
logger.progress(" |_| ")
logger.progress("")

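
The banner output is unchanged: the first line becomes a raw string so its backslashes need no escaping, while the remaining lines keep doubled `\\` sequences; both spellings produce the same characters. A standalone check of the idea (not the exact alphadia banner):

```python
# A raw string and an escaped string can spell the same backslash-heavy text.
raw = r" /_\ "
escaped = " /_\\ "
assert raw == escaped

print(raw)      # " /_\ "
print(escaped)  # " /_\ "
```
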
2 changes: 1 addition & 1 deletion alphadia/transferlearning/train.py
@@ -994,7 +994,7 @@ def finetune_ccs(self, psm_df: pd.DataFrame) -> pd.DataFrame:
logger.error(
"Failed to finetune CCS model. PSM dataframe does not contain mobility or ccs columns."
)
return
return None
if "ccs" not in psm_df.columns:
psm_df["ccs"] = mobility_to_ccs_for_df(psm_df, "mobility")
elif "mobility" not in psm_df.columns:
2 changes: 1 addition & 1 deletion alphadia/utils.py
@@ -8,13 +8,13 @@
# alpha family imports
import alphatims.bruker
import alphatims.utils
import matplotlib.patches as patches
import numba as nb
import numpy as np

# third party imports
import pandas as pd
import torch
from matplotlib import patches

logger = logging.getLogger()

9 changes: 0 additions & 9 deletions alphadia/workflow/optimization.py
@@ -57,12 +57,9 @@ def step(self, precursors_df: pd.DataFrame, fragments_df: pd.DataFrame):

"""

pass

@abstractmethod
def skip(self):
"""Record skipping of optimization. Can be overwritten with an empty method if there is no need to record skips."""
pass

def proceed_with_insufficient_precursors(self, precursors_df, fragments_df):
self.workflow.reporter.log_string(
@@ -80,7 +77,6 @@ def proceed_with_insufficient_precursors(self, precursors_df, fragments_df):
@abstractmethod
def plot(self):
"""Plots the progress of the optimization. Can be overwritten with an empty method if there is no need to plot the progress."""
pass

@abstractmethod
def _update_workflow():
@@ -92,7 +88,6 @@ def _update_workflow():
and FWHM_mobility

"""
pass

@abstractmethod
def _update_history():
@@ -107,7 +102,6 @@ def _update_history():
The filtered fragment dataframe for the search.

"""
pass


class AutomaticOptimizer(BaseOptimizer):
@@ -503,7 +497,6 @@ def _get_feature_value(


"""
pass


class TargetedOptimizer(BaseOptimizer):
@@ -611,11 +604,9 @@ def step(

def skip(self):
"""See base class."""
pass

def plot(self):
"""See base class"""
pass

def _update_workflow(self):
pass
33 changes: 31 additions & 2 deletions pyproject.toml
@@ -60,7 +60,7 @@ version = {attr = "alphadia.__version__"}
alphadia = "alphadia.cli:run"

[tool.ruff]
extend-exclude = ["misc/.bumpversion.cfg"]
extend-exclude = ["misc/.bumpversion.cfg", "tests"]

[tool.ruff.lint]
select = [
@@ -76,10 +76,39 @@ select = [
"SIM",
# isort
"I",
#"ALL"
]

ignore = [
"D",
"ANN",
"SLF001", # Private member accessed TODO this needs to be fixed in alphabase

"E501", # Line too long (ruff wraps code, but not docstrings)
"B028", # No explicit `stacklevel` keyword argument found (for warnings)
"B905" # This causes problems in numba code: `zip()` without an explicit `strict=` parameter
"B905", # This causes problems in numba code: `zip()` without an explicit `strict=` parameter
"COM812", #may cause conflicts when used with the formatter
"ISC001", #may cause conflicts when used with the formatter
"D211", # no-blank-line-before-class
"D213", # multi-line-summary-second-line
"S101", # Use of `assert` detected
"INP001", # implicit namespace package.
"ERA001", # Found commented-out code
"D203", # 1 blank line required before class docstring
"TD002", "TD003", "FIX002", # things around TO-DO
"PT011", #pytest.raises(ValueError) is too broad
"G004", "EM102", # Logging statement uses f-string
"TRY003", # Avoid specifying long messages outside the exception class
"ANN101", # Missing type annotation for `self` in method
"ANN102", # Missing type annotation for `cls` in classmethod
"ANN002", # Missing type annotation for `*args`
"ANN003", # Missing type annotation for `**kwargs
"FA102", # Missing `from __future__ import annotations
"EM101", # Exception must not use a string literal, assign to variable first
"D104", # Missing docstring in public package
"ANN204", # Missing return type annotation for special method `__init__`
"D401", # First line of docstring should be in imperative mood
"B023", # Function definition does not bind loop variable
"PD901", # Avoid using the generic variable name `df` for DataFrames"
"TCH003", # Move standard library import into a type-checking block
]
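
The expanded `ignore` list and the `tests` exclusion document which rules the autofix pass skipped; the code changes in this PR are the kind applied by ruff's fix mode. A hedged sketch of driving that from Python (the CLI is the usual entry point; the target path is illustrative):

```python
import subprocess

# Apply ruff's automatic fixes to the package, mirroring the changes in this PR.
# `ruff check --fix` rewrites safe fixes in place; formatting is a separate `ruff format` step.
subprocess.run(["ruff", "check", "--fix", "alphadia"], check=True)
```
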
2 changes: 1 addition & 1 deletion tests/performance_tests/1_brunner_2022_1ng_all.py
@@ -22,7 +22,7 @@
try:
test_dir = os.environ["TEST_DATA_DIR"]
except KeyError:
logging.error("TEST_DATA_DIR environtment variable not set")
logging.exception("TEST_DATA_DIR environtment variable not set")
raise KeyError from None

logging.info(f"Test data directory: {test_dir}")