Bump minimum tested Python version to 3.10 and use pyupgrade in pre-commit

- [pre-commit.ci] auto fixes from pre-commit.com hooks
  for more information, see https://pre-commit.ci

- pre-commit autoupdate
ml-evs committed Jan 18, 2024
1 parent 5b2335f commit 5c67438
Showing 17 changed files with 37 additions and 34 deletions.
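Most of the changes below are mechanical rewrites applied by the new pyupgrade hook, which replaces legacy idioms with their modern Python 3 equivalents. As a quick orientation, here is a minimal, runnable sketch of the two most frequent rewrites in this diff (variable names are illustrative placeholders, not taken from the repository):

# open() already defaults to mode "r", so the explicit mode is redundant:
#     with open(path, "r") as f:   ->   with open(path) as f:

# set(<generator>) is rewritten as a set comprehension:
records = [("a", 1), ("b", 2), ("a", 3)]
keys_old = set(k for k, _ in records)  # form flagged by pyupgrade
keys_new = {k for k, _ in records}     # rewritten form
assert keys_old == keys_new == {"a", "b"}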
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -56,7 +56,7 @@ jobs:
       fail-fast: false
       max-parallel: 2
       matrix:
-        python-version: ["3.9", "3.10"]
+        python-version: ["3.10", "3.11"]

     steps:
       - uses: actions/checkout@v3
9 changes: 7 additions & 2 deletions .pre-commit-config.yaml
@@ -24,7 +24,7 @@ repos:
       - id: mixed-line-ending

   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: "v0.1.4"
+    rev: "v0.1.13"
     hooks:
       - id: ruff
         args: [--fix]
@@ -36,8 +36,13 @@ repos:
       - id: prettier
         types_or: [javascript, jsx, vue, html, yaml]

+  - repo: https://github.com/asottile/pyupgrade
+    rev: v3.15.0
+    hooks:
+      - id: pyupgrade
+
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v1.6.1
+    rev: v1.8.0
     hooks:
       - id: mypy
         additional_dependencies: ["types-all", "pydantic~=1.10"]
4 changes: 2 additions & 2 deletions pydatalab/pydatalab/apps/nmr/utils.py
@@ -53,7 +53,7 @@ def read_bruker_1d(
     p_dic, p_data = ng.fileio.bruker.read_pdata(str(processed_data_dir))  # processing data

     try:
-        with open(os.path.join(processed_data_dir, "title"), "r") as f:
+        with open(os.path.join(processed_data_dir, "title")) as f:
             topspin_title = f.read()
     except FileNotFoundError:
         topspin_title = None
@@ -101,7 +101,7 @@ def read_topspin_txt(filename, sample_mass_mg=None, nscans=None):
     LEFTRIGHT_REGEX = r"# LEFT = (-?\d+\.\d+) ppm. RIGHT = (-?\d+\.\d+) ppm\."
     SIZE_REGEX = r"SIZE = (\d+)"

-    with open(filename, "r") as f:
+    with open(filename) as f:
         header = "".join(itertools.islice(f, MAX_HEADER_LINES))  # read the first 10 lines
         # print(header)
2 changes: 1 addition & 1 deletion pydatalab/pydatalab/apps/raman/blocks.py
@@ -33,7 +33,7 @@ def load(self, location: str | Path) -> tuple[pd.DataFrame, dict, list[str]]:
         if ext == ".txt":
             try:
                 header = []
-                with open(location, "r", encoding="cp1252") as f:
+                with open(location, encoding="cp1252") as f:
                     for line in f:
                         if line.startswith("#"):
                             header.append(line)
2 changes: 1 addition & 1 deletion pydatalab/pydatalab/apps/tga/parsers.py
@@ -27,7 +27,7 @@ def parse_mt_mass_spec_ascii(path: Path) -> Dict[str, Union[pd.DataFrame, Dict]]:
     if not path.exists():
         raise RuntimeError(f"Provided path does not exist: {path!r}")

-    with open(path, "r") as f:
+    with open(path) as f:
         # Read start of file until all header keys have been found
         max_header_lines = 8
         reads = 0
4 changes: 2 additions & 2 deletions pydatalab/pydatalab/apps/xrd/utils.py
@@ -24,7 +24,7 @@ def parse_xrdml(filename: str) -> pd.DataFrame:
         filename: The file to parse.
     """
-    with open(filename, "r") as f:
+    with open(filename) as f:
         s = f.read()

     start, end = getStartEnd(s)  # extract first and last angle
@@ -70,7 +70,7 @@ def convertSinglePattern(
         )
         return outfn

-    with open(filename, "r") as f:
+    with open(filename) as f:
         s = f.read()

     print(f"Processing file {filename}")
2 changes: 1 addition & 1 deletion pydatalab/pydatalab/backups.py
@@ -179,7 +179,7 @@ def create_backup(strategy: BackupStrategy) -> bool:
         sftp = client.open_sftp()
         try:
             sftp.chdir(path=str(strategy.location))
-        except IOError:
+        except OSError:
             sftp.mkdir(path=str(strategy.location))
             sftp.chdir(path=str(strategy.location))
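Note that the IOError → OSError swaps in this and the following files are purely cosmetic: IOError has been an alias of OSError since Python 3.3 (PEP 3151), so raising and catching behaviour is unchanged. A standalone check (the path is a placeholder):

assert IOError is OSError  # True on any Python 3 interpreter
try:
    open("/nonexistent/placeholder/path")
except OSError as exc:  # also catches what legacy code spelled as IOError
    print(type(exc).__name__)  # FileNotFoundError, a subclass of OSError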
2 changes: 1 addition & 1 deletion pydatalab/pydatalab/config.py
@@ -123,7 +123,7 @@ class ServerConfig(BaseSettings):
     """A model that provides settings for deploying the API."""

     SECRET_KEY: str = Field(
-        hashlib.sha512(((platform.platform() + str(platform.python_build)).encode())).hexdigest(),
+        hashlib.sha512((platform.platform() + str(platform.python_build)).encode()).hexdigest(),
         description="The secret key to use for Flask. This value should be changed and/or loaded from an environment variable for production deployments.",
     )
12 changes: 6 additions & 6 deletions pydatalab/pydatalab/file_utils.py
@@ -256,7 +256,7 @@ def get_file_info_by_id(
         {"_id": file_id, **get_default_permissions(user_only=False)}
     )
     if not file_info:
-        raise IOError(f"could not find file with id: {file_id} in db")
+        raise OSError(f"could not find file with id: {file_id} in db")

     file_info = File(**file_info)

@@ -291,7 +291,7 @@ def update_uploaded_file(file, file_id, last_modified=None, size_bytes=None):
     )

     if not updated_file_entry:
-        raise IOError(f"Issue with db update uploaded file {file.name} id {file_id}")
+        raise OSError(f"Issue with db update uploaded file {file.name} id {file_id}")

     updated_file_entry = File(**updated_file_entry)

@@ -422,7 +422,7 @@ def save_uploaded_file(
         {"$push": {"file_ObjectIds": inserted_id}},
     )
     if sample_update_result.modified_count != 1:
-        raise IOError(
+        raise OSError(
             f"db operation failed when trying to insert new file ObjectId into sample: {item_id}"
         )

@@ -496,7 +496,7 @@ def add_file_from_remote_directory(

     result = file_collection.insert_one(new_file_document.dict())
     if not result.acknowledged:
-        raise IOError(f"db operation failed when trying to insert new file. Result: {result}")
+        raise OSError(f"db operation failed when trying to insert new file. Result: {result}")

inserted_id = result.inserted_id

@@ -521,7 +521,7 @@ def add_file_from_remote_directory(
         {"$push": {"file_ObjectIds": inserted_id}},
     )
     if sample_update_result.modified_count != 1:
-        raise IOError(
+        raise OSError(
             f"db operation failed when trying to insert new file ObjectId into sample: {item_id}"
         )

@@ -562,7 +562,7 @@ def remove_file_from_sample(item_id: Union[str, ObjectId], file_id: Union[str, ObjectId]):
     )

     if sample_result.modified_count < 1:
-        raise IOError(
+        raise OSError(
             f"Failed to remove {file_id!r} from item {item_id!r}. Result: {sample_result.raw_result}"
         )

4 changes: 2 additions & 2 deletions pydatalab/pydatalab/models/cells.py
@@ -82,11 +82,11 @@ def add_missing_electrode_relationships(cls, values):

         existing_parthood_relationship_ids = set()
         if values.get("relationships") is not None:
-            existing_parthood_relationship_ids = set(
+            existing_parthood_relationship_ids = {
                 relationship.item_id
                 for relationship in values["relationships"]
                 if relationship.relation == RelationshipType.PARTHOOD
-            )
+            }
         else:
             values["relationships"] = []

4 changes: 2 additions & 2 deletions pydatalab/pydatalab/models/samples.py
@@ -29,11 +29,11 @@ def add_missing_synthesis_relationships(cls, values):
         if values.get("synthesis_constituents") is not None:
             existing_parent_relationship_ids = set()
             if values.get("relationships") is not None:
-                existing_parent_relationship_ids = set(
+                existing_parent_relationship_ids = {
                     relationship.item_id or relationship.refcode
                     for relationship in values["relationships"]
                     if relationship.relation == RelationshipType.PARENT
-                )
+                }
             else:
                 values["relationships"] = []

6 changes: 3 additions & 3 deletions pydatalab/pydatalab/models/traits.py
@@ -45,14 +45,14 @@ def add_missing_collection_relationships(cls, values):
         from pydatalab.models.relationships import TypedRelationship

         if values.get("collections") is not None:
-            new_ids = set(coll.immutable_id for coll in values["collections"])
+            new_ids = {coll.immutable_id for coll in values["collections"]}
             existing_collection_relationship_ids = set()
             if values.get("relationships") is not None:
-                existing_collection_relationship_ids = set(
+                existing_collection_relationship_ids = {
                     relationship.immutable_id
                     for relationship in values["relationships"]
                     if relationship.type == "collections"
-                )
+                }
             else:
                 values["relationships"] = []

2 changes: 1 addition & 1 deletion pydatalab/pydatalab/routes/v0_1/collections.py
@@ -164,7 +164,7 @@ def create_collection():

     errors = []
     if starting_members:
-        item_ids = set(d.get("item_id") for d in starting_members)
+        item_ids = {d.get("item_id") for d in starting_members}
         if None in item_ids:
             item_ids.remove(None)

6 changes: 3 additions & 3 deletions pydatalab/pydatalab/routes/v0_1/items.py
@@ -669,9 +669,9 @@ def get_item_data(item_id, load_blocks: bool = False):
     return_dict = json.loads(doc.json(exclude_unset=True))

     # create the files_data dictionary keyed by file ObjectId
-    files_data: Dict[ObjectId, Dict] = dict(
-        [(f["immutable_id"], f) for f in return_dict.get("files") or []]
-    )
+    files_data: Dict[ObjectId, Dict] = {
+        f["immutable_id"]: f for f in return_dict.get("files") or []
+    }

     return jsonify(
         {
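The dict rewrite above follows the same pattern as the set comprehensions: dict() over a list of key-value tuples builds a throwaway intermediate list, while the comprehension constructs the dict directly. A self-contained sketch with placeholder data:

files = [{"immutable_id": 1, "name": "a.txt"}, {"immutable_id": 2, "name": "b.txt"}]
files_data_old = dict([(f["immutable_id"], f) for f in files])  # flagged form
files_data_new = {f["immutable_id"]: f for f in files}          # comprehension
assert files_data_old == files_data_new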
4 changes: 2 additions & 2 deletions pydatalab/scripts/migrate_files_to_files_ObjectId_v2.py
@@ -65,7 +65,7 @@

 result = file_collection.insert_one(new_file_document)
 if not result.acknowledged:
-    raise IOError(f"db operation failed when trying to insert new file. Result: {result}")
+    raise OSError(f"db operation failed when trying to insert new file. Result: {result}")

 inserted_id = result.inserted_id

@@ -89,6 +89,6 @@
     {"sample_id": sample_id}, {"$push": {"file_ObjectIds": inserted_id}}
 )
 if sample_update_result.modified_count != 1:
-    raise IOError(
+    raise OSError(
         f"mdb operation failed when trying to insert new file ObjectId into sample: {sample_id}"
     )
2 changes: 1 addition & 1 deletion pydatalab/tasks.py
@@ -23,7 +23,7 @@ def update_file(filename: str, sub_line: Tuple[str, str], strip: str | None = None):
     Modified from optimade-python-tools.
     """
-    with open(filename, "r") as handle:
+    with open(filename) as handle:
         lines = [re.sub(sub_line[0], sub_line[1], line.rstrip(strip)) for line in handle]

     with open(filename, "w") as handle:
4 changes: 1 addition & 3 deletions pydatalab/tests/apps/test_echem_block.py
@@ -70,9 +70,7 @@ def test_filter_df_by_cycle_index(reduced_echem_dataframe):
     cycle_lists = ([1, 2, 3], [4.0, 6.0, 10.0], [-1, 5, 2])
     for cycle_list in cycle_lists:
         filtered_df = filter_df_by_cycle_index(reduced_echem_dataframe, cycle_list)
-        assert set(int(i) for i in filtered_df["full cycle"]).issubset(
-            set(int(i) for i in cycle_list)
-        )
+        assert {int(i) for i in filtered_df["full cycle"]}.issubset({int(i) for i in cycle_list})


def test_plot(reduced_echem_dataframe):
