From ca61829562404a6467230493ad60943724db3150 Mon Sep 17 00:00:00 2001
From: Joshua Vandaele
Date: Wed, 7 Feb 2024 10:00:06 +0100
Subject: [PATCH 1/6] fix: On Windows, updated files would not overwrite old ones in certain cases

---
 modules/updaters/ChromeOS.py      | 9 +++++++--
 modules/updaters/FreeDOS.py       | 9 +++++++--
 modules/updaters/MemTest86Plus.py | 9 +++++++--
 modules/updaters/SuperGrub2.py    | 9 +++++++--
 4 files changed, 28 insertions(+), 8 deletions(-)

diff --git a/modules/updaters/ChromeOS.py b/modules/updaters/ChromeOS.py
index 916e472..28e7582 100644
--- a/modules/updaters/ChromeOS.py
+++ b/modules/updaters/ChromeOS.py
@@ -1,6 +1,6 @@
-from functools import cache
 import os
 import zipfile
+from functools import cache
 
 import requests
 
@@ -92,7 +92,12 @@ def install_latest_version(self) -> None:
             )
 
             extracted_file = z.extract(to_extract, path=os.path.dirname(new_file))
-            os.rename(extracted_file, new_file)
+            try:
+                os.rename(extracted_file, new_file)
+            except FileExistsError:
+                # On Windows, files are not overwritten by default, so we need to remove the old file first
+                os.remove(new_file)
+                os.rename(extracted_file, new_file)
 
         os.remove(archive_path)
         if local_file:
diff --git a/modules/updaters/FreeDOS.py b/modules/updaters/FreeDOS.py
index adcb12a..6d93273 100644
--- a/modules/updaters/FreeDOS.py
+++ b/modules/updaters/FreeDOS.py
@@ -1,8 +1,8 @@
-from functools import cache
 import glob
 import os
 import re
 import zipfile
+from functools import cache
 
 import requests
 from bs4 import BeautifulSoup
@@ -132,7 +132,12 @@ def install_latest_version(self) -> None:
             )
 
             extracted_file = z.extract(to_extract, path=os.path.dirname(new_file))
-            os.rename(extracted_file, new_file.replace("[[EXT]]", file_ext))
+            try:
+                os.rename(extracted_file, new_file.replace("[[EXT]]", file_ext))
+            except FileExistsError:
+                # On Windows, files are not overwritten by default, so we need to remove the old file first
+                os.remove(new_file)
+                os.rename(extracted_file, new_file.replace("[[EXT]]", file_ext))
 
         os.remove(archive_path)
         if local_file:
diff --git a/modules/updaters/MemTest86Plus.py b/modules/updaters/MemTest86Plus.py
index 73e26aa..6069a23 100644
--- a/modules/updaters/MemTest86Plus.py
+++ b/modules/updaters/MemTest86Plus.py
@@ -1,6 +1,6 @@
-from functools import cache
 import os
 import zipfile
+from functools import cache
 
 import requests
 from bs4 import BeautifulSoup
@@ -120,7 +120,12 @@ def install_latest_version(self) -> None:
             os.remove(local_file)  # type: ignore
         os.remove(archive_path)
 
-        os.rename(extracted_file, new_file)
+        try:
+            os.rename(extracted_file, new_file)
+        except FileExistsError:
+            # On Windows, files are not overwritten by default, so we need to remove the old file first
+            os.remove(new_file)
+            os.rename(extracted_file, new_file)
 
     @cache
     def _get_latest_version(self) -> list[str]:
diff --git a/modules/updaters/SuperGrub2.py b/modules/updaters/SuperGrub2.py
index 62bf4f7..81dbba0 100644
--- a/modules/updaters/SuperGrub2.py
+++ b/modules/updaters/SuperGrub2.py
@@ -1,6 +1,6 @@
-from functools import cache
 import os
 import zipfile
+from functools import cache
 
 import requests
 from bs4 import BeautifulSoup
@@ -105,7 +105,12 @@ def install_latest_version(self) -> None:
             os.remove(local_file)  # type: ignore
         os.remove(archive_path)
 
-        os.rename(extracted_file, new_file)
+        try:
+            os.rename(extracted_file, new_file)
+        except FileExistsError:
+            # On Windows, files are not overwritten by default, so we need to remove the old file first
+            os.remove(new_file)
+            os.rename(extracted_file, new_file)
 
     @cache
    def 
_get_latest_version(self) -> list[str]: From 794bd6068b7aa3bb3663ca1554055c13f1c047b9 Mon Sep 17 00:00:00 2001 From: Joshua Vandaele Date: Wed, 7 Feb 2024 10:43:49 +0100 Subject: [PATCH 2/6] feat: Improve debugging information --- modules/updaters/GenericUpdater.py | 26 +++++++++++++++++++++++++ modules/updaters/util_update_checker.py | 10 ++++++++-- modules/utils.py | 5 +++++ 3 files changed, 39 insertions(+), 2 deletions(-) diff --git a/modules/updaters/GenericUpdater.py b/modules/updaters/GenericUpdater.py index b652f93..7aa5eee 100644 --- a/modules/updaters/GenericUpdater.py +++ b/modules/updaters/GenericUpdater.py @@ -29,6 +29,9 @@ def __init__(self, file_path: str, *args, **kwargs) -> None: self.version_splitter = "." if self.has_edition(): + logging.debug( + f"[GenericUpdater.__init__] {self.__class__.__name__} has edition support" + ) if self.edition.lower() not in ( # type: ignore valid_edition.lower() for valid_edition in self.valid_editions # type: ignore ): @@ -37,6 +40,9 @@ def __init__(self, file_path: str, *args, **kwargs) -> None: ) if self.has_lang(): + logging.debug( + f"[GenericUpdater.__init__] {self.__class__.__name__} has language support" + ) if self.lang.lower() not in ( # type: ignore valid_lang.lower() for valid_lang in self.valid_langs # type: ignore ): @@ -77,6 +83,9 @@ def check_for_updates(self) -> bool: bool: True if updates are available, False if the local version is up to date. """ if not (local_version := self._get_local_version()): + logging.debug( + f"[GenericUpdater.check_for_updates] No local version found for {self.__class__.__name__}" + ) return True is_update_available = self._compare_version_numbers( @@ -104,6 +113,9 @@ def install_latest_version(self) -> None: if not versioning_flag: # If the file is being replaced, back it up if old_file: + logging.debug( + f"[GenericUpdater.install_latest_version] Renaming old file: {old_file}" + ) old_file += ".old" os.replace(self.file_path, old_file) @@ -132,6 +144,9 @@ def install_latest_version(self) -> None: # If the installation was successful and we had a previous version installed, remove it if old_file: + logging.debug( + f"[GenericUpdater.install_latest_version] Removing old file: {old_file}" + ) os.remove(old_file) def has_edition(self) -> bool: @@ -178,6 +193,9 @@ def _get_local_file(self) -> str | None: if local_files: return local_files[0] + logging.debug( + f"[GenericUpdater._get_local_file] No local file found for {self.__class__.__name__}" + ) return None def _get_local_version(self) -> list[str] | None: @@ -193,6 +211,9 @@ def _get_local_version(self) -> list[str] | None: local_file = self._get_local_file() if not local_file or "[[VER]]" not in self.file_path: + logging.debug( + f"[GenericUpdater._get_local_version] No local version found for {self.__class__.__name__}" + ) return None normalized_path_without_ext: str = os.path.splitext( @@ -212,6 +233,11 @@ def _get_local_version(self) -> list[str] | None: if local_version_regex: local_version = self._str_to_version(local_version_regex.group(1)) + if not local_version: + logging.debug( + f"[GenericUpdater._get_local_version] No local version found for {self.__class__.__name__}" + ) + return local_version def _get_latest_version(self) -> list[str]: diff --git a/modules/updaters/util_update_checker.py b/modules/updaters/util_update_checker.py index 3293dda..e839f7c 100644 --- a/modules/updaters/util_update_checker.py +++ b/modules/updaters/util_update_checker.py @@ -1,4 +1,6 @@ +import logging from functools import cache + import requests @@ 
-13,12 +15,14 @@ def github_get_latest_version(owner: str, repo: str) -> dict: Returns: dict: the full release information """ - res = {} - api_url = f"https://api.github.com/repos/{owner}/{repo}" + logging.debug(f"Fetching latest release from {api_url}") + release = requests.get(f"{api_url}/releases/latest").json() + logging.debug(f"GitHub release fetched from {api_url}: {release}") + return release @@ -41,4 +45,6 @@ def parse_github_release(release: dict) -> dict: for asset in release["assets"]: res["files"][asset["name"]] = asset["browser_download_url"] + logging.debug(f"GitHub release parsed: {res}") + return res diff --git a/modules/utils.py b/modules/utils.py index b0e2a2c..f594237 100644 --- a/modules/utils.py +++ b/modules/utils.py @@ -57,9 +57,11 @@ def parse_config_from_dict(input_dict: dict): del value continue if "directory" in value: + logging.debug(f"Found directory {value['directory']}") new_key = value["directory"] del value["directory"] else: + logging.debug(f"Found module {key}") new_key = key new_dict[new_key] = parse_config_from_dict(value) elif key == "enabled": @@ -215,6 +217,9 @@ def parse_hash( Returns: The extracted hash value. """ + logging.debug( + f"[parse_hash] Parsing hashes with match strings `{match_strings_in_line}` and hash position {hash_position_in_line} in those hashes:\n{hashes}" + ) return next( line.split()[hash_position_in_line] for line in hashes.strip().splitlines() From 4df00fe378d09b5a3cb610865ab1be3ddf8f9a63 Mon Sep 17 00:00:00 2001 From: Joshua Vandaele Date: Wed, 7 Feb 2024 11:09:45 +0100 Subject: [PATCH 3/6] chore: Remove unused imports --- sisou.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sisou.py b/sisou.py index 2517385..79542d0 100644 --- a/sisou.py +++ b/sisou.py @@ -1,13 +1,13 @@ -from abc import ABCMeta import argparse -from functools import cache import logging import os +from abc import ABCMeta +from functools import cache from typing import Type import modules.updaters from modules.updaters import GenericUpdater -from modules.utils import logging_critical_exception, parse_config +from modules.utils import parse_config @cache From 2d4a07b0874c3a93c75bcfe352314dc2b5bc39ff Mon Sep 17 00:00:00 2001 From: Joshua Vandaele Date: Wed, 7 Feb 2024 15:08:35 +0100 Subject: [PATCH 4/6] refactor: Use pathlib to manipulate paths --- modules/updaters/ArchLinux.py | 10 ++-- modules/updaters/ChromeOS.py | 24 ++++----- modules/updaters/Clonezilla.py | 6 +-- modules/updaters/Debian.py | 22 +++++---- modules/updaters/Fedora.py | 6 +-- modules/updaters/FreeDOS.py | 35 ++++++++------ modules/updaters/GPartedLive.py | 6 +-- modules/updaters/GenericUpdater.py | 78 +++++++++++++++++------------- modules/updaters/HDAT2.py | 9 ++-- modules/updaters/HirensBootCDPE.py | 6 +-- modules/updaters/KaliLinux.py | 27 ++++++----- modules/updaters/LinuxMint.py | 10 ++-- modules/updaters/Manjaro.py | 10 ++-- modules/updaters/MemTest86Plus.py | 24 ++++----- modules/updaters/OpenSUSE.py | 6 +-- modules/updaters/Rescuezilla.py | 10 ++-- modules/updaters/RockyLinux.py | 8 +-- modules/updaters/ShredOS.py | 6 +-- modules/updaters/SuperGrub2.py | 24 ++++----- modules/updaters/SystemRescue.py | 12 ++--- modules/updaters/Tails.py | 9 ++-- modules/updaters/TempleOS.py | 8 +-- modules/updaters/TrueNAS.py | 6 +-- modules/updaters/Ubuntu.py | 6 +-- modules/updaters/UltimateBootCD.py | 6 +-- modules/updaters/Windows10.py | 8 +-- modules/updaters/Windows11.py | 6 +-- modules/utils.py | 42 ++++++++-------- sisou.py | 33 +++++++------ 29 files changed, 246 
insertions(+), 217 deletions(-) diff --git a/modules/updaters/ArchLinux.py b/modules/updaters/ArchLinux.py index d727b7e..6575468 100644 --- a/modules/updaters/ArchLinux.py +++ b/modules/updaters/ArchLinux.py @@ -1,5 +1,5 @@ from functools import cache -import os +from pathlib import Path import requests from bs4 import BeautifulSoup @@ -25,8 +25,8 @@ class ArchLinux(GenericUpdater): This class inherits from the abstract base class GenericUpdater. """ - def __init__(self, folder_path: str) -> None: - file_path = os.path.join(folder_path, FILE_NAME) + def __init__(self, folder_path: Path) -> None: + file_path = folder_path / FILE_NAME super().__init__(file_path) self.download_page = requests.get(DOWNLOAD_PAGE_URL) @@ -51,7 +51,9 @@ def check_integrity(self) -> bool: sha256_sums = requests.get(sha256_url).text sha256_sum = parse_hash( - sha256_sums, [self._get_complete_normalized_file_path(absolute=False)], 0 + sha256_sums, + [str(self._get_complete_normalized_file_path(absolute=False))], + 0, ) return sha256_hash_check( diff --git a/modules/updaters/ChromeOS.py b/modules/updaters/ChromeOS.py index 28e7582..e5299cf 100644 --- a/modules/updaters/ChromeOS.py +++ b/modules/updaters/ChromeOS.py @@ -1,6 +1,6 @@ -import os import zipfile from functools import cache +from pathlib import Path import requests @@ -26,11 +26,11 @@ class ChromeOS(GenericUpdater): This class inherits from the abstract base class GenericUpdater. """ - def __init__(self, folder_path: str, edition: str) -> None: + def __init__(self, folder_path: Path, edition: str) -> None: self.valid_editions = ["ltc", "ltr", "stable"] self.edition = edition.lower() - file_path = os.path.join(folder_path, FILE_NAME) + file_path = Path(folder_path) / FILE_NAME super().__init__(file_path) self.chromium_releases_info: list[dict] = requests.get( @@ -51,7 +51,7 @@ def check_integrity(self) -> bool: sha1_sum = self.cur_edition_info["sha1"] return sha1_hash_check( - self._get_complete_normalized_file_path(absolute=True) + ".zip", + self._get_complete_normalized_file_path(absolute=True).with_suffix(".zip"), sha1_sum, ) @@ -66,7 +66,7 @@ def install_latest_version(self) -> None: new_file = self._get_complete_normalized_file_path(absolute=True) - archive_path = f"{new_file}.zip" + archive_path = Path(new_file).with_suffix(".zip") local_file = self._get_local_file() @@ -80,7 +80,7 @@ def install_latest_version(self) -> None: ) from e if not integrity_check: - os.remove(archive_path) + archive_path.unlink() raise IntegrityCheckError("Integrity check failed: Hashes do not match") with zipfile.ZipFile(archive_path) as z: @@ -91,17 +91,17 @@ def install_latest_version(self) -> None: file for file in file_list if file.lower().endswith(file_ext) ) - extracted_file = z.extract(to_extract, path=os.path.dirname(new_file)) + extracted_file = Path(z.extract(to_extract, path=new_file.parent)) try: - os.rename(extracted_file, new_file) + extracted_file.rename(new_file) except FileExistsError: # On Windows, files are not overwritten by default, so we need to remove the old file first - os.remove(new_file) - os.rename(extracted_file, new_file) + new_file.unlink() + extracted_file.rename(new_file) - os.remove(archive_path) + archive_path.unlink() if local_file: - os.remove(local_file) # type: ignore + local_file.unlink() @cache def _get_latest_version(self) -> list[str]: diff --git a/modules/updaters/Clonezilla.py b/modules/updaters/Clonezilla.py index 08cdfc4..a5a4baa 100644 --- a/modules/updaters/Clonezilla.py +++ b/modules/updaters/Clonezilla.py @@ -1,5 +1,5 @@ 
from functools import cache -import os +from pathlib import Path import requests from bs4 import BeautifulSoup, Tag @@ -20,8 +20,8 @@ class Clonezilla(GenericUpdater): This class inherits from the abstract base class GenericUpdater. """ - def __init__(self, folder_path: str) -> None: - file_path = os.path.join(folder_path, FILE_NAME) + def __init__(self, folder_path: Path) -> None: + file_path = folder_path / FILE_NAME super().__init__(file_path) @cache diff --git a/modules/updaters/Debian.py b/modules/updaters/Debian.py index a33ae07..ebda5cc 100644 --- a/modules/updaters/Debian.py +++ b/modules/updaters/Debian.py @@ -1,5 +1,5 @@ from functools import cache -import os +from pathlib import Path import requests from bs4 import BeautifulSoup, Tag @@ -28,7 +28,7 @@ class Debian(GenericUpdater): This class inherits from the abstract base class GenericUpdater. """ - def __init__(self, folder_path: str, edition: str) -> None: + def __init__(self, folder_path: Path, edition: str) -> None: self.valid_editions = [ "cinnamon", "gnome", @@ -42,7 +42,7 @@ def __init__(self, folder_path: str, edition: str) -> None: self.edition = edition.lower() - file_path = os.path.join(folder_path, FILE_NAME) + file_path = folder_path / FILE_NAME super().__init__(file_path) # Make the parameter case insensitive, and find back the correct case using valid_editions @@ -77,7 +77,9 @@ def check_integrity(self) -> bool: sha256_sums = requests.get(sha256_url).text sha256_sum = parse_hash( - sha256_sums, [self._get_complete_normalized_file_path(absolute=False)], 0 + sha256_sums, + [str(self._get_complete_normalized_file_path(absolute=False))], + 0, ) return sha256_hash_check( @@ -94,11 +96,13 @@ def _get_latest_version(self) -> list[str]: latest = next( href for a_tag in download_a_tags - if self._get_normalized_file_path( - absolute=False, - version=None, - edition=self.edition if self.has_edition() else None, # type: ignore - lang=self.lang if self.has_lang() else None, # type: ignore + if str( + self._get_normalized_file_path( + absolute=False, + version=None, + edition=self.edition if self.has_edition() else None, # type: ignore + lang=self.lang if self.has_lang() else None, # type: ignore + ) ).split("[[VER]]")[-1] in (href := a_tag.get("href")) ) diff --git a/modules/updaters/Fedora.py b/modules/updaters/Fedora.py index 95be244..c9935b6 100644 --- a/modules/updaters/Fedora.py +++ b/modules/updaters/Fedora.py @@ -1,5 +1,5 @@ from functools import cache -import os +from pathlib import Path import requests from bs4 import BeautifulSoup, Tag @@ -27,7 +27,7 @@ class Fedora(GenericUpdater): This class inherits from the abstract base class GenericUpdater. 
""" - def __init__(self, folder_path: str, edition: str) -> None: + def __init__(self, folder_path: Path, edition: str) -> None: self.valid_editions = [ "Budgie", "Cinnamon", @@ -41,7 +41,7 @@ def __init__(self, folder_path: str, edition: str) -> None: ] self.edition = edition - file_path = os.path.join(folder_path, FILE_NAME) + file_path = folder_path / FILE_NAME super().__init__(file_path) # Make the parameter case insensitive, and find back the correct case using valid_editions diff --git a/modules/updaters/FreeDOS.py b/modules/updaters/FreeDOS.py index 6d93273..bf16709 100644 --- a/modules/updaters/FreeDOS.py +++ b/modules/updaters/FreeDOS.py @@ -1,8 +1,9 @@ import glob -import os +import logging import re import zipfile from functools import cache +from pathlib import Path import requests from bs4 import BeautifulSoup @@ -28,7 +29,7 @@ class FreeDOS(GenericUpdater): This class inherits from the abstract base class GenericUpdater. """ - def __init__(self, folder_path: str, edition: str) -> None: + def __init__(self, folder_path: Path, edition: str) -> None: self.valid_editions = [ "BonusCD", "FloppyEdition", @@ -39,7 +40,7 @@ def __init__(self, folder_path: str, edition: str) -> None: ] self.edition = edition - file_path = os.path.join(folder_path, FILE_NAME) + file_path = folder_path / FILE_NAME super().__init__(file_path) # Make the parameter case insensitive, and find back the correct case using valid_editions @@ -85,7 +86,7 @@ def check_integrity(self) -> bool: return sha256_hash_check( self._get_normalized_file_path( True, self._get_latest_version(), self.edition - ).replace("[[EXT]]", "zip"), + ).with_suffix(".zip"), sha256_sum, ) @@ -99,7 +100,7 @@ def install_latest_version(self) -> None: download_link = self._get_download_link() new_file = self._get_complete_normalized_file_path(absolute=True) - archive_path = new_file.replace("[[EXT]]", "zip") + archive_path = new_file.with_suffix(".zip") local_file = self._get_local_file() @@ -115,7 +116,7 @@ def install_latest_version(self) -> None: ) from e if not integrity_check: - os.remove(archive_path) + archive_path.unlink() raise IntegrityCheckError("Integrity check failed: Hashes do not match") with zipfile.ZipFile(archive_path) as z: @@ -131,31 +132,35 @@ def install_latest_version(self) -> None: file for file in file_list if file.upper().endswith(file_ext) ) - extracted_file = z.extract(to_extract, path=os.path.dirname(new_file)) + extracted_file = Path(z.extract(to_extract, path=new_file.parent)) try: - os.rename(extracted_file, new_file.replace("[[EXT]]", file_ext)) + extracted_file.rename(new_file.with_suffix(file_ext)) except FileExistsError: # On Windows, files are not overwritten by default, so we need to remove the old file first - os.remove(new_file) - os.rename(extracted_file, new_file.replace("[[EXT]]", file_ext)) + new_file.unlink() + extracted_file.rename(new_file.with_suffix(file_ext)) - os.remove(archive_path) + archive_path.unlink() if local_file: os.remove(local_file) # type: ignore - def _get_local_file(self) -> str | None: + def _get_local_file(self) -> Path | None: file_path = self._get_normalized_file_path( absolute=True, version=None, - edition=self.edition, + edition=self.edition if self.has_edition() else None, # type: ignore + lang=self.lang if self.has_lang() else None, # type: ignore ) local_files = glob.glob( - file_path.replace("[[VER]]", "*").replace("[[EXT]]", "*") + str(file_path.with_suffix(".*")).replace("[[VER]]", "*") ) if local_files: - return local_files[0] + return Path(local_files[0]) + 
logging.debug( + f"[FreeDOS._get_local_file] No local file found for {self.__class__.__name__}" + ) return None @cache diff --git a/modules/updaters/GPartedLive.py b/modules/updaters/GPartedLive.py index 8aa6eb6..17bfcc9 100644 --- a/modules/updaters/GPartedLive.py +++ b/modules/updaters/GPartedLive.py @@ -1,5 +1,5 @@ from functools import cache -import os +from pathlib import Path import requests @@ -19,8 +19,8 @@ class GPartedLive(GenericUpdater): This class inherits from the abstract base class GenericUpdater. """ - def __init__(self, folder_path: str) -> None: - file_path = os.path.join(folder_path, FILE_NAME) + def __init__(self, folder_path: Path) -> None: + file_path = folder_path / FILE_NAME super().__init__(file_path) self.checksum_file: str = requests.get( diff --git a/modules/updaters/GenericUpdater.py b/modules/updaters/GenericUpdater.py index 7aa5eee..e757985 100644 --- a/modules/updaters/GenericUpdater.py +++ b/modules/updaters/GenericUpdater.py @@ -1,8 +1,8 @@ import glob import logging -import os import re from abc import ABC, abstractmethod +from pathlib import Path from modules.exceptions import IntegrityCheckError from modules.utils import download_file @@ -13,18 +13,18 @@ class GenericUpdater(ABC): Abstract base class for a generic updater that manages software updates. Attributes: - file_path (str): The path to the file that needs to be updated. + file_path (Path): The path to the file that needs to be updated. """ - def __init__(self, file_path: str, *args, **kwargs) -> None: + def __init__(self, file_path: Path, *args, **kwargs) -> None: """ Initialize the GenericUpdater instance. Args: - file_path (str): The path to the file that needs to be updated. + file_path (Path): The path to the file that needs to be updated. """ - self.file_path = os.path.abspath(file_path) - self.folder_path = os.path.dirname(file_path) + self.file_path = file_path.resolve() + self.folder_path = file_path.parent.resolve() self.version_splitter = "." @@ -50,7 +50,7 @@ def __init__(self, file_path: str, *args, **kwargs) -> None: f"Invalid language. The available languages are: {', '.join(self.valid_langs)}." # type: ignore ) - os.makedirs(self.folder_path, exist_ok=True) + self.folder_path.mkdir(parents=True, exist_ok=True) @abstractmethod def _get_download_link(self) -> str: @@ -104,20 +104,18 @@ def install_latest_version(self) -> None: IntegrityCheckError: If the integrity check of the downloaded file fails. 
""" download_link = self._get_download_link() - versioning_flag: bool = "[[VER]]" in self.file_path # Determine the old and new file paths old_file = self._get_local_file() new_file = self._get_complete_normalized_file_path(absolute=True) - if not versioning_flag: + if not self.has_version(): # If the file is being replaced, back it up if old_file: logging.debug( f"[GenericUpdater.install_latest_version] Renaming old file: {old_file}" ) - old_file += ".old" - os.replace(self.file_path, old_file) + old_file.with_suffix(".old").replace(old_file) download_file(download_link, new_file) @@ -126,20 +124,20 @@ def install_latest_version(self) -> None: integrity_check = self.check_integrity() except Exception as e: # If integrity check failed, restore the old file or remove the new file - if versioning_flag or not old_file: - os.remove(new_file) + if self.has_version() or not old_file: + new_file.unlink() else: - os.replace(old_file, new_file) + old_file.replace(new_file) raise IntegrityCheckError( "Integrity check failed: An error occurred" ) from e if not integrity_check: # If integrity check failed, restore the old file or remove the new file - if versioning_flag or not old_file: - os.remove(new_file) + if self.has_version() or not old_file: + new_file.unlink() else: - os.replace(old_file, new_file) + old_file.replace(new_file) raise IntegrityCheckError("Integrity check failed: Hashes do not match") # If the installation was successful and we had a previous version installed, remove it @@ -147,7 +145,16 @@ def install_latest_version(self) -> None: logging.debug( f"[GenericUpdater.install_latest_version] Removing old file: {old_file}" ) - os.remove(old_file) + old_file.unlink() + + def has_version(self) -> bool: + """ + Check if the updater supports different versions. + + Returns: + bool: True if different versions are supported, False otherwise. + """ + return "[[VER]]" in str(self.file_path) def has_edition(self) -> bool: """ @@ -159,7 +166,7 @@ def has_edition(self) -> bool: return ( hasattr(self, "edition") and hasattr(self, "valid_editions") - and "[[EDITION]]" in self.file_path + and "[[EDITION]]" in str(self.file_path) ) def has_lang(self) -> bool: @@ -172,10 +179,10 @@ def has_lang(self) -> bool: return ( hasattr(self, "lang") and hasattr(self, "valid_langs") - and "[[LANG]]" in self.file_path + and "[[LANG]]" in str(self.file_path) ) - def _get_local_file(self) -> str | None: + def _get_local_file(self) -> Path | None: """ Get the path of the locally stored file that matches the filename pattern. 
@@ -189,10 +196,10 @@ def _get_local_file(self) -> str | None: lang=self.lang if self.has_lang() else None, # type: ignore ) - local_files = glob.glob(file_path.replace("[[VER]]", "*")) + local_files = glob.glob(str(file_path).replace("[[VER]]", "*")) if local_files: - return local_files[0] + return Path(local_files[0]) logging.debug( f"[GenericUpdater._get_local_file] No local file found for {self.__class__.__name__}" ) @@ -210,25 +217,26 @@ def _get_local_version(self) -> list[str] | None: local_file = self._get_local_file() - if not local_file or "[[VER]]" not in self.file_path: + if not local_file or not self.has_version(): logging.debug( f"[GenericUpdater._get_local_version] No local version found for {self.__class__.__name__}" ) return None - normalized_path_without_ext: str = os.path.splitext( + normalized_path_without_ext = Path( self._get_normalized_file_path( absolute=True, version=None, edition=self.edition if self.has_edition() else None, # type: ignore lang=self.lang if self.has_lang() else None, # type: ignore ) - )[0] + ).with_suffix("") version_regex: str = r"(.+)".join( - re.escape(part) for part in normalized_path_without_ext.split("[[VER]]") + re.escape(part) + for part in str(normalized_path_without_ext).split("[[VER]]") ) - local_version_regex = re.search(version_regex, local_file) + local_version_regex = re.search(version_regex, str(local_file)) if local_version_regex: local_version = self._str_to_version(local_version_regex.group(1)) @@ -260,7 +268,7 @@ def _get_normalized_file_path( version: list[str] | None = None, edition: str | None = None, lang: str | None = None, - ) -> str: + ) -> Path: """ Get the normalized file path with customizable version, edition, and language. @@ -275,13 +283,13 @@ def _get_normalized_file_path( Defaults to None. Returns: - str: The normalized file path. + Path: The normalized file path. Note: This method replaces placeholders such as '[[VER]]', '[[EDITION]]', and '[[LANG]]' in the file name with the specified version, edition, and language respectively. It also removes all spaces from the file name. """ - file_name: str = os.path.basename(self.file_path) + file_name: str = self.file_path.name # Replace placeholders with the specified version, edition, and language if version is not None and "[[VER]]" in file_name: @@ -297,9 +305,11 @@ def _get_normalized_file_path( file_name = "".join(file_name.split()) # Return the absolute or relative file path based on the 'absolute' parameter - return os.path.join(self.folder_path, file_name) if absolute else file_name + return self.folder_path / file_name if absolute else Path(file_name) - def _get_complete_normalized_file_path(self, absolute: bool, latest: bool = True): + def _get_complete_normalized_file_path( + self, absolute: bool, latest: bool = True + ) -> Path: """ Get the complete normalized file path with customizable version, edition, and language. @@ -310,7 +320,7 @@ def _get_complete_normalized_file_path(self, absolute: bool, latest: bool = True Defaults to True. Returns: - str: The normalized file path. + Path: The normalized file path. 
Note: This method replaces placeholders such as '[[VER]]', '[[EDITION]]', and '[[LANG]]' in the file name diff --git a/modules/updaters/HDAT2.py b/modules/updaters/HDAT2.py index d502c98..40e0526 100644 --- a/modules/updaters/HDAT2.py +++ b/modules/updaters/HDAT2.py @@ -1,13 +1,12 @@ from functools import cache -import os +from pathlib import Path +from urllib.parse import urljoin import requests from bs4 import BeautifulSoup from bs4.element import Tag from modules.updaters.GenericUpdater import GenericUpdater -from urllib.parse import urljoin - from modules.utils import md5_hash_check, parse_hash DOMAIN = "https://www.hdat2.com" @@ -29,7 +28,7 @@ class HDAT2(GenericUpdater): This class inherits from the abstract base class GenericUpdater. """ - def __init__(self, folder_path: str, edition: str) -> None: + def __init__(self, folder_path: Path, edition: str) -> None: self.valid_editions = ["full", "lite", "diskette"] self.edition = edition.lower() @@ -40,7 +39,7 @@ def __init__(self, folder_path: str, edition: str) -> None: self.file_name = FILE_NAME.replace("[[EXT]]", extension) - file_path = os.path.join(folder_path, self.file_name) + file_path = folder_path / self.file_name super().__init__(file_path) self.download_page = requests.get(DOWNLOAD_PAGE_URL) diff --git a/modules/updaters/HirensBootCDPE.py b/modules/updaters/HirensBootCDPE.py index 88755eb..05c682d 100644 --- a/modules/updaters/HirensBootCDPE.py +++ b/modules/updaters/HirensBootCDPE.py @@ -1,5 +1,5 @@ from functools import cache -import os +from pathlib import Path import requests from bs4 import BeautifulSoup @@ -26,8 +26,8 @@ class HirensBootCDPE(GenericUpdater): This class inherits from the abstract base class GenericUpdater. """ - def __init__(self, folder_path: str) -> None: - file_path = os.path.join(folder_path, FILE_NAME) + def __init__(self, folder_path: Path) -> None: + file_path = folder_path / FILE_NAME super().__init__(file_path) self.download_page = requests.get(DOWNLOAD_PAGE_URL) diff --git a/modules/updaters/KaliLinux.py b/modules/updaters/KaliLinux.py index b3b82e2..ddcefa6 100644 --- a/modules/updaters/KaliLinux.py +++ b/modules/updaters/KaliLinux.py @@ -1,5 +1,6 @@ from functools import cache -import os +from pathlib import Path +from urllib.parse import urljoin import requests from bs4 import BeautifulSoup @@ -7,7 +8,6 @@ from modules.exceptions import VersionNotFoundError from modules.updaters.GenericUpdater import GenericUpdater from modules.utils import parse_hash, sha256_hash_check -from urllib.parse import urljoin DOMAIN = "https://cdimage.kali.org" DOWNLOAD_PAGE_URL = urljoin(DOMAIN, "current/") @@ -28,7 +28,7 @@ class KaliLinux(GenericUpdater): This class inherits from the abstract base class GenericUpdater. 
""" - def __init__(self, folder_path: str, edition: str) -> None: + def __init__(self, folder_path: Path, edition: str) -> None: self.valid_editions = [ "installer", "installer-netinst", @@ -37,7 +37,7 @@ def __init__(self, folder_path: str, edition: str) -> None: ] self.edition = edition.lower() - file_path = os.path.join(folder_path, FILE_NAME) + file_path = folder_path / FILE_NAME super().__init__(file_path) self.download_page = requests.get(DOWNLOAD_PAGE_URL) @@ -54,7 +54,8 @@ def __init__(self, folder_path: str, edition: str) -> None: @cache def _get_download_link(self) -> str: return urljoin( - DOWNLOAD_PAGE_URL, self._get_complete_normalized_file_path(absolute=False) + DOWNLOAD_PAGE_URL, + str(self._get_complete_normalized_file_path(absolute=False)), ) def check_integrity(self) -> bool: @@ -63,7 +64,9 @@ def check_integrity(self) -> bool: sha256_sums = requests.get(sha256_url).text sha256_sum = parse_hash( - sha256_sums, [self._get_complete_normalized_file_path(absolute=False)], 0 + sha256_sums, + [str(self._get_complete_normalized_file_path(absolute=False))], + 0, ) return sha256_hash_check( @@ -80,11 +83,13 @@ def _get_latest_version(self) -> list[str]: latest = next( href for a_tag in download_a_tags - if self._get_normalized_file_path( - absolute=False, - version=None, - edition=self.edition if self.has_edition() else None, # type: ignore - lang=self.lang if self.has_lang() else None, # type: ignore + if str( + self._get_normalized_file_path( + absolute=False, + version=None, + edition=self.edition if self.has_edition() else None, # type: ignore + lang=self.lang if self.has_lang() else None, # type: ignore + ) ).split("[[VER]]")[-1] in (href := a_tag.get("href")) ) diff --git a/modules/updaters/LinuxMint.py b/modules/updaters/LinuxMint.py index 8c7b6bc..7adf523 100644 --- a/modules/updaters/LinuxMint.py +++ b/modules/updaters/LinuxMint.py @@ -1,5 +1,5 @@ from functools import cache -import os +from pathlib import Path import requests from bs4 import BeautifulSoup @@ -27,11 +27,11 @@ class LinuxMint(GenericUpdater): This class inherits from the abstract base class GenericUpdater. """ - def __init__(self, folder_path: str, edition: str) -> None: + def __init__(self, folder_path: Path, edition: str) -> None: self.valid_editions = ["cinnamon", "mate", "xfce"] self.edition = edition.lower() - file_path = os.path.join(folder_path, FILE_NAME) + file_path = folder_path / FILE_NAME super().__init__(file_path) self.download_page = requests.get(DOWNLOAD_PAGE_URL) @@ -58,7 +58,9 @@ def check_integrity(self) -> bool: sha256_sums = requests.get(sha256_url).text sha256_sum = parse_hash( - sha256_sums, [self._get_complete_normalized_file_path(absolute=False)], 0 + sha256_sums, + [str(self._get_complete_normalized_file_path(absolute=False))], + 0, ) return sha256_hash_check( diff --git a/modules/updaters/Manjaro.py b/modules/updaters/Manjaro.py index 0f317e7..7997b2a 100644 --- a/modules/updaters/Manjaro.py +++ b/modules/updaters/Manjaro.py @@ -1,6 +1,6 @@ -from functools import cache -import os import re +from functools import cache +from pathlib import Path import requests @@ -9,8 +9,8 @@ from modules.utils import ( md5_hash_check, parse_hash, - sha512_hash_check, sha256_hash_check, + sha512_hash_check, ) DOMAIN = "https://gitlab.manjaro.org" @@ -31,7 +31,7 @@ class Manjaro(GenericUpdater): This class inherits from the abstract base class GenericUpdater. 
""" - def __init__(self, folder_path: str, edition: str) -> None: + def __init__(self, folder_path: Path, edition: str) -> None: self.valid_editions = [ "plasma", "xfce", @@ -42,7 +42,7 @@ def __init__(self, folder_path: str, edition: str) -> None: "mate", ] self.edition = edition.lower() - file_path = os.path.join(folder_path, FILE_NAME) + file_path = folder_path / FILE_NAME super().__init__(file_path) self.file_info_json = requests.get(DOWNLOAD_PAGE_URL).json() diff --git a/modules/updaters/MemTest86Plus.py b/modules/updaters/MemTest86Plus.py index 6069a23..19ad0f4 100644 --- a/modules/updaters/MemTest86Plus.py +++ b/modules/updaters/MemTest86Plus.py @@ -1,6 +1,6 @@ -import os import zipfile from functools import cache +from pathlib import Path import requests from bs4 import BeautifulSoup @@ -32,7 +32,7 @@ class MemTest86Plus(GenericUpdater): This class inherits from the abstract base class GenericUpdater. """ - def __init__(self, folder_path: str) -> None: + def __init__(self, folder_path: Path) -> None: """ Initialize the MemTest86Plus updater. @@ -43,7 +43,7 @@ def __init__(self, folder_path: str) -> None: ConnectionError: If the download page could not be fetched successfully. DownloadLinkNotFoundError: If the card containing download information is not found. """ - file_path = os.path.join(folder_path, FILE_NAME) + file_path = folder_path / FILE_NAME super().__init__(file_path) self.download_page = requests.get(DOWNLOAD_PAGE_URL) @@ -85,7 +85,7 @@ def check_integrity(self) -> bool: sha_256_checksum: str = parse_hash(sha_256_checksums_str, ["64.iso"], 0) return sha256_hash_check( - self._get_complete_normalized_file_path(absolute=True) + ".zip", + self._get_complete_normalized_file_path(absolute=True).with_suffix(".zip"), sha_256_checksum, ) @@ -101,31 +101,31 @@ def install_latest_version(self) -> None: new_file = self._get_complete_normalized_file_path(absolute=True) - archive_path = f"{new_file}.zip" + archive_path = new_file.with_suffix(".zip") download_file(download_link, archive_path) local_file = self._get_local_file() if not self.check_integrity(): - os.remove(archive_path) + archive_path.unlink() raise IntegrityCheckError("Integrity check failed") with zipfile.ZipFile(archive_path) as z: file_list = z.namelist() iso = next(file for file in file_list if file.endswith(".iso")) - extracted_file = z.extract(iso, path=os.path.dirname(new_file)) + extracted_file = Path(z.extract(iso, path=new_file.parent)) if local_file: - os.remove(local_file) # type: ignore - os.remove(archive_path) + local_file.unlink() + archive_path.unlink() try: - os.rename(extracted_file, new_file) + extracted_file.rename(new_file) except FileExistsError: # On Windows, files are not overwritten by default, so we need to remove the old file first - os.remove(new_file) - os.rename(extracted_file, new_file) + new_file.unlink() + extracted_file.rename(new_file) @cache def _get_latest_version(self) -> list[str]: diff --git a/modules/updaters/OpenSUSE.py b/modules/updaters/OpenSUSE.py index 284719e..b52a9a0 100644 --- a/modules/updaters/OpenSUSE.py +++ b/modules/updaters/OpenSUSE.py @@ -1,5 +1,5 @@ from functools import cache -import os +from pathlib import Path import requests @@ -23,11 +23,11 @@ class OpenSUSE(GenericUpdater): This class inherits from the abstract base class GenericUpdater. 
""" - def __init__(self, folder_path: str, edition: str) -> None: + def __init__(self, folder_path: Path, edition: str) -> None: self.valid_editions = ["leap", "leap-micro", "jump"] self.edition = edition.lower() - file_path = os.path.join(folder_path, FILE_NAME) + file_path = folder_path / FILE_NAME super().__init__(file_path) @cache diff --git a/modules/updaters/Rescuezilla.py b/modules/updaters/Rescuezilla.py index 5fa1f0e..cc22ad6 100644 --- a/modules/updaters/Rescuezilla.py +++ b/modules/updaters/Rescuezilla.py @@ -1,5 +1,5 @@ from functools import cache -import os +from pathlib import Path import requests @@ -26,11 +26,11 @@ class Rescuezilla(GenericUpdater): This class inherits from the abstract base class GenericUpdater. """ - def __init__(self, folder_path: str, edition: str) -> None: + def __init__(self, folder_path: Path, edition: str) -> None: self.valid_editions = ["focal", "jammy", "kinetic"] self.edition = edition.lower() - file_path = os.path.join(folder_path, FILE_NAME) + file_path = folder_path / FILE_NAME super().__init__(file_path) release = github_get_latest_version("rescuezilla", "rescuezilla") @@ -49,7 +49,9 @@ def check_integrity(self) -> bool: sha256_sums = requests.get(sha256_url).text sha256_sum = parse_hash( - sha256_sums, [self._get_complete_normalized_file_path(absolute=False)], 0 + sha256_sums, + [str(self._get_complete_normalized_file_path(absolute=False))], + 0, ) return sha256_hash_check( diff --git a/modules/updaters/RockyLinux.py b/modules/updaters/RockyLinux.py index 03add7a..f7e1e8a 100644 --- a/modules/updaters/RockyLinux.py +++ b/modules/updaters/RockyLinux.py @@ -1,5 +1,5 @@ from functools import cache -import os +from pathlib import Path import requests from bs4 import BeautifulSoup @@ -27,11 +27,11 @@ class RockyLinux(GenericUpdater): This class inherits from the abstract base class GenericUpdater. """ - def __init__(self, folder_path: str, edition: str) -> None: + def __init__(self, folder_path: Path, edition: str) -> None: self.valid_editions = ["dvd", "boot", "minimal"] self.edition = edition.lower() - file_path = os.path.join(folder_path, FILE_NAME) + file_path = folder_path / FILE_NAME super().__init__(file_path) self.download_page = requests.get(DOWNLOAD_PAGE_URL) @@ -57,7 +57,7 @@ def check_integrity(self) -> bool: sha256_sum = parse_hash( sha256_sums, - [self._get_complete_normalized_file_path(absolute=False), "="], + [str(self._get_complete_normalized_file_path(absolute=False)), "="], -1, ) diff --git a/modules/updaters/ShredOS.py b/modules/updaters/ShredOS.py index 638b4df..7950316 100644 --- a/modules/updaters/ShredOS.py +++ b/modules/updaters/ShredOS.py @@ -1,5 +1,5 @@ from functools import cache -import os +from pathlib import Path from modules.updaters.GenericUpdater import GenericUpdater from modules.updaters.util_update_checker import ( @@ -23,8 +23,8 @@ class ShredOS(GenericUpdater): This class inherits from the abstract base class GenericUpdater. 
""" - def __init__(self, folder_path: str) -> None: - file_path = os.path.join(folder_path, FILE_NAME) + def __init__(self, folder_path: Path) -> None: + file_path = folder_path / FILE_NAME super().__init__(file_path) release = github_get_latest_version("PartialVolume", "shredos.x86_64") diff --git a/modules/updaters/SuperGrub2.py b/modules/updaters/SuperGrub2.py index 81dbba0..0f53662 100644 --- a/modules/updaters/SuperGrub2.py +++ b/modules/updaters/SuperGrub2.py @@ -1,6 +1,6 @@ -import os import zipfile from functools import cache +from pathlib import Path import requests from bs4 import BeautifulSoup @@ -31,8 +31,8 @@ class SuperGrub2(GenericUpdater): This class inherits from the abstract base class GenericUpdater. """ - def __init__(self, folder_path: str) -> None: - file_path = os.path.join(folder_path, FILE_NAME) + def __init__(self, folder_path: Path) -> None: + file_path = folder_path / FILE_NAME super().__init__(file_path) self.download_page = requests.get(DOWNLOAD_PAGE_URL) @@ -69,7 +69,7 @@ def _get_download_link(self) -> str: sourceforge_url = href_attributes[0].get("href") return sourceforge_url - def check_integrity(self, archive_to_check: str) -> bool: + def check_integrity(self, archive_to_check: Path) -> bool: sha256_sums_tag = self.soup_latest_download_article.find_all("pre") if not sha256_sums_tag: raise IntegrityCheckError("Couldn't find the SHA256 sum") @@ -86,31 +86,31 @@ def install_latest_version(self) -> None: new_file = self._get_complete_normalized_file_path(absolute=True) - archive_path = f"{new_file}.zip" + archive_path = new_file.with_suffix(".zip") download_file(download_link, archive_path) local_file = self._get_local_file() if not self.check_integrity(archive_path): - os.remove(archive_path) + archive_path.unlink() raise IntegrityCheckError("Integrity check failed") with zipfile.ZipFile(archive_path) as z: file_list = z.namelist() iso = next(file for file in file_list if file.endswith(".img")) - extracted_file = z.extract(iso, path=os.path.dirname(new_file)) + extracted_file = Path(z.extract(iso, path=new_file.parent)) if local_file: - os.remove(local_file) # type: ignore - os.remove(archive_path) + local_file.unlink() + archive_path.unlink() try: - os.rename(extracted_file, new_file) + extracted_file.rename(new_file) except FileExistsError: # On Windows, files are not overwritten by default, so we need to remove the old file first - os.remove(new_file) - os.rename(extracted_file, new_file) + new_file.unlink() + extracted_file.rename(new_file) @cache def _get_latest_version(self) -> list[str]: diff --git a/modules/updaters/SystemRescue.py b/modules/updaters/SystemRescue.py index c1e2640..79cfe76 100644 --- a/modules/updaters/SystemRescue.py +++ b/modules/updaters/SystemRescue.py @@ -1,6 +1,6 @@ -from functools import cache -import os import re +from functools import cache +from pathlib import Path import requests from bs4 import BeautifulSoup @@ -27,17 +27,17 @@ class SystemRescue(GenericUpdater): This class inherits from the abstract base class GenericUpdater. """ - def __init__(self, folder_path: str) -> None: + def __init__(self, folder_path: Path) -> None: """ Initialize a SystemRescue updater object. Args: - folder_path (str): The path to the folder where the SystemRescue file is stored. + folder_path (Path): The path to the folder where the SystemRescue file is stored. Raises: ConnectionError: If the download page cannot be fetched successfully. 
""" - file_path = os.path.join(folder_path, FILE_NAME) + file_path = folder_path / FILE_NAME super().__init__(file_path) self.download_page = requests.get(DOWNLOAD_PAGE_URL) @@ -73,7 +73,7 @@ def check_integrity(self) -> bool: r = requests.get(sha256_download_link) sha256_checksum = parse_hash( r.text, - [self._get_normalized_file_path(False, self._get_latest_version())], + [str(self._get_normalized_file_path(False, self._get_latest_version()))], 0, ) diff --git a/modules/updaters/Tails.py b/modules/updaters/Tails.py index c4c0256..c1e01d3 100644 --- a/modules/updaters/Tails.py +++ b/modules/updaters/Tails.py @@ -1,4 +1,5 @@ -import os +from functools import cache +from pathlib import Path import requests from bs4 import BeautifulSoup @@ -6,8 +7,6 @@ from modules.exceptions import VersionNotFoundError from modules.updaters.GenericUpdater import GenericUpdater from modules.utils import pgp_check -from functools import cache - DOMAIN = "https://mirrors.edge.kernel.org" DOWNLOAD_PAGE_URL = f"{DOMAIN}/tails/stable" @@ -28,8 +27,8 @@ class Tails(GenericUpdater): This class inherits from the abstract base class GenericUpdater. """ - def __init__(self, folder_path: str) -> None: - file_path = os.path.join(folder_path, FILE_NAME) + def __init__(self, folder_path: Path) -> None: + file_path = folder_path / FILE_NAME super().__init__(file_path) self.download_page = requests.get(DOWNLOAD_PAGE_URL) diff --git a/modules/updaters/TempleOS.py b/modules/updaters/TempleOS.py index 03b2185..4301fb8 100644 --- a/modules/updaters/TempleOS.py +++ b/modules/updaters/TempleOS.py @@ -1,6 +1,6 @@ -from functools import cache -import os from datetime import datetime +from functools import cache +from pathlib import Path import requests from bs4 import BeautifulSoup, Tag @@ -29,11 +29,11 @@ class TempleOS(GenericUpdater): This class inherits from the abstract base class GenericUpdater. """ - def __init__(self, folder_path: str, edition: str) -> None: + def __init__(self, folder_path: Path, edition: str) -> None: self.valid_editions = ["Distro", "Lite"] self.edition = edition - file_path = os.path.join(folder_path, FILE_NAME) + file_path = folder_path / FILE_NAME super().__init__(file_path) # Make the parameter case insensitive, and find back the correct case using valid_editions diff --git a/modules/updaters/TrueNAS.py b/modules/updaters/TrueNAS.py index 37e0a0b..b5278f8 100644 --- a/modules/updaters/TrueNAS.py +++ b/modules/updaters/TrueNAS.py @@ -1,5 +1,5 @@ from functools import cache -import os +from pathlib import Path import requests from bs4 import BeautifulSoup @@ -26,11 +26,11 @@ class TrueNAS(GenericUpdater): This class inherits from the abstract base class GenericUpdater. """ - def __init__(self, folder_path: str, edition: str) -> None: + def __init__(self, folder_path: Path, edition: str) -> None: self.valid_editions = ["core", "scale"] self.edition = edition.lower() - file_path = os.path.join(folder_path, FILE_NAME) + file_path = folder_path / FILE_NAME super().__init__(file_path) self.download_page_url = DOWNLOAD_PAGE_URL.replace("[[EDITION]]", self.edition) diff --git a/modules/updaters/Ubuntu.py b/modules/updaters/Ubuntu.py index ca02c1f..9481e23 100644 --- a/modules/updaters/Ubuntu.py +++ b/modules/updaters/Ubuntu.py @@ -1,5 +1,5 @@ from functools import cache -import os +from pathlib import Path import requests from bs4 import BeautifulSoup @@ -27,11 +27,11 @@ class Ubuntu(GenericUpdater): This class inherits from the abstract base class GenericUpdater. 
""" - def __init__(self, folder_path: str, edition: str) -> None: + def __init__(self, folder_path: Path, edition: str) -> None: self.valid_editions = ["LTS", "Interim"] self.edition = edition - file_path = os.path.join(folder_path, FILE_NAME) + file_path = folder_path / FILE_NAME super().__init__(file_path) # Make the parameter case insensitive, and find back the correct case using valid_editions diff --git a/modules/updaters/UltimateBootCD.py b/modules/updaters/UltimateBootCD.py index 524ddbd..d17f576 100644 --- a/modules/updaters/UltimateBootCD.py +++ b/modules/updaters/UltimateBootCD.py @@ -1,5 +1,5 @@ from functools import cache -import os +from pathlib import Path from random import shuffle import requests @@ -34,8 +34,8 @@ class UltimateBootCD(GenericUpdater): This class inherits from the abstract base class GenericUpdater. """ - def __init__(self, folder_path: str) -> None: - file_path = os.path.join(folder_path, FILE_NAME) + def __init__(self, folder_path: Path) -> None: + file_path = folder_path / FILE_NAME super().__init__(file_path) self.download_page = requests.get(DOWNLOAD_PAGE_URL) diff --git a/modules/updaters/Windows10.py b/modules/updaters/Windows10.py index 077e794..6865a17 100644 --- a/modules/updaters/Windows10.py +++ b/modules/updaters/Windows10.py @@ -1,6 +1,6 @@ -from functools import cache import logging -import os +from functools import cache +from pathlib import Path import requests from bs4 import BeautifulSoup, Tag @@ -27,7 +27,7 @@ class Windows10(GenericUpdater): This class inherits from the abstract base class GenericUpdater. """ - def __init__(self, folder_path: str, lang: str) -> None: + def __init__(self, folder_path: Path, lang: str) -> None: self.valid_langs = [ "Arabic", "Brazilian Portuguese", @@ -69,7 +69,7 @@ def __init__(self, folder_path: str, lang: str) -> None: "Ukrainian", ] self.lang = lang - file_path = os.path.join(folder_path, FILE_NAME) + file_path = folder_path / FILE_NAME super().__init__(file_path) # Make the parameter case insensitive, and find back the correct case using valid_editions self.lang = next( diff --git a/modules/updaters/Windows11.py b/modules/updaters/Windows11.py index abc737c..6e54e7c 100644 --- a/modules/updaters/Windows11.py +++ b/modules/updaters/Windows11.py @@ -1,5 +1,5 @@ from functools import cache -import os +from pathlib import Path import requests from bs4 import BeautifulSoup, Tag @@ -26,7 +26,7 @@ class Windows11(GenericUpdater): This class inherits from the abstract base class GenericUpdater. 
""" - def __init__(self, folder_path: str, lang: str) -> None: + def __init__(self, folder_path: Path, lang: str) -> None: self.valid_langs = [ "Arabic", "Brazilian Portuguese", @@ -68,7 +68,7 @@ def __init__(self, folder_path: str, lang: str) -> None: "Ukrainian", ] self.lang = lang - file_path = os.path.join(folder_path, FILE_NAME) + file_path = folder_path / FILE_NAME super().__init__(file_path) # Make the parameter case insensitive, and find back the correct case using valid_editions self.lang = next( diff --git a/modules/utils.py b/modules/utils.py index f594237..9af8111 100644 --- a/modules/utils.py +++ b/modules/utils.py @@ -1,11 +1,11 @@ import hashlib import logging -import os import re import shutil import tomllib import traceback import uuid +from pathlib import Path import requests from bs4 import BeautifulSoup, Tag @@ -27,11 +27,11 @@ def logging_critical_exception(msg, *args, **kwargs): logging.critical(f"{msg}\n{traceback.format_exc()}", *args, **kwargs) -def parse_config(toml_file: str) -> dict | None: +def parse_config(toml_file: Path) -> dict | None: """Parse a TOML configuration file and return a dictionary representation. Args: - toml_file (str): The path to the TOML configuration file. + toml_file (Path): The path to the TOML configuration file. Returns: dict | None: The parsed configuration as a dictionary, or None if there was an error during parsing. @@ -71,12 +71,12 @@ def parse_config_from_dict(input_dict: dict): return new_dict -def md5_hash_check(file: str, hash: str) -> bool: +def md5_hash_check(file: Path, hash: str) -> bool: """ Calculate the MD5 hash of a given file and compare it with a provided hash value. Args: - file (str): The path to the file for which the hash is to be calculated. + file (Path): The path to the file for which the hash is to be calculated. hash (str): The MD5 hash value to compare against the calculated hash. Returns: @@ -89,17 +89,17 @@ def md5_hash_check(file: str, hash: str) -> bool: result = hash.lower() == file_hash.hexdigest() logging.debug( - f"[md5_hash_check] {os.path.abspath(file)}: `{hash.lower()}` is {'' if result else 'not'} equal to file hash `{file_hash.hexdigest()}`" + f"[md5_hash_check] {file.resolve()}: `{hash.lower()}` is {'' if result else 'not'} equal to file hash `{file_hash.hexdigest()}`" ) return result -def sha1_hash_check(file: str, hash: str) -> bool: +def sha1_hash_check(file: Path, hash: str) -> bool: """ Calculate the SHA-1 hash of a given file and compare it with a provided hash value. Args: - file (str): The path to the file for which the hash is to be calculated. + file (Path): The path to the file for which the hash is to be calculated. hash (str): The SHA-1 hash value to compare against the calculated hash. Returns: @@ -112,12 +112,12 @@ def sha1_hash_check(file: str, hash: str) -> bool: result = hash.lower() == file_hash.hexdigest() logging.debug( - f"[sha1_hash_check] {os.path.abspath(file)}: `{hash.lower()}` is {'' if result else 'not'} equal to file hash `{file_hash.hexdigest()}`" + f"[sha1_hash_check] {file.resolve()}: `{hash.lower()}` is {'' if result else 'not'} equal to file hash `{file_hash.hexdigest()}`" ) return result -def sha256_hash_check(file: str, hash: str) -> bool: +def sha256_hash_check(file: Path, hash: str) -> bool: """ Calculate the SHA-256 hash of a given file and compare it with a provided hash value. 
@@ -135,17 +135,17 @@ def sha256_hash_check(file: str, hash: str) -> bool: result = hash.lower() == file_hash.hexdigest() logging.debug( - f"[sha256_hash_check] {os.path.abspath(file)}: `{hash.lower()}` is {'' if result else 'not'} equal to file hash `{file_hash.hexdigest()}`" + f"[sha256_hash_check] {file.resolve()}: `{hash.lower()}` is {'' if result else 'not'} equal to file hash `{file_hash.hexdigest()}`" ) return result -def sha512_hash_check(file: str, hash: str) -> bool: +def sha512_hash_check(file: Path, hash: str) -> bool: """ Calculate the SHA-512 hash of a given file and compare it with a provided hash value. Args: - file (str): The path to the file for which the hash is to be calculated. + file (Path): The path to the file for which the hash is to be calculated. hash (str): The SHA-512 hash value to compare against the calculated hash. Returns: @@ -158,16 +158,16 @@ def sha512_hash_check(file: str, hash: str) -> bool: result = hash.lower() == file_hash.hexdigest() logging.debug( - f"[sha512_hash_check] {os.path.abspath(file)}: `{hash.lower()}` is {'' if result else 'not'} equal to file hash `{file_hash.hexdigest()}`" + f"[sha512_hash_check] {file.resolve()}: `{hash.lower()}` is {'' if result else 'not'} equal to file hash `{file_hash.hexdigest()}`" ) return result -def pgp_check(file_path: str, signature: str | bytes, public_key: str | bytes) -> bool: +def pgp_check(file_path: Path, signature: str | bytes, public_key: str | bytes) -> bool: """Verifies the signature of a file against a publick ey Args: - file_path (str): Path to the file to check + file_path (Path): Path to the file to check signature (str | bytes): Signature public_key (str | bytes): Public Key @@ -198,7 +198,7 @@ def pgp_check(file_path: str, signature: str | bytes, public_key: str | bytes) - result = bool(pub_key.verify(file_content, sig)) print( - f"[pgp_check] {os.path.abspath(file_path)}: Signature is {'' if result else 'not'} valid" + f"[pgp_check] {file_path.resolve()}: Signature is {'' if result else 'not'} valid" ) return result @@ -227,19 +227,19 @@ def parse_hash( ) -def download_file(url: str, local_file: str, progress_bar: bool = True) -> None: +def download_file(url: str, local_file: Path, progress_bar: bool = True) -> None: """ Download a file from a given URL and save it to the local file system. Args: url (str): The URL of the file to download. - local_file (str): The path where the downloaded file will be saved on the local file system. + local_file (Path): The path where the downloaded file will be saved on the local file system. progress_bar (bool): Whether to show a progress bar during the download (default: True). 
Returns: None """ - logging.debug(f"[download_file] Downloading {url} to {os.path.abspath(local_file)}") + logging.debug(f"[download_file] Downloading {url} to {local_file.resolve()}") with requests.get(url, stream=True) as r: total_size = int(r.headers.get("content-length", 0)) # Sizes in bytes @@ -248,7 +248,7 @@ def download_file(url: str, local_file: str, progress_bar: bool = True) -> None: with tqdm( total=total_size, unit="B", - desc=os.path.basename(local_file), + desc=local_file.name, ) as pbar: for chunk in r.iter_content(chunk_size=1024): if chunk: diff --git a/sisou.py b/sisou.py index 79542d0..53066bf 100644 --- a/sisou.py +++ b/sisou.py @@ -1,8 +1,8 @@ import argparse import logging -import os from abc import ABCMeta from functools import cache +from pathlib import Path from typing import Type import modules.updaters @@ -26,12 +26,12 @@ def get_available_updaters() -> list[Type[GenericUpdater]]: ] -def setup_logging(log_level: str, log_file: str | None): +def setup_logging(log_level: str, log_file: Path | None): """Set up logging configurations. Args: log_level (str): The log level. Valid choices: "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL". - log_file (str | None): The path to the log file. If None, log to console. + log_file (Path | None): The path to the log file. If None, log to console. Raises: ValueError: If the log_level is invalid. @@ -73,12 +73,12 @@ def run_updater(updater: GenericUpdater): def run_updaters( - install_path: str, config: dict, updater_list: list[Type[GenericUpdater]] + install_path: Path, config: dict, updater_list: list[Type[GenericUpdater]] ): """Run updaters based on the provided configuration. Args: - install_path (str): The installation path. + install_path (Path): The installation path. config (dict): The configuration dictionary. updater_list (list[Type[GenericUpdater]]): A list of available updater classes. """ @@ -120,7 +120,7 @@ def run_updaters( run_updater(updater) else: - run_updaters(os.path.join(install_path, key), value, updater_list) + run_updaters(install_path / key, value, updater_list) def main(): @@ -151,29 +151,30 @@ def main(): args = parser.parse_args() - setup_logging(args.log_level, args.log_file) + log_file = Path(args.log_file) if args.log_file else None + setup_logging(args.log_level, log_file) - config_file = args.config_file + ventoy_path = Path(args.ventoy_path).resolve() + + config_file = Path(args.config_file) if args.config_file else None if not config_file: logging.info( "No config file specified. Trying to find config.toml in the current directory..." ) - config_file = os.path.join(os.getcwd(), "config.toml") + config_file = Path() / "config.toml" - if not os.path.isfile(config_file): + if not config_file.is_file(): logging.info( "No config file specified. Trying to find config.toml in the ventoy drive..." ) - config_file = os.path.join(args.ventoy_path, "config.toml") + config_file = ventoy_path / "config.toml" - if not os.path.isfile(config_file): + if not config_file.is_file(): logging.info( "No config.toml found in the ventoy drive. Generating one from config.toml.default..." 
                 )
                 with open(
-                    os.path.join(
-                        os.path.dirname(__file__), "config", "config.toml.default"
-                    )
+                    Path(__file__).parent / "config" / "config.toml.default"
                 ) as default_config_file:
                     with open(config_file, "w") as new_config_file:
                         new_config_file.write(default_config_file.read())
@@ -188,7 +189,7 @@ def main():
 
     available_updaters: list[Type[GenericUpdater]] = get_available_updaters()
 
-    run_updaters(args.ventoy_path, config, available_updaters)
+    run_updaters(ventoy_path, config, available_updaters)
 
     logging.debug("Finished execution")
 

From 5f2bdf489ee962636f24c1ba80d84e5ce4cdcaa8 Mon Sep 17 00:00:00 2001
From: Joshua Vandaele
Date: Wed, 7 Feb 2024 17:05:11 +0100
Subject: [PATCH 5/6] feat: Create folder for the config file if it doesn't exist

---
 sisou.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/sisou.py b/sisou.py
index 53066bf..d732831 100644
--- a/sisou.py
+++ b/sisou.py
@@ -176,6 +176,7 @@ def main():
                 with open(
                     Path(__file__).parent / "config" / "config.toml.default"
                 ) as default_config_file:
+                    config_file.parent.mkdir(parents=True, exist_ok=True)
                     with open(config_file, "w") as new_config_file:
                         new_config_file.write(default_config_file.read())
                 logging.info(

From 302548c229c9ea93d7a0d73babab1db4989015e3 Mon Sep 17 00:00:00 2001
From: Joshua Vandaele
Date: Fri, 9 Feb 2024 15:00:12 +0100
Subject: [PATCH 6/6] chore: Bump version number

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 9b120d5..e132c04 100644
--- a/setup.py
+++ b/setup.py
@@ -14,7 +14,7 @@
 
 setup(
     name="sisou",  # Required
-    version="1.1.3",  # Required
+    version="1.1.4",  # Required
     description="A powerful tool to conveniently update all of your ISO files!",  # Optional
     long_description=long_description,  # Optional
     long_description_content_type="text/markdown",  # Optional
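
A note on two of the changes above, with an illustrative sketch that is not part of the patch series itself. PATCH 1/6 works around os.rename() raising FileExistsError on Windows by unlinking the target and renaming again; the standard library's os.replace() (and Path.replace() after the pathlib refactor in PATCH 4/6) overwrites the destination on both POSIX and Windows and could express the same intent in a single call. PATCH 4/6 also switches archive-name handling from string concatenation to Path.with_suffix(), which substitutes the final suffix rather than appending one. The directory and file names below are made up for the demonstration only.

    from pathlib import Path

    scratch = Path("scratch")
    scratch.mkdir(exist_ok=True)

    extracted = scratch / "extracted.img"          # hypothetical names, not taken from the updaters
    target = scratch / "chromeos_stable.img"
    extracted.write_bytes(b"new contents")
    target.write_bytes(b"old contents")

    # Path.rename()/os.rename() raise FileExistsError on Windows when the target already
    # exists, while POSIX overwrites silently; Path.replace()/os.replace() overwrite the
    # destination on both platforms, so no try/except is needed.
    extracted.replace(target)
    assert target.read_bytes() == b"new contents"

    # Path.with_suffix() replaces the last suffix instead of appending one, so the derived
    # archive name differs from the old f"{new_file}.zip" string concatenation.
    image = Path("chromeos_stable.img")
    print(image.with_suffix(".zip"))  # chromeos_stable.zip
    print(Path(f"{image}.zip"))       # chromeos_stable.img.zip

Either approach works; the sketch only illustrates the standard-library behaviour that the try/except blocks in PATCH 1/6 are working around.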