-
Notifications
You must be signed in to change notification settings - Fork 31
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
fix/change: migrate test cases from unittest.TestCase to pytest based…
… with new test data loader (#173) * enhancement: add a badge for Azure Pipelines * enhancement: add new simple test data loader to be used with pytest.mark.parametrize - tests/common/__init__.py: Export some constants and functions added. - tests/common/test_tdc.py: Add test cases for new ones mentioned. * fix/change: migrate test cases from unittest based to pytest based with new test data loader * change: export RESOURCE_DIR and collect_for from tests.common to re-use them * fix: dce in tests.dicts.common * fix: lint fixes * refactor: simplify the code to collect test data and test cases * change: remove test data not used anymore
- Loading branch information
Showing
940 changed files
with
3,750 additions
and
2,964 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file was deleted.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,49 +1,59 @@ | ||
# | ||
# Copyright (C) 2021 Satoru SATOH <satoru.satoh@gmail.com> | ||
# Copyright (C) 2021 - 2024 Satoru SATOH <satoru.satoh gmail.com> | ||
# SPDX-License-Identifier: MIT | ||
# | ||
# pylint: disable=missing-docstring | ||
import pathlib | ||
import tempfile | ||
"""Basic test cases for anyconfig.api.dump.""" | ||
from __future__ import annotations | ||
|
||
import typing | ||
|
||
import pytest | ||
|
||
import anyconfig.api._dump as TT | ||
|
||
from anyconfig.api import ( | ||
UnknownProcessorTypeError, UnknownFileTypeError | ||
UnknownFileTypeError, UnknownProcessorTypeError | ||
) | ||
|
||
from . import common | ||
|
||
|
||
class TestCase(common.BaseTestCase): | ||
|
||
def test_dump(self): | ||
with tempfile.TemporaryDirectory() as tdir: | ||
for data in self.each_data(): | ||
out = pathlib.Path(tdir) / 'out.json' | ||
TT.dump(data.inp, out, **data.opts) | ||
self.assertEqual( | ||
out.read_text().strip(), | ||
data.exp.strip(), | ||
f'{data.datadir!s}, {data.inp_path!s}' | ||
) | ||
|
||
def test_dump_intentional_failures(self): | ||
with tempfile.TemporaryDirectory() as tdir: | ||
for data in self.each_data(): | ||
out = pathlib.Path(tdir) / 'out.json' | ||
TT.dump(data.inp, out, **data.opts) | ||
with self.assertRaises(AssertionError): | ||
self.assertEqual(out.read_text().strip(), '') | ||
|
||
def test_dump_failure_ac_parser_was_not_given(self): | ||
for data in self.each_data(): | ||
with self.assertRaises(UnknownFileTypeError): | ||
TT.dump(data.inp, 'dummy.txt') | ||
|
||
def test_dump_failure_invalid_ac_parser_was_given(self): | ||
for data in self.each_data(): | ||
with self.assertRaises(UnknownProcessorTypeError): | ||
TT.dump(data.inp, 'dummy.json', ac_parser='invalid_id') | ||
|
||
# vim:sw=4:ts=4:et: | ||
from ... import common | ||
|
||
if typing.TYPE_CHECKING: | ||
import pathlib | ||
|
||
|
||
NAMES: tuple[str, ...] = ("obj", "opts", "exp") | ||
|
||
# .. seealso:: tests.common.tdc | ||
DATA_0: list = common.load_data_for_testfile(__file__, load_idata=True) | ||
DATA_IDS: list[str] = common.get_test_ids(DATA_0) | ||
DATA: list[tuple[typing.Any, dict, str]] = [ | ||
(i, o, e.strip()) for _, i, o, e in DATA_0 | ||
] | ||
|
||
|
||
def test_data() -> None: | ||
assert DATA | ||
|
||
|
||
@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) | ||
def test_dump(obj, opts: dict, exp, tmp_path: pathlib.Path) -> None: | ||
out = tmp_path / "out.json" | ||
TT.dump(obj, out, **opts) | ||
assert out.read_text() == exp | ||
|
||
|
||
@pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) | ||
def test_dump_without_ac_parser_option(obj, opts: dict, exp) -> None: | ||
assert opts or exp | ||
with pytest.raises(UnknownFileTypeError): | ||
TT.dump(obj, "out.txt") | ||
|
||
|
||
@pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) | ||
def test_dump_with_invalid_ac_parser_option( | ||
obj, opts: dict, exp | ||
) -> None: | ||
assert opts or exp | ||
with pytest.raises(UnknownProcessorTypeError): | ||
TT.dump(obj, "out.json", ac_parser="invalid_id") |
This file was deleted.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,38 +1,52 @@ | ||
# | ||
# Copyright (C) 2021 Satoru SATOH <satoru.satoh@gmail.com> | ||
# Copyright (C) 2021 - 2024 Satoru SATOH <satoru.satoh gmail.com> | ||
# SPDX-License-Identifier: MIT | ||
# | ||
# pylint: disable=missing-docstring | ||
"""Basic test cases for anyconfig.api.dumps.""" | ||
from __future__ import annotations | ||
|
||
import typing | ||
|
||
import pytest | ||
|
||
import anyconfig.api._dump as TT | ||
|
||
from anyconfig.api import UnknownProcessorTypeError | ||
|
||
from . import common | ||
from ... import common | ||
|
||
|
||
NAMES: tuple[str, ...] = ("obj", "opts", "exp") | ||
|
||
# .. seealso:: tests.common.tdc | ||
DATA_0: list = common.load_data_for_testfile(__file__, load_idata=True) | ||
DATA_IDS: list[str] = common.get_test_ids(DATA_0) | ||
DATA: list[tuple[typing.Any, dict, str]] = [ | ||
(i, o, e.strip()) for _, i, o, e in DATA_0 | ||
] | ||
|
||
|
||
def test_data() -> None: | ||
assert DATA | ||
|
||
class TestCase(common.BaseTestCase): | ||
|
||
def test_dumps(self): | ||
for data in self.each_data(): | ||
self.assertEqual( | ||
TT.dumps(data.inp, **data.opts).strip(), | ||
data.exp.strip(), | ||
f'{data.datadir!s}, {data.inp_path!s}' | ||
) | ||
@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) | ||
def test_dumps(obj, opts: dict, exp) -> None: | ||
assert TT.dumps(obj, **opts) == exp | ||
|
||
def test_dumps_intentional_failures(self): | ||
for data in self.each_data(): | ||
with self.assertRaises(AssertionError): | ||
self.assertEqual(TT.dumps(data.inp, **data.opts).strip(), {}) | ||
|
||
def test_dump_failure_ac_parser_was_not_given(self): | ||
for data in self.each_data(): | ||
with self.assertRaises(ValueError): | ||
TT.dumps(data.inp) | ||
@pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) | ||
def test_dumps_without_ac_parser_option(obj, opts: dict, exp) -> None: | ||
assert opts or exp | ||
with pytest.raises(ValueError): | ||
TT.dumps(obj) | ||
|
||
def test_dump_failure_invalid_ac_parser_was_given(self): | ||
for data in self.each_data(): | ||
with self.assertRaises(UnknownProcessorTypeError): | ||
TT.dumps(data.inp, ac_parser='invalid_id') | ||
|
||
# vim:sw=4:ts=4:et: | ||
@pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) | ||
def test_dumps_with_invalid_ac_parser_option( | ||
obj, opts: dict, exp | ||
) -> None: | ||
assert opts or exp | ||
with pytest.raises(UnknownProcessorTypeError): | ||
TT.dumps(obj, ac_parser="invalid_id") |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,22 +1,13 @@ | ||
# | ||
# Copyright (C) 2021 Satoru SATOH <satoru.satoh@gmail.com> | ||
# Copyright (C) 2021 - 2024 Satoru SATOH <satoru.satoh gmail.com> | ||
# SPDX-License-Identifier: MIT | ||
# | ||
# pylint: disable=missing-docstring | ||
import anyconfig.api._load as TT | ||
|
||
|
||
class Basa: | ||
@staticmethod | ||
def target_fn(*args, **kwargs): | ||
return TT.load(*args, **kwargs) | ||
|
||
|
||
class MultiBase: | ||
target: str = 'load/multi' | ||
|
||
|
||
class SingleBase: | ||
target: str = 'load/single' | ||
|
||
# vim:sw=4:ts=4:et: | ||
# pylint: disable=unused-import | ||
"""Common module for tests.api.load.""" | ||
from __future__ import annotations | ||
|
||
from ...common import ( # noqa: F401 | ||
get_test_ids, load_data_for_testfile | ||
) | ||
from ..single_load.constants import LOADER_TYPES # noqa: F401 |
File renamed without changes.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,13 @@ | ||
# | ||
# Copyright (C) 2021 - 2024 Satoru SATOH <satoru.satoh gmail.com> | ||
# SPDX-License-Identifier: MIT | ||
# | ||
# pylint: disable=missing-docstring | ||
# pylint: disable=unused-import | ||
"""Common module for tests.api.load.""" | ||
from __future__ import annotations | ||
|
||
from ...multi_load.common import ( # noqa: F401 | ||
NAMES, GLOB_PATTERN, | ||
load_data_for_testfile, get_test_ids | ||
) |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,105 @@ | ||
# | ||
# Copyright (C) 2021 - 2024 Satoru SATOH <satoru.satoh gmail.com> | ||
# SPDX-License-Identifier: MIT | ||
# | ||
# pylint: disable=missing-docstring | ||
"""Test cases for anyconfig.api.load (multi_load).""" | ||
from __future__ import annotations | ||
|
||
import collections | ||
import typing | ||
|
||
import pytest | ||
|
||
import anyconfig.api._load as TT | ||
|
||
from .common import ( | ||
NAMES, GLOB_PATTERN, load_data_for_testfile, get_test_ids | ||
) | ||
|
||
if typing.TYPE_CHECKING: | ||
import pathlib | ||
|
||
|
||
DATA = load_data_for_testfile(__file__) | ||
DATA_IDS: list[str] = get_test_ids(DATA) | ||
|
||
DATA_W_GLOB = [ | ||
(inputs[0].parent / GLOB_PATTERN, opts, exp) | ||
for inputs, opts, exp in DATA | ||
] | ||
|
||
|
||
def test_data() -> None: | ||
assert DATA | ||
|
||
|
||
def test_load_with_empty_list() -> None: | ||
with pytest.raises(ValueError): | ||
TT.load([]) | ||
|
||
|
||
@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) | ||
def test_load_for_a_list_of_path_objects( | ||
inputs: list[pathlib.Path], opts: dict, exp | ||
) -> None: | ||
assert TT.load(inputs, **opts) == exp | ||
assert TT.load((i for i in inputs), **opts) == exp | ||
|
||
|
||
@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) | ||
def test_load_for_a_list_of_path_strings( | ||
inputs: list[pathlib.Path], opts: dict, exp | ||
) -> None: | ||
assert TT.load([str(i) for i in inputs], **opts) == exp | ||
assert TT.load((str(i) for i in inputs), **opts) == exp | ||
|
||
|
||
@pytest.mark.parametrize( | ||
NAMES, DATA_W_GLOB, ids=get_test_ids(DATA_W_GLOB) | ||
) | ||
def test_load_for_glob_patterns( | ||
inputs: list[pathlib.Path], opts: dict, exp | ||
) -> None: | ||
assert TT.load(inputs, **opts) == exp | ||
|
||
|
||
@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) | ||
def test_load_for_a_list_of_streams( | ||
inputs: list[pathlib.Path], opts: dict, exp | ||
) -> None: | ||
assert TT.load([i.open() for i in inputs], **opts) == exp | ||
|
||
|
||
class MyDict(collections.OrderedDict): | ||
pass | ||
|
||
|
||
@pytest.mark.parametrize(NAMES, DATA, ids=DATA_IDS) | ||
def test_load_with_ac_dict_option( | ||
inputs: list[pathlib.Path], opts: dict, exp | ||
) -> None: | ||
res = TT.load(inputs, ac_dict=MyDict, **opts) | ||
assert res == exp | ||
assert isinstance(res, MyDict) | ||
|
||
|
||
@pytest.mark.parametrize(NAMES, DATA[:1], ids=DATA_IDS[:1]) | ||
def test_load_with_wrong_merge_strategy( | ||
inputs: list[pathlib.Path], opts: dict, exp | ||
) -> None: | ||
assert exp # dummy to avoid an error of unused argument. | ||
with pytest.raises(ValueError): | ||
TT.load(inputs, ac_merge="wrong_merge_strategy", **opts) | ||
|
||
|
||
def test_load_with_ignore_missing_option(): | ||
paths = [ | ||
"/path/to/file_not_exist_0.json", | ||
"/path/to/file_not_exist_1.json", | ||
"/path/to/file_not_exist_2.json", | ||
] | ||
with pytest.raises(FileNotFoundError): | ||
TT.load(paths) | ||
|
||
assert TT.load(paths, ac_ignore_missing=True) == {} |
Oops, something went wrong.