Skip to content

Commit

Permalink
Pydantic bump, fix readthedocs w/ Poetry 1.8 (#847)
Browse files Browse the repository at this point in the history
* ran bump-pydantic utility

* docs clarifications, try creating venv

* RTD identified bug with poetry 1.8, implementing workaround

* don't set create venv false

* typo

* fix pydantic deprecations

* added tests/ to CI mypy
  • Loading branch information
harrisonliew authored Mar 7, 2024
1 parent 303c375 commit f42634e
Show file tree
Hide file tree
Showing 20 changed files with 235 additions and 172 deletions.
1 change: 1 addition & 0 deletions .github/workflows/pr.yml
Original file line number Diff line number Diff line change
Expand Up @@ -62,3 +62,4 @@ jobs:
touch .venv/lib/python${{ matrix.python-version }}/site-packages/ruamel/py.typed
touch .venv/lib/python${{ matrix.python-version }}/site-packages/networkx/py.typed
poetry run mypy --namespace-packages -p hammer
poetry run mypy --namespace-packages -p tests
8 changes: 5 additions & 3 deletions .readthedocs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,12 @@ build:
tools:
python: '3.10'
jobs:
post_install:
post_create_environment:
- pip install poetry myst-parser
- poetry config virtualenvs.create false
- poetry install
post_install:
# VIRTUAL_ENV needs to be set manually for now.
# See https://github.com/readthedocs/readthedocs.org/pull/11152/
- VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH poetry install
- python3 -c "from hammer.tech import TechJSON; print(TechJSON.schema_json(indent=2))" > doc/Technology/schema.json

# Build documentation with Sphinx
Expand Down
12 changes: 10 additions & 2 deletions doc/Hammer-Basics/Hammer-Setup.md
Original file line number Diff line number Diff line change
Expand Up @@ -137,10 +137,11 @@ pytest tests/test_build_systems.py -k "flat_makefile" -rA -v

### Type Checking with mypy

There is a [small issue with the ruamel.yaml package typechecking](https://github.com/python/mypy/issues/12664) which can be hacked around with:
There is a [small issue with the ruamel.yaml package typechecking](https://github.com/python/mypy/issues/12664) which can be hacked around with (replace the python version with your own):

```shell
touch .venv/lib/python3.10/site-packages/ruamel/py.typed
touch .venv/lib/python3.10/site-packages/networkx/py.typed
```

Inside your poetry virtualenv, from the root of Hammer, run:
Expand Down Expand Up @@ -218,7 +219,14 @@ Run `poetry update` and `poetry install` and commit `poetry.lock`.

### Building Documentation

- Within your poetry virtualenv, `cd doc`
First, generate the `schema.json` file from within your poetry virtualenv:

```shell
python3 -c "from hammer.tech import TechJSON; print(TechJSON.schema_json(indent=2))" > doc/Technology/schema.json
```

Then:
- `cd doc`
- Modify any documentation files. You can migrate any rst file to Markdown if desired.
- Run `sphinx-build . build`
- The generated HTML files are placed in `build/`
Expand Down
54 changes: 27 additions & 27 deletions hammer/tech/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ class MinMaxCap(BaseModel):

class Provide(BaseModel):
lib_type: str
vt: Optional[str]
vt: Optional[str] = None


class Supplies(BaseModel):
Expand Down Expand Up @@ -212,27 +212,27 @@ def from_setting(grid_unit: Decimal, d: Dict[str, Any]) -> "Site":

class TechJSON(BaseModel):
name: str
grid_unit: Optional[str]
shrink_factor: Optional[str]
installs: Optional[List[PathPrefix]]
libraries: Optional[List[Library]]
gds_map_file: Optional[str]
physical_only_cells_list: Optional[List[Cell]]
dont_use_list: Optional[List[Cell]]
drc_decks: Optional[List[DRCDeck]]
lvs_decks: Optional[List[LVSDeck]]
tarballs: Optional[List[Tarball]]
sites: Optional[List[Site]]
stackups: Optional[List[Stackup]]
special_cells: Optional[List[SpecialCell]]
extra_prefixes: Optional[List[PathPrefix]]
additional_lvs_text: Optional[str]
additional_drc_text: Optional[str]
grid_unit: Optional[str] = None
shrink_factor: Optional[str] = None
installs: Optional[List[PathPrefix]] = None
libraries: Optional[List[Library]] = None
gds_map_file: Optional[str] = None
physical_only_cells_list: Optional[List[Cell]] = None
dont_use_list: Optional[List[Cell]] = None
drc_decks: Optional[List[DRCDeck]] = None
lvs_decks: Optional[List[LVSDeck]] = None
tarballs: Optional[List[Tarball]] = None
sites: Optional[List[Site]] = None
stackups: Optional[List[Stackup]] = None
special_cells: Optional[List[SpecialCell]] = None
extra_prefixes: Optional[List[PathPrefix]] = None
additional_lvs_text: Optional[str] = None
additional_drc_text: Optional[str] = None


def copy_library(lib: Library) -> Library:
"""Perform a deep copy of a Library."""
return Library.parse_raw(lib.json())
return Library.model_validate_json(lib.model_dump_json())


def library_from_json(json: str) -> Library:
Expand All @@ -241,12 +241,12 @@ def library_from_json(json: str) -> Library:
:param json: JSON string.
:return: hammer_tech library.
"""
return Library.parse_raw(json)
return Library.model_validate_json(json)


# Struct that holds an extra library and possible prefix.
class ExtraLibrary(BaseModel):
prefix: Optional[PathPrefix]
prefix: Optional[PathPrefix] = None
library: Library

def store_into_library(self) -> Library:
Expand All @@ -271,7 +271,7 @@ class MacroSize(BaseModel):
height: Decimal

def to_setting(self) -> dict:
return self.dict()
return self.model_dump()

@staticmethod
def from_setting(d: dict) -> "MacroSize":
Expand Down Expand Up @@ -370,10 +370,10 @@ def load_from_module(cls, tech_module: str) -> Optional["HammerTechnology"]:
tech_yaml = importlib.resources.files(tech_module) / f"{technology_name}.tech.yml"

if tech_json.is_file():
tech.config = TechJSON.parse_raw(tech_json.read_text())
tech.config = TechJSON.model_validate_json(tech_json.read_text())
return tech
elif tech_yaml.is_file():
tech.config = TechJSON.parse_raw(json.dumps(load_yaml(tech_yaml.read_text())))
tech.config = TechJSON.model_validate_json(json.dumps(load_yaml(tech_yaml.read_text())))
return tech
else: #TODO - from Pydantic model instance
return None
Expand Down Expand Up @@ -535,7 +535,7 @@ def parse_library(lib: dict) -> Library:
raise TypeError("lib must be a dict")

# Convert the dict to JSON...
return Library.parse_raw(json.dumps(lib, cls=HammerJSONEncoder))
return Library.model_validate_json(json.dumps(lib, cls=HammerJSONEncoder))

@property
def tech_defined_libraries(self) -> List[Library]:
Expand Down Expand Up @@ -582,7 +582,7 @@ def extraction_func(lib: Library, paths: List[str]) -> List[str]:
name = str(lib_name)
return [json.dumps([paths[0], name], cls=HammerJSONEncoder)]

lef_filter_plus = filters.lef_filter.copy(deep=True)
lef_filter_plus = filters.lef_filter.model_copy(deep=True)
lef_filter_plus.extraction_func = extraction_func

lef_names_filenames_serialized = self.process_library_filter(filt=lef_filter_plus,
Expand Down Expand Up @@ -779,7 +779,7 @@ def get_extra_libraries(self) -> List[ExtraLibrary]:
if not isinstance(extra_libs, list):
raise ValueError("extra_libraries was not a list")
else:
return [ExtraLibrary.parse_obj(lib) for lib in extra_libs]
return [ExtraLibrary.model_validate(lib) for lib in extra_libs]

def get_available_libraries(self) -> List[Library]:
"""
Expand Down Expand Up @@ -945,7 +945,7 @@ def filter_for_supplies(self, lib: Library) -> bool:
for provided in lib.provides:
if provided.lib_type is not None and provided.lib_type == "technology":
return True
self.logger.warning("Lib %s has no supplies annotation! Using anyway." % (lib.json()))
self.logger.warning("Lib %s has no supplies annotation! Using anyway." % (lib.model_dump_json()))
return True
return self.get_setting("vlsi.inputs.supplies.VDD") == lib.supplies.VDD and self.get_setting(
"vlsi.inputs.supplies.GND") == lib.supplies.GND
Expand Down
13 changes: 5 additions & 8 deletions hammer/tech/specialcells.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from enum import Enum
from typing import List, Optional

from pydantic import BaseModel
from pydantic import ConfigDict, BaseModel


class CellType(str, Enum):
Expand All @@ -29,10 +29,7 @@ class SpecialCell(BaseModel):
# Endcap, filler, etc.
cell_type: CellType
name: List[str]
size: Optional[List[str]]
input_ports: Optional[List[str]]
output_ports: Optional[List[str]]

class Config:
# https://stackoverflow.com/questions/65209934/pydantic-enum-field-does-not-get-converted-to-string
use_enum_values = True
size: Optional[List[str]] = None
input_ports: Optional[List[str]] = None
output_ports: Optional[List[str]] = None
model_config = ConfigDict(use_enum_values=True)
21 changes: 10 additions & 11 deletions hammer/tech/stackup.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from functools import partial
from typing import Any, List, Tuple, Optional

from pydantic import BaseModel, root_validator
from pydantic import model_validator, ConfigDict, BaseModel

from hammer.utils import coerce_to_grid
from hammer.logging import HammerVLSILoggingContext
Expand Down Expand Up @@ -51,8 +51,8 @@ class WidthSpacingTuple(BaseModel):

@staticmethod
def from_setting(grid_unit: Decimal, d: dict) -> "WidthSpacingTuple":
width_at_least = coerce_to_grid(d["width_at_least"], grid_unit)
min_spacing = coerce_to_grid(d["min_spacing"], grid_unit)
width_at_least = coerce_to_grid(Decimal(str(d["width_at_least"])), grid_unit)
min_spacing = coerce_to_grid(Decimal(str(d["min_spacing"])), grid_unit)
assert width_at_least >= 0
assert min_spacing > 0
return WidthSpacingTuple(
Expand Down Expand Up @@ -110,11 +110,10 @@ class Metal(BaseModel):
# Note: grid_unit is not currently parsed as part of the Metal data structure!
# See #379
grid_unit: Decimal
model_config = ConfigDict(use_enum_values=True)

class Config:
use_enum_values = True

@root_validator(pre=True)
@model_validator(mode="before")
@classmethod
def widths_must_snap_to_grid(cls, values):
grid_unit = Decimal(str(values.get("grid_unit")))
for field in ["min_width", "pitch", "offset"]:
Expand Down Expand Up @@ -148,10 +147,10 @@ def from_setting(grid_unit: Decimal, d: dict) -> "Metal":
name=str(d["name"]),
index=int(d["index"]),
direction=RoutingDirection(d["direction"]),
min_width=coerce_to_grid(d["min_width"], grid_unit),
max_width=coerce_to_grid(d["max_width"], grid_unit) if "max_width" in d and d["max_width"] is not None else None,
pitch=coerce_to_grid(d["pitch"], grid_unit),
offset=coerce_to_grid(d["offset"], grid_unit),
min_width=coerce_to_grid(Decimal(str(d["min_width"])), grid_unit),
max_width=coerce_to_grid(Decimal(str(d["max_width"])), grid_unit) if "max_width" in d and d["max_width"] is not None else None,
pitch=coerce_to_grid(Decimal(str(d["pitch"])), grid_unit),
offset=coerce_to_grid(Decimal(str(d["offset"])), grid_unit),
power_strap_widths_and_spacings=WidthSpacingTuple.from_list(grid_unit, d["power_strap_widths_and_spacings"]),
power_strap_width_table=Metal.power_strap_widths_from_list(grid_unit, d["power_strap_width_table"] if "power_strap_width_table" in d and d["power_strap_width_table"] else [])
)
Expand Down
2 changes: 1 addition & 1 deletion hammer/vlsi/hammer_vlsi_impl.py
Original file line number Diff line number Diff line change
Expand Up @@ -214,7 +214,7 @@ def export_config_outputs(self) -> Dict[str, Any]:
outputs = deepdict(super().export_config_outputs())
simple_ex = []
for ex in self.output_libraries: # type: ExtraLibrary
simple_lib = json.loads(ex.library.json())
simple_lib = json.loads(ex.library.model_dump_json())
if(ex.prefix == None):
new_ex = {"library": simple_lib}
else:
Expand Down
Loading

0 comments on commit f42634e

Please sign in to comment.