Fix ruff PLC0206 and PLR6104 (#4035)
* fix ruff PLC0206
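
  PLC0206 flags loops that iterate over a dict's keys and only use each key to index
  the same dict inside the loop body. A minimal illustrative sketch of the pattern
  (hypothetical code, not taken from this diff):

      composition = {"Fe": 2, "O": 3}
      # flagged: looping over keys and re-indexing the dict on every iteration
      for element in composition:
          amount = composition[element]
      # fixed: iterate over .items() to bind key and value together
      for element, amount in composition.items():
          print(element, amount)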

* fix/ignore ruff PLR6104
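
  PLR6104 suggests an augmented operator whenever a name is reassigned from an
  expression on itself, e.g. the uu = uu / norm(uu) -> uu /= norm(uu) change further
  down in this diff. Some occurrences are ignored rather than fixed, presumably where
  in-place mutation is not wanted, since the two forms are not equivalent for shared
  NumPy arrays. A hedged sketch of the difference:

      import numpy as np

      total = np.zeros(3)
      step = np.ones(3)
      total = total + step  # flagged by PLR6104: rebinds `total` to a brand-new array
      total += step  # augmented form: updates the existing array in place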

* simplify np.prod(arr.shape) -> arr.size
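
  For a NumPy array, the total element count is available directly as arr.size, so
  np.prod(arr.shape) is an unnecessary indirection:

      import numpy as np

      arr = np.zeros((3, 4, 5))
      assert np.prod(arr.shape) == arr.size == 60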

* add missing args to get_partial_doses docstring

* fix ruff N818: Exception name `SymmetryUndetermined` should be named with an Error suffix
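
  A sketch of the rename prompted by N818 (the docstring and base class shown here are
  illustrative assumptions, not copied from pymatgen):

      class SymmetryUndeterminedError(ValueError):  # previously SymmetryUndetermined
          """Raised when the symmetry of a structure cannot be determined."""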

* fix TestGruneisenParameter.test_average_gruneisen

    def test_average_gruneisen(self):
        assert self.gruneisen_obj.average_gruneisen() == approx(1.164231026696211)
>       assert self.gruneisen_obj.average_gruneisen(squared=False) == approx(0.849759667411049)
E       assert 1.3554338835221134 == 0.849759667411049 ± 8.5e-07
E
E         comparison failed
E         Obtained: 1.3554338835221134
E         Expected: 0.849759667411049 ± 8.5e-07
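
    The "± 8.5e-07" above is pytest.approx's default relative tolerance of 1e-6 applied
    to the expected value (0.8497... * 1e-6 ≈ 8.5e-7). A minimal illustration of that
    default, unrelated to the Gruneisen values themselves:

        from pytest import approx

        assert 1.0000005 == approx(1.0)  # within the default rel=1e-6 tolerance
        assert 2.0 != approx(1.0)  # far outside it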

* auto-format .github/workflows/issue-metrics.yml

* rename index variables
janosh committed Sep 8, 2024
1 parent 13182c5 commit f6b4073
Showing 64 changed files with 383 additions and 391 deletions.
44 changes: 22 additions & 22 deletions .github/workflows/issue-metrics.yml
@@ -2,7 +2,7 @@ name: Monthly issue metrics
on:
  workflow_dispatch:
  schedule:
-    - cron: '3 2 1 * *'
+    - cron: "3 2 1 * *" # Run at 2:03am on the first of every month

permissions:
  contents: read
@@ -17,28 +17,28 @@ jobs:
      issues: write
      pull-requests: read
    steps:
      - name: Get dates for last month
        shell: bash
        run: |
          # Calculate the first day of the previous month
          first_day=$(date -d "last month" +%Y-%m-01)

          # Calculate the last day of the previous month
          last_day=$(date -d "$first_day +1 month -1 day" +%Y-%m-%d)

          #Set an environment variable with the date range
          echo "$first_day..$last_day"
          echo "last_month=$first_day..$last_day" >> "$GITHUB_ENV"

      - name: Run issue-metrics tool
        uses: github/issue-metrics@v3
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SEARCH_QUERY: 'repo:materialsproject/pymatgen is:issue created:${{ env.last_month }} -reason:"not planned"'

      - name: Create issue
        uses: peter-evans/create-issue-from-file@v5
        with:
          title: Monthly issue metrics report
          token: ${{ secrets.GITHUB_TOKEN }}
          content-filepath: ./issue_metrics.md
@@ -65,8 +65,8 @@ def simple_expansion(
            CoordinationEnvironmentMorphing
        """
        morphing_description = [
-            {"ineighbor": i_nb, "site_type": "neighbor", "expansion_origin": "central_site"}
-            for i_nb in neighbors_indices
+            {"ineighbor": nbr_idx, "site_type": "neighbor", "expansion_origin": "central_site"}
+            for nbr_idx in neighbors_indices
        ]
        return cls(
            initial_environment_symbol=initial_environment_symbol,
@@ -158,11 +158,11 @@ def get_structure(self, morphing_factor):
            if morphing["site_type"] != "neighbor":
                raise ValueError(f"Key \"site_type\" is {morphing['site_type']} while it can only be neighbor")

-            i_site = morphing["ineighbor"] + 1
+            site_idx = morphing["ineighbor"] + 1
            if morphing["expansion_origin"] == "central_site":
                origin = bare_points[0]
-                vector = bare_points[i_site] - origin
-                coords[i_site] += vector * (morphing_factor - 1.0)
+                vector = bare_points[site_idx] - origin
+                coords[site_idx] += vector * (morphing_factor - 1.0)

        return Structure(lattice=lattice, species=species, coords=coords, coords_are_cartesian=True)

@@ -142,8 +142,8 @@ def make_supergraph(graph, multiplicity, periodicity_vectors):
    if isinstance(multiplicity, int) or len(multiplicity) == 1:
        mult = multiplicity if isinstance(multiplicity, int) else multiplicity[0]
        nodes = graph.nodes(data=True)
-        inodes = [isite for isite, data in nodes]
-        indices_nodes = {isite: inodes.index(isite) for isite in inodes}
+        node_indices = [idx for idx, data in nodes]
+        indices_nodes = {idx: node_indices.index(idx) for idx in node_indices}
        edges = graph.edges(data=True, keys=True)
        connecting_edges = []
        other_edges = []
@@ -46,24 +46,24 @@ def get_structure_connectivity(self, light_structure_environments):
        logging.info("Setup of structure connectivity graph")
        structure_connectivity = StructureConnectivity(light_structure_environments)
        structure_connectivity.add_sites()
-        for isite, _site in enumerate(light_structure_environments.structure):
-            site_neighbors_sets = light_structure_environments.neighbors_sets[isite]
+        for site_idx, _site in enumerate(light_structure_environments.structure):
+            site_neighbors_sets = light_structure_environments.neighbors_sets[site_idx]
            if site_neighbors_sets is None:
                continue
            if len(site_neighbors_sets) > 1:
                if self.multiple_environments_choice is None:
-                    raise ValueError(f"Local environment of site {isite} is a mix and nothing is asked about it")
+                    raise ValueError(f"Local environment of site {site_idx} is a mix and nothing is asked about it")
                if self.multiple_environments_choice == "TAKE_HIGHEST_FRACTION":
-                    imax = np.argmax(
-                        [ee["ce_fraction"] for ee in light_structure_environments.coordination_environments[isite]]
+                    idx_max = np.argmax(
+                        [ee["ce_fraction"] for ee in light_structure_environments.coordination_environments[site_idx]]
                    )
-                    print(f"IMAX {imax}")
-                    site_neighbors_set = site_neighbors_sets[imax]
+                    print(f"IMAX {idx_max}")
+                    site_neighbors_set = site_neighbors_sets[idx_max]
                else:
                    raise RuntimeError("Should not be here")
            else:
                site_neighbors_set = site_neighbors_sets[0]
-            structure_connectivity.add_bonds(isite, site_neighbors_set)
+            structure_connectivity.add_bonds(site_idx, site_neighbors_set)
        return structure_connectivity

    def setup_parameters(self, multiple_environments_choice):
@@ -114,8 +114,8 @@ def add_bonds(self, isite, site_neighbors_set):
                        exists = True
                        break
            elif isite == nb_index_unitcell:
-                for isite1, ineighb1, data1 in existing_edges:
-                    if isite1 == ineighb1 and (
+                for site_idx1, ineighb1, data1 in existing_edges:
+                    if site_idx1 == ineighb1 and (
                        np.allclose(data1["delta"], nb_image_cell) or np.allclose(data1["delta"], -nb_image_cell)
                    ):
                        exists = True
@@ -164,7 +164,7 @@ def setup_environment_subgraph(self, environments_symbols, only_atoms=None):
        # Initialize graph for a subset of environments
        self._environment_subgraph = nx.MultiGraph()
        # Add the sites with the required environment(s)
-        for isite, ce_this_site_all in enumerate(self.light_structure_environments.coordination_environments):
+        for site_idx, ce_this_site_all in enumerate(self.light_structure_environments.coordination_environments):
            if ce_this_site_all is None:
                continue
            if len(ce_this_site_all) == 0:
@@ -173,30 +173,30 @@ def setup_environment_subgraph(self, environments_symbols, only_atoms=None):
            if ce_this_site in environments_symbols:
                if only_atoms is None:
                    env_node = get_environment_node(
-                        self.light_structure_environments.structure[isite],
-                        isite,
+                        self.light_structure_environments.structure[site_idx],
+                        site_idx,
                        ce_this_site,
                    )
                    self._environment_subgraph.add_node(env_node)
                elif self.light_structure_environments.structure.is_ordered:
-                    if self.light_structure_environments.structure[isite].specie.symbol in only_atoms:
+                    if self.light_structure_environments.structure[site_idx].specie.symbol in only_atoms:
                        env_node = get_environment_node(
-                            self.light_structure_environments.structure[isite],
-                            isite,
+                            self.light_structure_environments.structure[site_idx],
+                            site_idx,
                            ce_this_site,
                        )
                        self._environment_subgraph.add_node(env_node)
                else:
                    # TODO add the possibility of a "constraint" on the minimum percentage
                    # of the atoms on the site
                    this_site_elements = [
-                        sp.symbol for sp in self.light_structure_environments.structure[isite].species_and_occu
+                        sp.symbol for sp in self.light_structure_environments.structure[site_idx].species_and_occu
                    ]
                    for elem_symbol in this_site_elements:
                        if elem_symbol in only_atoms:
                            env_node = get_environment_node(
-                                self.light_structure_environments.structure[isite],
-                                isite,
+                                self.light_structure_environments.structure[site_idx],
+                                site_idx,
                                ce_this_site,
                            )
                            self._environment_subgraph.add_node(env_node)
@@ -262,9 +262,9 @@ def equivalent_site_index_and_transform(self, psite):
            uc_psite = psite.to_unit_cell()
            site_idx = self.structure_environments.structure.index(uc_psite)
        except ValueError:
-            for isite2, site2 in enumerate(self.structure_environments.structure):
+            for site_idx2, site2 in enumerate(self.structure_environments.structure):
                if psite.is_periodic_image(site2):
-                    site_idx = isite2
+                    site_idx = site_idx2
                    break
        # Get the translation between psite and its corresponding site in the unit cell (Translation I)
        this_site = self.structure_environments.structure[site_idx]
@@ -677,7 +677,7 @@ def compute_structure_environments(
        ]

        if only_indices is not None:
-            sites_indices = [isite for isite in indices if isite in only_indices]
+            sites_indices = [*set(indices) & set(only_indices)]

        # Get the VoronoiContainer for the sites defined by their indices (sites_indices)
        logging.debug("Getting DetailedVoronoiContainer")
@@ -749,22 +749,22 @@ def compute_structure_environments(
        self.detailed_voronoi.separations = [None] * len(self.structure)

        # Loop on all the sites
-        for isite, site in enumerate(self.structure):
-            if isite not in sites_indices:
-                logging.debug(f" ... in site #{isite}/{len(self.structure)} ({site.species_string}) : skipped")
+        for site_idx, site in enumerate(self.structure):
+            if site_idx not in sites_indices:
+                logging.debug(f" ... in site #{site_idx}/{len(self.structure)} ({site.species_string}) : skipped")
                continue
            if break_it:
                logging.debug(
-                    f" ... in site #{isite}/{len(self.structure)} ({site.species_string}) : skipped (timelimit)"
+                    f" ... in site #{site_idx}/{len(self.structure)} ({site.species_string}) : skipped (timelimit)"
                )
                continue
-            logging.debug(f" ... in site #{isite}/{len(self.structure)} ({site.species_string})")
+            logging.debug(f" ... in site #{site_idx}/{len(self.structure)} ({site.species_string})")
            t1 = time.process_time()
            if optimization > 0:
-                self.detailed_voronoi.local_planes[isite] = {}
-                self.detailed_voronoi.separations[isite] = {}
+                self.detailed_voronoi.local_planes[site_idx] = {}
+                self.detailed_voronoi.separations[site_idx] = {}
            struct_envs.init_neighbors_sets(
-                isite=isite,
+                isite=site_idx,
                additional_conditions=additional_conditions,
                valences=valences,
            )
@@ -773,15 +773,15 @@ def compute_structure_environments(
            nb_sets_info = {}
            cn = 0

-            for cn, nb_sets in struct_envs.neighbors_sets[isite].items():
+            for cn, nb_sets in struct_envs.neighbors_sets[site_idx].items():
                if cn not in all_cns:
                    continue
                for inb_set, nb_set in enumerate(nb_sets):
                    logging.debug(f" ... getting environments for nb_set ({cn}, {inb_set})")
                    t_nbset1 = time.process_time()
                    ce = self.update_nb_set_environments(
                        se=struct_envs,
-                        isite=isite,
+                        isite=site_idx,
                        cn=cn,
                        inb_set=inb_set,
                        nb_set=nb_set,
@@ -809,7 +809,7 @@ def compute_structure_environments(
                            logging.debug(f"  hint # {idx_new}")
                            new_nb_set = struct_envs.NeighborsSet(
                                structure=struct_envs.structure,
-                                isite=isite,
+                                isite=site_idx,
                                detailed_voronoi=struct_envs.voronoi,
                                site_voronoi_indices=new_nb_set_voronoi_indices,
                                sources={
@@ -827,14 +827,14 @@ def compute_structure_environments(
                                continue
                            if new_nb_set in [ta["new_nb_set"] for ta in to_add_from_hints]:
                                has_nb_set = True
-                            elif cn_new_nb_set not in struct_envs.neighbors_sets[isite]:
+                            elif cn_new_nb_set not in struct_envs.neighbors_sets[site_idx]:
                                has_nb_set = False
                            else:
-                                has_nb_set = new_nb_set in struct_envs.neighbors_sets[isite][cn_new_nb_set]
+                                has_nb_set = new_nb_set in struct_envs.neighbors_sets[site_idx][cn_new_nb_set]
                            if not has_nb_set:
                                to_add_from_hints.append(
                                    {
-                                        "isite": isite,
+                                        "isite": site_idx,
                                        "new_nb_set": new_nb_set,
                                        "cn_new_nb_set": cn_new_nb_set,
                                    }
@@ -844,7 +844,7 @@ def compute_structure_environments(
                                logging.debug(" => already present")
            logging.debug(" ... getting environments for nb_sets added from hints")
            for missing_nb_set_to_add in to_add_from_hints:
-                struct_envs.add_neighbors_set(isite=isite, nb_set=missing_nb_set_to_add["new_nb_set"])
+                struct_envs.add_neighbors_set(isite=site_idx, nb_set=missing_nb_set_to_add["new_nb_set"])
            for missing_nb_set_to_add in to_add_from_hints:
                isite_new_nb_set = missing_nb_set_to_add["isite"]
                cn_new_nb_set = missing_nb_set_to_add["cn_new_nb_set"]
@@ -865,7 +865,7 @@ def compute_structure_environments(
                    nb_sets_info[cn] = {}
                nb_sets_info[cn][inew_nb_set] = {"time": t_nbset2 - t_nbset1}
            t2 = time.process_time()
-            struct_envs.update_site_info(isite=isite, info_dict={"time": t2 - t1, "nb_sets_info": nb_sets_info})
+            struct_envs.update_site_info(isite=site_idx, info_dict={"time": t2 - t1, "nb_sets_info": nb_sets_info})
            if timelimit is not None:
                time_elapsed = t2 - time_init
                time_left = timelimit - time_elapsed
@@ -1029,7 +1029,7 @@ def setup_test_perfect_environment(
        # Rotating the test environment
        if random_rotation == "RANDOM":
            uu = rng.random(3) + 0.1
-            uu = uu / norm(uu)
+            uu /= norm(uu)
            theta = np.pi * rng.random()
            cos_theta = np.cos(theta)
            sin_theta = np.sin(theta)