Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat!: support multiple backends #51

Draft
wants to merge 24 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
99a156d
refactor: dynamically import tnp in backend module
redeboer May 27, 2021
8b9c454
refactor: create numpy interface for tf.random
redeboer May 27, 2021
abc86a2
refactor: generalize random module
redeboer May 27, 2021
8a3030f
refactor: move tf.shape to backend module
redeboer May 27, 2021
609754a
refactor: move tf.assert's to backend module
redeboer May 27, 2021
94ebe80
refactor: move Tensor/Variable to backend module
redeboer May 27, 2021
170c620
refactor: remove last tensorflow imports
redeboer May 27, 2021
e450c6c
test: add skip decorator for tests that require TF
redeboer May 27, 2021
3ff2a1c
test: use phasespace.backend in tests
redeboer May 27, 2021
2aa6f93
test: use uniform instead of uniform_full_int
redeboer May 27, 2021
fe2501a
feat: provide mapping for numpy as backend
redeboer May 27, 2021
d1bb7dd
refactor!: set numpy as default backend
redeboer May 27, 2021
3bd1f85
build!: move tensorflow requirements to extras
redeboer May 27, 2021
e98d7cd
ci: run pytest with NumPy as backend
redeboer May 27, 2021
290852c
fix: remove duplicate Tensor/Variable import
redeboer May 27, 2021
0dbfdfc
Merge branch 'master' into numpy-backend
redeboer May 28, 2021
1c80cc8
Merge branch 'master' into numpy-backend
redeboer Sep 21, 2021
8413194
revert: move tensorflow requirements to extras
redeboer Sep 21, 2021
669aaf7
Merge branch 'master' into numpy-backend
jonas-eschle Mar 31, 2022
dc9b865
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Mar 31, 2022
1e23047
Merge branch 'master' into numpy-backend
jonas-eschle Aug 25, 2022
aeea1a7
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Aug 25, 2022
7707f58
Merge branch 'master' into numpy-backend
jonas-eschle Feb 8, 2024
1c321a3
[pre-commit.ci] auto fixes from pre-commit.com hooks
pre-commit-ci[bot] Feb 8, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 7 additions & 1 deletion .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,13 @@ jobs:
cd ..
- name: Test with pytest (eager mode)
run: |
PHASESPACE_EAGER=1 pytest --basetemp={envtmpdir} tests
PHASESPACE_EAGER=1 pytest --basetemp={envtmpdir}
cd docs
PHASESPACE_EAGER=1 pytest --dist loadscope --nbval-lax --ignore=.ipynb_checkpoints -n${{ steps.cpu-cores.outputs.count }}
cd ..
- name: Test with pytest (NumPy backend)
run: |
PHASESPACE_BACKEND=NUMPY pytest --basetemp={envtmpdir}
cd docs
PHASESPACE_BACKEND=NUMPY pytest --nbval-lax --ignore=.ipynb_checkpoints
cd ..
1 change: 1 addition & 0 deletions data/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
/B2K*.root*
22 changes: 8 additions & 14 deletions phasespace/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,21 +11,15 @@
__email__ = "[email protected]"
__maintainer__ = "zfit"

__credits__ = ["Jonas Eschle <[email protected]>"]
__credits__ = [
"Jonas Eschle <[email protected]>",
]

__all__ = ["nbody_decay", "GenParticle", "random"]

import tensorflow as tf
__all__ = [
"GenParticle",
"nbody_decay",
"random",
]

from . import random
from .phasespace import GenParticle, nbody_decay


def _set_eager_mode():
import os

is_eager = bool(os.environ.get("PHASESPACE_EAGER"))
tf.config.run_functions_eagerly(is_eager)


_set_eager_mode()
21 changes: 0 additions & 21 deletions phasespace/backend.py

This file was deleted.

82 changes: 82 additions & 0 deletions phasespace/backend/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
# flake8: noqa
import os
from enum import Enum, auto

__all__ = [
"function",
"function_jit",
"function_jit_fixedshape",
"get_shape",
"random",
"tnp",
]


class BackendType(Enum):
    """Computational backends that phasespace can run on."""

    TENSORFLOW = auto()
    NUMPY = auto()

    @staticmethod
    def get_backend(backend: str) -> "BackendType":
        """Resolve a backend name (e.g. from ``PHASESPACE_BACKEND``) to a `BackendType`.

        Matching is case-insensitive and ignores surrounding whitespace;
        an empty string selects the NumPy backend.

        Raises:
            NotImplementedError: If the name does not match a known backend.
        """
        normalized = backend.strip().lower()
        known_backends = {
            "": BackendType.NUMPY,
            "np": BackendType.NUMPY,
            "numpy": BackendType.NUMPY,
            "tf": BackendType.TENSORFLOW,
            "tensorflow": BackendType.TENSORFLOW,
        }
        resolved = known_backends.get(normalized)
        if resolved is None:
            raise NotImplementedError(f'No backend implemented for "{backend}"')
        return resolved


BACKEND = BackendType.get_backend(os.environ.get("PHASESPACE_BACKEND", ""))
if BACKEND == BackendType.TENSORFLOW:
import tensorflow as tf
import tensorflow.experimental.numpy as tnp

from . import _tf_random as random

if int(tf.__version__.split(".")[1]) < 5: # smaller than 2.5
jit_compile_argname = "experimental_compile"
else:
jit_compile_argname = "jit_compile"
function = tf.function(
autograph=False,
experimental_relax_shapes=True,
**{jit_compile_argname: False},
)
function_jit = tf.function(
autograph=False,
experimental_relax_shapes=True,
**{jit_compile_argname: True},
)
function_jit_fixedshape = tf.function(
autograph=False,
experimental_relax_shapes=False,
**{jit_compile_argname: True},
)

Tensor = tf.Tensor
Variable = tf.Variable
get_shape = tf.shape # get shape dynamically
assert_equal = tf.assert_equal
assert_greater_equal = tf.debugging.assert_greater_equal

is_eager = bool(os.environ.get("PHASESPACE_EAGER"))
tf.config.run_functions_eagerly(is_eager)

if BACKEND == BackendType.NUMPY:
import numpy as tnp

from . import _np_random as random

function = lambda x: x
function_jit = lambda x: x
function_jit_fixedshape = lambda x: x

Tensor = tnp.ndarray
Variable = tnp.ndarray
get_shape = tnp.shape

def assert_equal(x, y, message: str, name: str = "") -> None:
    """NumPy drop-in for ``tf.assert_equal``.

    ``name`` is accepted for signature compatibility with the TensorFlow
    backend but is unused here.

    Raises:
        AssertionError: If ``x`` and ``y`` are not elementwise equal;
            ``message`` is included in the error.
    """
    result = tnp.testing.assert_equal(x, y, err_msg=message)
    return result

def assert_greater_equal(x, y, message: str, name: str = "") -> None:
    """NumPy drop-in for ``tf.debugging.assert_greater_equal``: check ``x >= y`` elementwise.

    ``name`` is accepted for signature compatibility with the TensorFlow
    backend but is unused here.

    Raises:
        AssertionError: If any element of ``x`` is strictly smaller than the
            corresponding element of ``y``; ``message`` is the error text.
    """
    # BUG FIX: the previous implementation used
    # tnp.testing.assert_array_less(-x, -y), which is the STRICT test x > y
    # and therefore wrongly raised when x == y (e.g. a decay with exactly
    # zero available mass).  TF's assert_greater_equal is non-strict, so
    # test x >= y directly.
    if not tnp.all(tnp.asarray(x) >= tnp.asarray(y)):
        raise AssertionError(message)
22 changes: 22 additions & 0 deletions phasespace/backend/_np_random.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
from __future__ import annotations

__all__ = [
"Generator",
"from_seed",
"default_rng",
]

from typing import Optional

from numpy.random import PCG64, BitGenerator, Generator, default_rng


def from_seed(
    seed,
    alg: type[BitGenerator] | None = None,
) -> Generator:
    """Mimic `tf.random.Generator.from_seed`: build a NumPy `Generator` from a seed.

    Args:
        seed: Seed value forwarded to the bit-generator constructor.
        alg: Bit-generator class to seed; defaults to `PCG64`.

    Returns:
        A `numpy.random.Generator` wrapping the seeded bit generator.
    """
    chosen_alg = PCG64 if alg is None else alg
    return Generator(chosen_alg(seed))
10 changes: 10 additions & 0 deletions phasespace/backend/_tf_random.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
__all__ = [
"Generator",
"from_seed",
"default_rng",
]

from tensorflow.random import Generator
from tensorflow.random import get_global_generator as default_rng

from_seed = Generator.from_seed
4 changes: 1 addition & 3 deletions phasespace/kinematics.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,7 @@
# =============================================================================
"""Basic kinematics."""

import tensorflow.experimental.numpy as tnp

from phasespace.backend import function, function_jit
from phasespace.backend import function, function_jit, tnp


@function_jit
Expand Down
58 changes: 30 additions & 28 deletions phasespace/phasespace.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,14 +17,18 @@
from collections.abc import Callable
from math import pi

import tensorflow as tf
import tensorflow.experimental.numpy as tnp

from . import kinematics as kin
from .backend import function, function_jit_fixedshape
from .random import SeedLike, get_rng

RELAX_SHAPES = False
from .backend import (
Tensor,
Variable,
assert_equal,
assert_greater_equal,
function,
function_jit_fixedshape,
get_shape,
tnp,
)
from .random import SeedLike, generate_uniform, get_rng


def process_list_to_tensor(lst):
Expand Down Expand Up @@ -94,12 +98,10 @@ def __init__(self, name: str, mass: Callable | int | float) -> None: # noqa
self.name = name
self.children = []
self._mass_val = mass
if not callable(mass) and not isinstance(mass, tf.Variable):
if not callable(mass) and not isinstance(mass, Variable):
mass = tnp.asarray(mass, dtype=tnp.float64)
else:
mass = tf.function(
mass, autograph=False, experimental_relax_shapes=RELAX_SHAPES
)
mass = function(mass)
self._mass = mass
self._generate_called = False # not yet called, children can be set

Expand Down Expand Up @@ -129,11 +131,11 @@ def get_list_of_names(part):
@function
def get_mass(
self,
min_mass: tf.Tensor = None,
max_mass: tf.Tensor = None,
n_events: tf.Tensor | tf.Variable = None,
min_mass: Tensor = None,
max_mass: Tensor = None,
n_events: Union[Tensor, Variable] = None,
seed: SeedLike = None,
) -> tf.Tensor:
) -> Tensor:
"""Get the particle mass.

If the particle is resonant, the mass function will be called with the
Expand Down Expand Up @@ -246,11 +248,11 @@ def _preprocess(momentum, n_events):
if n_events is not None:
momentum_shape = momentum.shape[0]
if momentum_shape is None:
momentum_shape = tf.shape(momentum)[0]
momentum_shape = get_shape(momentum)[0]
momentum_shape = tnp.asarray(momentum_shape, tnp.int64)
else:
momentum_shape = tnp.asarray(momentum_shape, dtype=tnp.int64)
tf.assert_equal(
assert_equal(
n_events,
momentum_shape,
message="Conflicting inputs -> momentum_shape and n_events",
Expand All @@ -260,7 +262,7 @@ def _preprocess(momentum, n_events):
if len(momentum.shape) == 2:
n_events = momentum.shape[0]
if n_events is None: # dynamic shape
n_events = tf.shape(momentum)[0]
n_events = get_shape(momentum)[0]
n_events = tnp.asarray(n_events, dtype=tnp.int64)
else:
n_events = tnp.asarray(1, dtype=tnp.int64)
Expand Down Expand Up @@ -339,7 +341,7 @@ def recurse_stable(part):
# if len(masses.shape) == 1:
# masses = tnp.expand_dims(masses, axis=0)
available_mass = top_mass - tnp.sum(masses, axis=1, keepdims=True)
tf.debugging.assert_greater_equal(
assert_greater_equal(
available_mass,
tnp.zeros_like(available_mass, dtype=tnp.float64),
message="Forbidden decay",
Expand All @@ -348,7 +350,7 @@ def recurse_stable(part):
w_max = self._get_w_max(available_mass, masses)
p_top_boost = kin.boost_components(p_top)
# Start the generation
random_numbers = rng.uniform((n_events, n_particles - 2), dtype=tnp.float64)
random_numbers = generate_uniform(rng, shape=(n_events, n_particles - 2))
random = tnp.concatenate(
[
tnp.zeros((n_events, 1), dtype=tnp.float64),
Expand Down Expand Up @@ -425,14 +427,14 @@ def _generate_part2(inv_masses, masses, n_events, n_particles, rng):
)
)

cos_z = tnp.asarray(2.0, dtype=tnp.float64) * rng.uniform(
(n_events, 1), dtype=tnp.float64
cos_z = tnp.asarray(2.0, dtype=tnp.float64) * generate_uniform(
rng, shape=(n_events, 1)
) - tnp.asarray(1.0, dtype=tnp.float64)
sin_z = tnp.sqrt(tnp.asarray(1.0, dtype=tnp.float64) - cos_z * cos_z)
ang_y = (
tnp.asarray(2.0, dtype=tnp.float64)
* tnp.asarray(pi, dtype=tnp.float64)
* rng.uniform((n_events, 1), dtype=tnp.float64)
* generate_uniform(rng, shape=(n_events, 1))
)
cos_y = tnp.cos(ang_y)
sin_y = tnp.sin(ang_y)
Expand Down Expand Up @@ -610,11 +612,11 @@ def recurse_w_max(parent_mass, current_mass_tree):

def generate(
self,
n_events: int | tf.Tensor | tf.Variable,
boost_to: tf.Tensor | None = None,
n_events: Union[int, Tensor, Variable],
boost_to: Optional[Tensor] = None,
normalize_weights: bool = True,
seed: SeedLike = None,
) -> tuple[tf.Tensor, dict[str, tf.Tensor]]:
) -> Tuple[Tensor, Dict[str, Tensor]]:
"""Generate normalized n-body phase space as tensorflow tensors.

Any TensorFlow tensor can always be converted to a numpy array with the method `numpy()`.
Expand Down Expand Up @@ -657,8 +659,8 @@ def generate(
f"The number of events requested ({n_events}) doesn't match the boost_to input size "
f"of {boost_to.shape}"
)
tf.assert_equal(tf.shape(boost_to)[0], tf.shape(n_events), message=message)
if not isinstance(n_events, tf.Variable):
assert_equal(len(boost_to), n_events, message=message)
if not isinstance(n_events, Variable):
n_events = tnp.asarray(n_events, dtype=tnp.int64)
weights, weights_max, parts, _ = self._recursive_generate(
n_events=n_events,
Expand Down
23 changes: 17 additions & 6 deletions phasespace/random.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,14 +5,16 @@
It mimics the TensorFlow API on random generators and (currently) relies on the global TF states.
Especially on the global random number generator which will be used to get new generators.
"""
from __future__ import annotations

from typing import Optional, Union

import tensorflow as tf
from phasespace.backend import random, tnp

SeedLike = Optional[Union[int, tf.random.Generator]]
SeedLike = Optional[Union[int, random.Generator]]


def get_rng(seed: SeedLike = None) -> tf.random.Generator:
def get_rng(seed: SeedLike = None) -> random.Generator:
"""Get or create a random number generators of type `tf.random.Generator`.

This can be used to either retrieve random number generators deterministically from them
Expand All @@ -33,9 +35,18 @@ def get_rng(seed: SeedLike = None) -> tf.random.Generator:
A `random.Generator` of the active backend
"""
if seed is None:
rng = tf.random.get_global_generator()
elif not isinstance(seed, tf.random.Generator): # it's a seed, not an rng
rng = tf.random.Generator.from_seed(seed=seed)
rng = random.default_rng()
elif not isinstance(seed, random.Generator): # it's a seed, not an rng
rng = random.from_seed(seed)
else:
rng = seed
return rng


def generate_uniform(
    rng: random.Generator, shape: tuple[int, ...], minval=0, maxval=1, dtype=tnp.float64
) -> tnp.ndarray:
    """Draw uniform samples of ``shape`` in ``[minval, maxval)`` from ``rng``.

    Bridges the two backend generator APIs: TensorFlow's
    ``Generator.uniform(shape, minval=..., maxval=..., dtype=...)`` and
    NumPy's ``Generator.uniform(low=..., high=..., size=...)``.
    """
    try:
        # TF-style call first; a NumPy Generator rejects these keyword
        # names with a TypeError, which routes to the NumPy call below.
        return rng.uniform(shape, minval=minval, maxval=maxval, dtype=dtype)
    except TypeError:
        # NumPy's Generator.uniform has no dtype argument, so cast afterwards.
        return rng.uniform(low=minval, high=maxval, size=shape).astype(dtype)
Loading
Loading