id int64 0 190k | prompt stringlengths 21 13.4M | docstring stringlengths 1 12k ⌀ |
|---|---|---|
22,724 | import contextlib
import dataclasses
import importlib.util
import inspect
import typing as tp
from abc import ABCMeta
from copy import copy
from functools import partial
from types import MappingProxyType
import jax
import numpy as np
from flax.experimental.nnx.nnx import module as modulelib
from flax.experimental.nnx.nnx import reprlib, variables
from flax.experimental.nnx.nnx.state import State
P = tp.TypeVar('P', bound='Pytree')
@contextlib.contextmanager
def _mutable(obj: P) -> tp.Iterator[None]:
  """Context manager that temporarily marks ``obj`` as mutable.

  Sets the ``_pytree__is_mutable`` flag directly in ``obj``'s ``__dict__``
  for the duration of the ``with`` block and always removes it afterwards,
  even if the body raises.

  Note: the function is a generator with a ``tp.Iterator[None]`` return
  annotation; the ``@contextlib.contextmanager`` decorator (restored here)
  is required for it to be usable in a ``with`` statement.
  """
  vars(obj)['_pytree__is_mutable'] = True
  try:
    yield
  finally:
    del vars(obj)['_pytree__is_mutable']
22,725 | import contextlib
import dataclasses
import importlib.util
import inspect
import typing as tp
from abc import ABCMeta
from copy import copy
from functools import partial
from types import MappingProxyType
import jax
import numpy as np
from flax.experimental.nnx.nnx import module as modulelib
from flax.experimental.nnx.nnx import reprlib, variables
from flax.experimental.nnx.nnx.state import State
P = tp.TypeVar('P', bound='Pytree')
@contextlib.contextmanager
def _initializing(obj: P) -> tp.Iterator[None]:
  """Context manager that temporarily marks ``obj`` as initializing.

  Sets the ``_pytree__initializing`` flag directly in ``obj``'s ``__dict__``
  for the duration of the ``with`` block and always removes it afterwards,
  even if the body raises.

  Note: the function is a generator with a ``tp.Iterator[None]`` return
  annotation; the ``@contextlib.contextmanager`` decorator (restored here)
  is required for it to be usable in a ``with`` statement.
  """
  vars(obj)['_pytree__initializing'] = True
  try:
    yield
  finally:
    del vars(obj)['_pytree__initializing']
22,726 | from __future__ import annotations
import dataclasses
import typing as tp
import typing_extensions as tpe
def _identity(x):
return x | null |
22,727 | import dataclasses
import functools
import typing as tp
from abc import ABCMeta
from functools import partial
from typing import Any
import jax
import jax.tree_util as jtu
from flax.experimental.nnx.nnx import reprlib, tracers
from flax.experimental import nnx
# Register the Empty sentinel type as a leafless pytree node: flattening
# yields no children, and unflattening always returns the canonical EMPTY
# singleton (both names are defined elsewhere in this module).
jtu.register_pytree_node(
  Empty,
  lambda empty: ((), None),
  lambda _0, _1: EMPTY,
)
class Variable(tp.Generic[A], reprlib.Representable):
  """Base class for NNX variable types.

  Wraps a leaf value (``raw_value``) together with arbitrary metadata
  (stored directly in the instance ``__dict__``) and lifecycle hooks that
  run when the value is created, read, written, or when a named axis is
  added/removed. Reads and writes of ``value`` go through
  ``get_value_hooks`` / ``set_value_hooks`` — restored here as a property
  pair (without the decorators the getter definition was silently shadowed
  by the setter). Subclasses are automatically registered as JAX pytrees
  (see ``__init_subclass__``): ``raw_value`` is the only leaf, everything
  else is aux data.
  """

  raw_value: A
  set_value_hooks: tuple[SetValueHook[A], ...]
  get_value_hooks: tuple[GetValueHook[A], ...]
  create_value_hooks: tuple[CreateValueHook[A], ...]
  add_axis_hooks: tuple[AddAxisHook['Variable[A]'], ...]
  remove_axis_hooks: tuple[RemoveAxisHook['Variable[A]'], ...]
  _trace_state: tracers.TraceState

  @staticmethod
  def _as_hook_tuple(hooks) -> tuple:
    """Normalize a hook argument (callable, sequence, or empty) to a tuple."""
    if not hooks:
      return ()
    if callable(hooks):
      return (hooks,)
    return tuple(hooks)

  def _prepend_class_hook(self, method_name: str, hooks: tuple) -> tuple:
    """Prepend the class-level ``on_*`` method (if defined) to ``hooks``."""
    if hasattr(self, method_name):
      method = getattr(type(self), method_name)
      if method not in hooks:
        return (method, *hooks)
    return hooks

  def __init__(
    self,
    value: tp.Union[A, VariableMetadata[A]],
    set_value_hooks: tp.Union[
      SetValueHook[A], tp.Sequence[SetValueHook[A]]
    ] = (),
    get_value_hooks: tp.Union[
      GetValueHook[A], tp.Sequence[GetValueHook[A]]
    ] = (),
    create_value_hooks: tp.Union[
      CreateValueHook[A], tp.Sequence[CreateValueHook[A]]
    ] = (),
    add_axis_hooks: tp.Union[
      AddAxisHook['Variable[A]'], tp.Sequence[AddAxisHook['Variable[A]']]
    ] = (),
    remove_axis_hooks: tp.Union[
      RemoveAxisHook['Variable[A]'],
      tp.Sequence[RemoveAxisHook['Variable[A]']],
    ] = (),
    **metadata: tp.Any,
  ):
    # Write through vars() to bypass __setattr__, which reads _trace_state.
    vars(self)['_trace_state'] = tracers.TraceState()

    set_value_hooks = self._as_hook_tuple(set_value_hooks)
    get_value_hooks = self._as_hook_tuple(get_value_hooks)
    create_value_hooks = self._as_hook_tuple(create_value_hooks)
    add_axis_hooks = self._as_hook_tuple(add_axis_hooks)
    remove_axis_hooks = self._as_hook_tuple(remove_axis_hooks)

    if isinstance(value, VariableMetadata):
      # Unwrap the metadata container: explicit hooks run before the ones
      # carried by the metadata object (tuple concatenation is a no-op when
      # either side is empty, matching the original branch-per-case logic).
      metadata.update(dict(value.metadata))
      set_value_hooks = set_value_hooks + tuple(value.set_value_hooks)
      get_value_hooks = get_value_hooks + tuple(value.get_value_hooks)
      create_value_hooks = create_value_hooks + tuple(value.create_value_hooks)
      add_axis_hooks = add_axis_hooks + tuple(value.add_axis_hooks)
      remove_axis_hooks = remove_axis_hooks + tuple(value.remove_axis_hooks)
      value = tp.cast(A, value.raw_value)

    # Class-level ``on_*`` methods act as implicit hooks and run first.
    get_value_hooks = self._prepend_class_hook('on_get_value', get_value_hooks)
    set_value_hooks = self._prepend_class_hook('on_set_value', set_value_hooks)
    create_value_hooks = self._prepend_class_hook(
      'on_create_value', create_value_hooks
    )
    add_axis_hooks = self._prepend_class_hook('on_add_axis', add_axis_hooks)
    remove_axis_hooks = self._prepend_class_hook(
      'on_remove_axis', remove_axis_hooks
    )

    self.raw_value = value
    self.get_value_hooks = get_value_hooks
    self.set_value_hooks = set_value_hooks
    self.create_value_hooks = create_value_hooks
    self.add_axis_hooks = add_axis_hooks
    self.remove_axis_hooks = remove_axis_hooks
    vars(self).update(metadata)

    # run create_value hooks
    self.raw_value = self.create_value(self.raw_value)

  if tp.TYPE_CHECKING:

    def __getattr__(self, name: str) -> tp.Any:
      ...

  else:

    def __setattr__(self, name: str, value: Any) -> None:
      return self._setattr(name, value)

  def _setattr(self, name: str, value: tp.Any):
    # Refuse writes once the Variable has leaked to a different JAX trace.
    if not self._trace_state.is_valid():
      raise ValueError(
        'Cannot mutate Variable from a different trace level'
      )
    object.__setattr__(self, name, value)

  def copy_from(self, other: 'Variable[A]') -> None:
    """Replace this Variable's entire state with ``other``'s (same type only)."""
    if not self.is_equivalent(other):
      raise ValueError(
        f'Cannot copy from incompatible container, '
        f'expected {type(self).__name__}, got {type(other).__name__}'
      )
    if self is other:
      return
    vars_dict = vars(self)
    vars_dict.clear()
    vars_dict.update(vars(other))

  def copy_from_def(self, other: 'nnx.graph_utils.VariableDef', /, value: A):
    """Reset state from a VariableDef's metadata, keeping this trace state."""
    _trace_state = self._trace_state
    variable_vars = vars(self)
    variable_vars.clear()
    variable_vars.update(
      other.metadata, _trace_state=_trace_state, raw_value=value
    )

  @property
  def value(self) -> A:
    """The wrapped value, filtered through ``get_value_hooks`` on read."""
    value = self.raw_value
    if self.get_value_hooks:
      for hook in self.get_value_hooks:
        value = hook(self, value)
    return value

  @value.setter
  def value(self, value: A):
    if isinstance(value, Variable):
      raise ValueError(
        'Cannot set value to a Variable, ' 'use `copy_from` method instead'
      )
    if self.set_value_hooks:
      for hook in self.set_value_hooks:
        value = hook(self, value)
    self.raw_value = value

  def create_value(self, value: A):
    """Run ``create_value_hooks`` over ``value`` and return the result."""
    for hook in self.create_value_hooks:
      value = hook(self, value)
    return value

  def add_axis(self, axis_name: AxisName, axis_index: AxisIndex):
    """Notify ``add_axis_hooks`` that a named axis was added."""
    for hook in self.add_axis_hooks:
      hook(self, axis_name, axis_index)

  def remove_axis(self, axis_name: AxisName, axis_index: AxisIndex):
    """Notify ``remove_axis_hooks`` that a named axis was removed."""
    for hook in self.remove_axis_hooks:
      hook(self, axis_name, axis_index)

  def __eq__(self, other: object) -> bool:
    return type(self) is type(other) and vars(other) == vars(self)

  @tp.overload
  def replace(self, *, value: B, **kwargs) -> 'Variable[B]':
    ...

  @tp.overload
  def replace(self, **kwargs) -> 'Variable[A]':
    ...

  def replace(self, **kwargs) -> 'Variable[tp.Any]':
    """Return a copy of this Variable with the given attributes replaced.

    If ``raw_value`` is itself a Variable, that Variable is returned (after
    applying any remaining replacements) instead of being nested.
    """
    # return `value` if it is a Variable
    if 'raw_value' in kwargs and isinstance(
      value := kwargs['raw_value'], Variable
    ):
      # remove value from kwargs
      kwargs.pop('raw_value')
      if not self.is_equivalent(value):
        raise ValueError(
          'Cannot replace value from incompatible container, '
          f'expected {type(self).__name__}, got {type(value).__name__}'
        )
      # if kwargs aren't empty, recursively call replace
      # else return variable value
      if kwargs:
        return value.replace(**kwargs)
      else:
        return value

    # get and update attributes
    attributes = vars(self).copy()
    attributes.update(**kwargs)
    # return new instance with updated attributes
    obj = object.__new__(type(self))
    vars(obj).update(attributes)
    return obj

  def is_equivalent(self, other: tp.Any) -> bool:
    """True if ``other`` is exactly the same Variable subclass."""
    return type(self) is type(other)

  def copy(self: 'Variable[A]') -> 'Variable[A]':
    """Shallow-copy this Variable with a fresh trace state."""
    obj = object.__new__(type(self))
    attributes = vars(self).copy()
    attributes['_trace_state'] = tracers.TraceState()
    vars(obj).update(attributes)
    return obj

  def __nnx_repr__(self):
    yield reprlib.Object(type=type(self))
    for name, value in vars(self).items():
      # Hooks and trace state are noise in the user-facing repr.
      if name.endswith('_hooks') or name == '_trace_state':
        continue
      yield reprlib.Attr(name, repr(value))

  def __init_subclass__(cls):
    super().__init_subclass__()
    # Every subclass is its own pytree type with `raw_value` as the leaf.
    jtu.register_pytree_with_keys(
      cls,
      partial(_variable_flatten, with_keys=True),  # type: ignore
      partial(_variable_unflatten, cls=cls),  # type: ignore
      flatten_func=partial(_variable_flatten, with_keys=False),  # type: ignore
    )

  # hooks API
  if tp.TYPE_CHECKING:

    def on_get_value(self, value: A) -> A:
      raise NotImplementedError

    def on_set_value(self, value: A) -> A:
      raise NotImplementedError

    def on_create_value(self, value: A) -> A:
      raise NotImplementedError

    def on_add_axis(self: V, axis_name: AxisName, axis_index: AxisIndex) -> V:
      raise NotImplementedError

    def on_remove_axis(
      self: V, axis_name: AxisName, axis_index: AxisIndex
    ) -> V:
      raise NotImplementedError
# Register the base Variable class itself as a pytree (subclasses register
# themselves via __init_subclass__): `raw_value` is the single leaf, all
# other attributes travel as aux metadata.
jtu.register_pytree_with_keys(
  Variable,
  partial(_variable_flatten, with_keys=True),  # type: ignore
  partial(_variable_unflatten, cls=Variable),  # type: ignore
  flatten_func=partial(_variable_flatten, with_keys=False),  # type: ignore
)
def _variable_flatten(x: Variable[tp.Any], *, with_keys: bool):
  """Flatten a Variable for JAX pytree registration.

  Returns a single-leaf tuple (``raw_value``, optionally wrapped with a
  ``GetAttrKey`` for keyed flattening) plus the remaining instance
  attributes as aux metadata; the trace state is never carried through a
  pytree boundary.
  """
  metadata = vars(x).copy()
  del metadata['_trace_state']
  leaf = metadata.pop('raw_value')
  if with_keys:
    leaf = (jtu.GetAttrKey('raw_value'), leaf)
  return (leaf,), metadata
22,728 | import dataclasses
import functools
import typing as tp
from abc import ABCMeta
from functools import partial
from typing import Any
import jax
import jax.tree_util as jtu
from flax.experimental.nnx.nnx import reprlib, tracers
from flax.experimental import nnx
A = tp.TypeVar('A')
class Variable(tp.Generic[A], reprlib.Representable):
  """Base class for NNX variable types.

  Wraps a leaf value (``raw_value``) together with arbitrary metadata
  (stored directly in the instance ``__dict__``) and lifecycle hooks that
  run when the value is created, read, written, or when a named axis is
  added/removed. Reads and writes of ``value`` go through
  ``get_value_hooks`` / ``set_value_hooks`` — restored here as a property
  pair (without the decorators the getter definition was silently shadowed
  by the setter). Subclasses are automatically registered as JAX pytrees
  (see ``__init_subclass__``): ``raw_value`` is the only leaf, everything
  else is aux data.
  """

  raw_value: A
  set_value_hooks: tuple[SetValueHook[A], ...]
  get_value_hooks: tuple[GetValueHook[A], ...]
  create_value_hooks: tuple[CreateValueHook[A], ...]
  add_axis_hooks: tuple[AddAxisHook['Variable[A]'], ...]
  remove_axis_hooks: tuple[RemoveAxisHook['Variable[A]'], ...]
  _trace_state: tracers.TraceState

  @staticmethod
  def _as_hook_tuple(hooks) -> tuple:
    """Normalize a hook argument (callable, sequence, or empty) to a tuple."""
    if not hooks:
      return ()
    if callable(hooks):
      return (hooks,)
    return tuple(hooks)

  def _prepend_class_hook(self, method_name: str, hooks: tuple) -> tuple:
    """Prepend the class-level ``on_*`` method (if defined) to ``hooks``."""
    if hasattr(self, method_name):
      method = getattr(type(self), method_name)
      if method not in hooks:
        return (method, *hooks)
    return hooks

  def __init__(
    self,
    value: tp.Union[A, VariableMetadata[A]],
    set_value_hooks: tp.Union[
      SetValueHook[A], tp.Sequence[SetValueHook[A]]
    ] = (),
    get_value_hooks: tp.Union[
      GetValueHook[A], tp.Sequence[GetValueHook[A]]
    ] = (),
    create_value_hooks: tp.Union[
      CreateValueHook[A], tp.Sequence[CreateValueHook[A]]
    ] = (),
    add_axis_hooks: tp.Union[
      AddAxisHook['Variable[A]'], tp.Sequence[AddAxisHook['Variable[A]']]
    ] = (),
    remove_axis_hooks: tp.Union[
      RemoveAxisHook['Variable[A]'],
      tp.Sequence[RemoveAxisHook['Variable[A]']],
    ] = (),
    **metadata: tp.Any,
  ):
    # Write through vars() to bypass __setattr__, which reads _trace_state.
    vars(self)['_trace_state'] = tracers.TraceState()

    set_value_hooks = self._as_hook_tuple(set_value_hooks)
    get_value_hooks = self._as_hook_tuple(get_value_hooks)
    create_value_hooks = self._as_hook_tuple(create_value_hooks)
    add_axis_hooks = self._as_hook_tuple(add_axis_hooks)
    remove_axis_hooks = self._as_hook_tuple(remove_axis_hooks)

    if isinstance(value, VariableMetadata):
      # Unwrap the metadata container: explicit hooks run before the ones
      # carried by the metadata object (tuple concatenation is a no-op when
      # either side is empty, matching the original branch-per-case logic).
      metadata.update(dict(value.metadata))
      set_value_hooks = set_value_hooks + tuple(value.set_value_hooks)
      get_value_hooks = get_value_hooks + tuple(value.get_value_hooks)
      create_value_hooks = create_value_hooks + tuple(value.create_value_hooks)
      add_axis_hooks = add_axis_hooks + tuple(value.add_axis_hooks)
      remove_axis_hooks = remove_axis_hooks + tuple(value.remove_axis_hooks)
      value = tp.cast(A, value.raw_value)

    # Class-level ``on_*`` methods act as implicit hooks and run first.
    get_value_hooks = self._prepend_class_hook('on_get_value', get_value_hooks)
    set_value_hooks = self._prepend_class_hook('on_set_value', set_value_hooks)
    create_value_hooks = self._prepend_class_hook(
      'on_create_value', create_value_hooks
    )
    add_axis_hooks = self._prepend_class_hook('on_add_axis', add_axis_hooks)
    remove_axis_hooks = self._prepend_class_hook(
      'on_remove_axis', remove_axis_hooks
    )

    self.raw_value = value
    self.get_value_hooks = get_value_hooks
    self.set_value_hooks = set_value_hooks
    self.create_value_hooks = create_value_hooks
    self.add_axis_hooks = add_axis_hooks
    self.remove_axis_hooks = remove_axis_hooks
    vars(self).update(metadata)

    # run create_value hooks
    self.raw_value = self.create_value(self.raw_value)

  if tp.TYPE_CHECKING:

    def __getattr__(self, name: str) -> tp.Any:
      ...

  else:

    def __setattr__(self, name: str, value: Any) -> None:
      return self._setattr(name, value)

  def _setattr(self, name: str, value: tp.Any):
    # Refuse writes once the Variable has leaked to a different JAX trace.
    if not self._trace_state.is_valid():
      raise ValueError(
        'Cannot mutate Variable from a different trace level'
      )
    object.__setattr__(self, name, value)

  def copy_from(self, other: 'Variable[A]') -> None:
    """Replace this Variable's entire state with ``other``'s (same type only)."""
    if not self.is_equivalent(other):
      raise ValueError(
        f'Cannot copy from incompatible container, '
        f'expected {type(self).__name__}, got {type(other).__name__}'
      )
    if self is other:
      return
    vars_dict = vars(self)
    vars_dict.clear()
    vars_dict.update(vars(other))

  def copy_from_def(self, other: 'nnx.graph_utils.VariableDef', /, value: A):
    """Reset state from a VariableDef's metadata, keeping this trace state."""
    _trace_state = self._trace_state
    variable_vars = vars(self)
    variable_vars.clear()
    variable_vars.update(
      other.metadata, _trace_state=_trace_state, raw_value=value
    )

  @property
  def value(self) -> A:
    """The wrapped value, filtered through ``get_value_hooks`` on read."""
    value = self.raw_value
    if self.get_value_hooks:
      for hook in self.get_value_hooks:
        value = hook(self, value)
    return value

  @value.setter
  def value(self, value: A):
    if isinstance(value, Variable):
      raise ValueError(
        'Cannot set value to a Variable, ' 'use `copy_from` method instead'
      )
    if self.set_value_hooks:
      for hook in self.set_value_hooks:
        value = hook(self, value)
    self.raw_value = value

  def create_value(self, value: A):
    """Run ``create_value_hooks`` over ``value`` and return the result."""
    for hook in self.create_value_hooks:
      value = hook(self, value)
    return value

  def add_axis(self, axis_name: AxisName, axis_index: AxisIndex):
    """Notify ``add_axis_hooks`` that a named axis was added."""
    for hook in self.add_axis_hooks:
      hook(self, axis_name, axis_index)

  def remove_axis(self, axis_name: AxisName, axis_index: AxisIndex):
    """Notify ``remove_axis_hooks`` that a named axis was removed."""
    for hook in self.remove_axis_hooks:
      hook(self, axis_name, axis_index)

  def __eq__(self, other: object) -> bool:
    return type(self) is type(other) and vars(other) == vars(self)

  @tp.overload
  def replace(self, *, value: B, **kwargs) -> 'Variable[B]':
    ...

  @tp.overload
  def replace(self, **kwargs) -> 'Variable[A]':
    ...

  def replace(self, **kwargs) -> 'Variable[tp.Any]':
    """Return a copy of this Variable with the given attributes replaced.

    If ``raw_value`` is itself a Variable, that Variable is returned (after
    applying any remaining replacements) instead of being nested.
    """
    # return `value` if it is a Variable
    if 'raw_value' in kwargs and isinstance(
      value := kwargs['raw_value'], Variable
    ):
      # remove value from kwargs
      kwargs.pop('raw_value')
      if not self.is_equivalent(value):
        raise ValueError(
          'Cannot replace value from incompatible container, '
          f'expected {type(self).__name__}, got {type(value).__name__}'
        )
      # if kwargs aren't empty, recursively call replace
      # else return variable value
      if kwargs:
        return value.replace(**kwargs)
      else:
        return value

    # get and update attributes
    attributes = vars(self).copy()
    attributes.update(**kwargs)
    # return new instance with updated attributes
    obj = object.__new__(type(self))
    vars(obj).update(attributes)
    return obj

  def is_equivalent(self, other: tp.Any) -> bool:
    """True if ``other`` is exactly the same Variable subclass."""
    return type(self) is type(other)

  def copy(self: 'Variable[A]') -> 'Variable[A]':
    """Shallow-copy this Variable with a fresh trace state."""
    obj = object.__new__(type(self))
    attributes = vars(self).copy()
    attributes['_trace_state'] = tracers.TraceState()
    vars(obj).update(attributes)
    return obj

  def __nnx_repr__(self):
    yield reprlib.Object(type=type(self))
    for name, value in vars(self).items():
      # Hooks and trace state are noise in the user-facing repr.
      if name.endswith('_hooks') or name == '_trace_state':
        continue
      yield reprlib.Attr(name, repr(value))

  def __init_subclass__(cls):
    super().__init_subclass__()
    # Every subclass is its own pytree type with `raw_value` as the leaf.
    jtu.register_pytree_with_keys(
      cls,
      partial(_variable_flatten, with_keys=True),  # type: ignore
      partial(_variable_unflatten, cls=cls),  # type: ignore
      flatten_func=partial(_variable_flatten, with_keys=False),  # type: ignore
    )

  # hooks API
  if tp.TYPE_CHECKING:

    def on_get_value(self, value: A) -> A:
      raise NotImplementedError

    def on_set_value(self, value: A) -> A:
      raise NotImplementedError

    def on_create_value(self, value: A) -> A:
      raise NotImplementedError

    def on_add_axis(self: V, axis_name: AxisName, axis_index: AxisIndex) -> V:
      raise NotImplementedError

    def on_remove_axis(
      self: V, axis_name: AxisName, axis_index: AxisIndex
    ) -> V:
      raise NotImplementedError
def _variable_unflatten(
  metadata: tp.Mapping[str, tp.Any],
  children: tp.Tuple[A],
  *,
  cls: type[Variable[A]],
) -> Variable[A]:
  """Rebuild a ``cls`` Variable from pytree metadata plus the single leaf.

  ``__init__`` is deliberately bypassed (no hooks re-run); a fresh trace
  state is attached since trace state never crosses a pytree boundary.
  """
  variable = object.__new__(cls)
  state = vars(variable)
  state.update(metadata)
  state['_trace_state'] = tracers.TraceState()
  state['raw_value'] = children[0]
  return variable
22,729 | import dataclasses
import functools
import typing as tp
from abc import ABCMeta
from functools import partial
from typing import Any
import jax
import jax.tree_util as jtu
from flax.experimental.nnx.nnx import reprlib, tracers
from flax.experimental import nnx
# Registry of dynamically-created Variable subclasses, keyed by collection
# name (e.g. 'params'); populated lazily by `variable_type` below.
VariableTypeCache: dict[str, tp.Type['Variable[tp.Any]']] = {}
class Variable(tp.Generic[A], reprlib.Representable):
  """Base class for NNX variable types.

  Wraps a leaf value (``raw_value``) together with arbitrary metadata
  (stored directly in the instance ``__dict__``) and lifecycle hooks that
  run when the value is created, read, written, or when a named axis is
  added/removed. Reads and writes of ``value`` go through
  ``get_value_hooks`` / ``set_value_hooks`` — restored here as a property
  pair (without the decorators the getter definition was silently shadowed
  by the setter). Subclasses are automatically registered as JAX pytrees
  (see ``__init_subclass__``): ``raw_value`` is the only leaf, everything
  else is aux data.
  """

  raw_value: A
  set_value_hooks: tuple[SetValueHook[A], ...]
  get_value_hooks: tuple[GetValueHook[A], ...]
  create_value_hooks: tuple[CreateValueHook[A], ...]
  add_axis_hooks: tuple[AddAxisHook['Variable[A]'], ...]
  remove_axis_hooks: tuple[RemoveAxisHook['Variable[A]'], ...]
  _trace_state: tracers.TraceState

  @staticmethod
  def _as_hook_tuple(hooks) -> tuple:
    """Normalize a hook argument (callable, sequence, or empty) to a tuple."""
    if not hooks:
      return ()
    if callable(hooks):
      return (hooks,)
    return tuple(hooks)

  def _prepend_class_hook(self, method_name: str, hooks: tuple) -> tuple:
    """Prepend the class-level ``on_*`` method (if defined) to ``hooks``."""
    if hasattr(self, method_name):
      method = getattr(type(self), method_name)
      if method not in hooks:
        return (method, *hooks)
    return hooks

  def __init__(
    self,
    value: tp.Union[A, VariableMetadata[A]],
    set_value_hooks: tp.Union[
      SetValueHook[A], tp.Sequence[SetValueHook[A]]
    ] = (),
    get_value_hooks: tp.Union[
      GetValueHook[A], tp.Sequence[GetValueHook[A]]
    ] = (),
    create_value_hooks: tp.Union[
      CreateValueHook[A], tp.Sequence[CreateValueHook[A]]
    ] = (),
    add_axis_hooks: tp.Union[
      AddAxisHook['Variable[A]'], tp.Sequence[AddAxisHook['Variable[A]']]
    ] = (),
    remove_axis_hooks: tp.Union[
      RemoveAxisHook['Variable[A]'],
      tp.Sequence[RemoveAxisHook['Variable[A]']],
    ] = (),
    **metadata: tp.Any,
  ):
    # Write through vars() to bypass __setattr__, which reads _trace_state.
    vars(self)['_trace_state'] = tracers.TraceState()

    set_value_hooks = self._as_hook_tuple(set_value_hooks)
    get_value_hooks = self._as_hook_tuple(get_value_hooks)
    create_value_hooks = self._as_hook_tuple(create_value_hooks)
    add_axis_hooks = self._as_hook_tuple(add_axis_hooks)
    remove_axis_hooks = self._as_hook_tuple(remove_axis_hooks)

    if isinstance(value, VariableMetadata):
      # Unwrap the metadata container: explicit hooks run before the ones
      # carried by the metadata object (tuple concatenation is a no-op when
      # either side is empty, matching the original branch-per-case logic).
      metadata.update(dict(value.metadata))
      set_value_hooks = set_value_hooks + tuple(value.set_value_hooks)
      get_value_hooks = get_value_hooks + tuple(value.get_value_hooks)
      create_value_hooks = create_value_hooks + tuple(value.create_value_hooks)
      add_axis_hooks = add_axis_hooks + tuple(value.add_axis_hooks)
      remove_axis_hooks = remove_axis_hooks + tuple(value.remove_axis_hooks)
      value = tp.cast(A, value.raw_value)

    # Class-level ``on_*`` methods act as implicit hooks and run first.
    get_value_hooks = self._prepend_class_hook('on_get_value', get_value_hooks)
    set_value_hooks = self._prepend_class_hook('on_set_value', set_value_hooks)
    create_value_hooks = self._prepend_class_hook(
      'on_create_value', create_value_hooks
    )
    add_axis_hooks = self._prepend_class_hook('on_add_axis', add_axis_hooks)
    remove_axis_hooks = self._prepend_class_hook(
      'on_remove_axis', remove_axis_hooks
    )

    self.raw_value = value
    self.get_value_hooks = get_value_hooks
    self.set_value_hooks = set_value_hooks
    self.create_value_hooks = create_value_hooks
    self.add_axis_hooks = add_axis_hooks
    self.remove_axis_hooks = remove_axis_hooks
    vars(self).update(metadata)

    # run create_value hooks
    self.raw_value = self.create_value(self.raw_value)

  if tp.TYPE_CHECKING:

    def __getattr__(self, name: str) -> tp.Any:
      ...

  else:

    def __setattr__(self, name: str, value: Any) -> None:
      return self._setattr(name, value)

  def _setattr(self, name: str, value: tp.Any):
    # Refuse writes once the Variable has leaked to a different JAX trace.
    if not self._trace_state.is_valid():
      raise ValueError(
        'Cannot mutate Variable from a different trace level'
      )
    object.__setattr__(self, name, value)

  def copy_from(self, other: 'Variable[A]') -> None:
    """Replace this Variable's entire state with ``other``'s (same type only)."""
    if not self.is_equivalent(other):
      raise ValueError(
        f'Cannot copy from incompatible container, '
        f'expected {type(self).__name__}, got {type(other).__name__}'
      )
    if self is other:
      return
    vars_dict = vars(self)
    vars_dict.clear()
    vars_dict.update(vars(other))

  def copy_from_def(self, other: 'nnx.graph_utils.VariableDef', /, value: A):
    """Reset state from a VariableDef's metadata, keeping this trace state."""
    _trace_state = self._trace_state
    variable_vars = vars(self)
    variable_vars.clear()
    variable_vars.update(
      other.metadata, _trace_state=_trace_state, raw_value=value
    )

  @property
  def value(self) -> A:
    """The wrapped value, filtered through ``get_value_hooks`` on read."""
    value = self.raw_value
    if self.get_value_hooks:
      for hook in self.get_value_hooks:
        value = hook(self, value)
    return value

  @value.setter
  def value(self, value: A):
    if isinstance(value, Variable):
      raise ValueError(
        'Cannot set value to a Variable, ' 'use `copy_from` method instead'
      )
    if self.set_value_hooks:
      for hook in self.set_value_hooks:
        value = hook(self, value)
    self.raw_value = value

  def create_value(self, value: A):
    """Run ``create_value_hooks`` over ``value`` and return the result."""
    for hook in self.create_value_hooks:
      value = hook(self, value)
    return value

  def add_axis(self, axis_name: AxisName, axis_index: AxisIndex):
    """Notify ``add_axis_hooks`` that a named axis was added."""
    for hook in self.add_axis_hooks:
      hook(self, axis_name, axis_index)

  def remove_axis(self, axis_name: AxisName, axis_index: AxisIndex):
    """Notify ``remove_axis_hooks`` that a named axis was removed."""
    for hook in self.remove_axis_hooks:
      hook(self, axis_name, axis_index)

  def __eq__(self, other: object) -> bool:
    return type(self) is type(other) and vars(other) == vars(self)

  @tp.overload
  def replace(self, *, value: B, **kwargs) -> 'Variable[B]':
    ...

  @tp.overload
  def replace(self, **kwargs) -> 'Variable[A]':
    ...

  def replace(self, **kwargs) -> 'Variable[tp.Any]':
    """Return a copy of this Variable with the given attributes replaced.

    If ``raw_value`` is itself a Variable, that Variable is returned (after
    applying any remaining replacements) instead of being nested.
    """
    # return `value` if it is a Variable
    if 'raw_value' in kwargs and isinstance(
      value := kwargs['raw_value'], Variable
    ):
      # remove value from kwargs
      kwargs.pop('raw_value')
      if not self.is_equivalent(value):
        raise ValueError(
          'Cannot replace value from incompatible container, '
          f'expected {type(self).__name__}, got {type(value).__name__}'
        )
      # if kwargs aren't empty, recursively call replace
      # else return variable value
      if kwargs:
        return value.replace(**kwargs)
      else:
        return value

    # get and update attributes
    attributes = vars(self).copy()
    attributes.update(**kwargs)
    # return new instance with updated attributes
    obj = object.__new__(type(self))
    vars(obj).update(attributes)
    return obj

  def is_equivalent(self, other: tp.Any) -> bool:
    """True if ``other`` is exactly the same Variable subclass."""
    return type(self) is type(other)

  def copy(self: 'Variable[A]') -> 'Variable[A]':
    """Shallow-copy this Variable with a fresh trace state."""
    obj = object.__new__(type(self))
    attributes = vars(self).copy()
    attributes['_trace_state'] = tracers.TraceState()
    vars(obj).update(attributes)
    return obj

  def __nnx_repr__(self):
    yield reprlib.Object(type=type(self))
    for name, value in vars(self).items():
      # Hooks and trace state are noise in the user-facing repr.
      if name.endswith('_hooks') or name == '_trace_state':
        continue
      yield reprlib.Attr(name, repr(value))

  def __init_subclass__(cls):
    super().__init_subclass__()
    # Every subclass is its own pytree type with `raw_value` as the leaf.
    jtu.register_pytree_with_keys(
      cls,
      partial(_variable_flatten, with_keys=True),  # type: ignore
      partial(_variable_unflatten, cls=cls),  # type: ignore
      flatten_func=partial(_variable_flatten, with_keys=False),  # type: ignore
    )

  # hooks API
  if tp.TYPE_CHECKING:

    def on_get_value(self, value: A) -> A:
      raise NotImplementedError

    def on_set_value(self, value: A) -> A:
      raise NotImplementedError

    def on_create_value(self, value: A) -> A:
      raise NotImplementedError

    def on_add_axis(self: V, axis_name: AxisName, axis_index: AxisIndex) -> V:
      raise NotImplementedError

    def on_remove_axis(
      self: V, axis_name: AxisName, axis_index: AxisIndex
    ) -> V:
      raise NotImplementedError
# Pre-seed the registry with the standard Flax collection names so that
# `variable_type` returns these canonical classes instead of synthesizing
# new subclasses for them.
VariableTypeCache['params'] = Param
VariableTypeCache['batch_stats'] = BatchStat
VariableTypeCache['cache'] = Cache
VariableTypeCache['intermediates'] = Intermediate
def variable_type(name: str) -> tp.Type[Variable[tp.Any]]:
  """Return the ``Variable`` subclass for collection ``name``.

  On first use for a given name, a new subclass is synthesized with
  ``type()`` and memoized in ``VariableTypeCache``.
  """
  try:
    return VariableTypeCache[name]
  except KeyError:
    subclass = type(name, (Variable,), {})
    VariableTypeCache[name] = subclass
    return subclass
22,730 | from __future__ import annotations
import dataclasses
import functools
import typing as tp
import jax
from flax.experimental.nnx.nnx import errors, filterlib, tracers
class ForkedKeys(tp.Mapping[str, jax.Array]):
  """Read-only mapping of RNG stream names to keys, split into two groups.

  ``broadcasts`` holds keys shared as-is, ``splits`` holds keys that were
  split per-replica (names suggest usage under lifted transforms — confirm
  against callers). Lookup checks the broadcast group first; iteration
  yields broadcast names, then split names.
  """

  def __init__(
    self,
    broadcast_rngs: dict[str, jax.Array],
    split_rngs: dict[str, jax.Array],
  ):
    self.broadcasts = broadcast_rngs
    self.splits = split_rngs

  def __getitem__(self, key: str) -> jax.Array:
    if key in self.broadcasts:
      return self.broadcasts[key]
    elif key in self.splits:
      return self.splits[key]
    else:
      # Fixed: the message previously referenced the stale name 'SplitRng'.
      raise KeyError(f'Key "{key}" not found in ForkedKeys.')

  def __iter__(self) -> tp.Iterator[str]:
    yield from self.broadcasts
    yield from self.splits

  def __len__(self) -> int:
    return len(self.broadcasts) + len(self.splits)
# Register ForkedKeys as a pytree so RNG keys flow through JAX transforms;
# the flatten function orders leaves as sorted broadcast names followed by
# sorted split names (see _split_rng_flatten below).
jax.tree_util.register_pytree_with_keys(
  ForkedKeys,
  functools.partial(_split_rng_flatten, with_keys=True),
  _split_rng_unflatten,
  flatten_func=functools.partial(_split_rng_flatten, with_keys=False),
)
def _split_rng_flatten(rngs: ForkedKeys, *, with_keys: bool):
  """Flatten a ForkedKeys into leaves plus static metadata.

  Leaves are ordered by sorted broadcast names followed by sorted split
  names; the two sorted name lists form the aux metadata. When
  ``with_keys`` is set, each leaf is paired with a ``DictKey``.
  """
  broadcast_names = sorted(rngs.broadcasts)
  split_names = sorted(rngs.splits)
  ordered = [(name, rngs.broadcasts[name]) for name in broadcast_names]
  ordered.extend((name, rngs.splits[name]) for name in split_names)
  if with_keys:
    nodes = tuple(
      (jax.tree_util.DictKey(name), value) for name, value in ordered
    )
  else:
    nodes = tuple(value for _, value in ordered)
  return nodes, (broadcast_names, split_names)
22,731 | from __future__ import annotations
import dataclasses
import functools
import typing as tp
import jax
from flax.experimental.nnx.nnx import errors, filterlib, tracers
class ForkedKeys(tp.Mapping[str, jax.Array]):
  """Read-only mapping of RNG stream names to keys, split into two groups.

  ``broadcasts`` holds keys shared as-is, ``splits`` holds keys that were
  split per-replica (names suggest usage under lifted transforms — confirm
  against callers). Lookup checks the broadcast group first; iteration
  yields broadcast names, then split names.
  """

  def __init__(
    self,
    broadcast_rngs: dict[str, jax.Array],
    split_rngs: dict[str, jax.Array],
  ):
    self.broadcasts = broadcast_rngs
    self.splits = split_rngs

  def __getitem__(self, key: str) -> jax.Array:
    if key in self.broadcasts:
      return self.broadcasts[key]
    elif key in self.splits:
      return self.splits[key]
    else:
      # Fixed: the message previously referenced the stale name 'SplitRng'.
      raise KeyError(f'Key "{key}" not found in ForkedKeys.')

  def __iter__(self) -> tp.Iterator[str]:
    yield from self.broadcasts
    yield from self.splits

  def __len__(self) -> int:
    return len(self.broadcasts) + len(self.splits)
# Register ForkedKeys as a pytree so RNG keys flow through JAX transforms;
# the flatten/unflatten pair round-trips the broadcast and split groups.
jax.tree_util.register_pytree_with_keys(
  ForkedKeys,
  functools.partial(_split_rng_flatten, with_keys=True),
  _split_rng_unflatten,
  flatten_func=functools.partial(_split_rng_flatten, with_keys=False),
)
def _split_rng_unflatten(
  metadata: tuple[tuple[str, ...], tuple[str, ...]],
  nodes: tuple[jax.Array, ...],
):
  """Inverse of ``_split_rng_flatten``: rebuild a ForkedKeys from leaves.

  The metadata carries the sorted broadcast and split name lists; the first
  ``len(broadcast_names)`` leaves belong to the broadcast group, the rest
  to the split group.
  """
  broadcast_names, split_names = metadata
  cut = len(broadcast_names)
  broadcasts = dict(zip(broadcast_names, nodes[:cut]))
  splits = dict(zip(split_names, nodes[cut:]))
  return ForkedKeys(broadcasts, splits)
22,732 | import jax
import jax.core
from jax.core import MainTrace
from flax.experimental.nnx.nnx import reprlib
def get_top_trace(pytree: tp.Union[tp.Any, Tracer]) -> MainTrace:
  """Returns the main top trace of a sequence of tracers.

  A bare Tracer reports its own trace's main; any other value is treated
  as a pytree whose leaves are scanned by ``jax.core.find_top_trace``.
  """
  if isinstance(pytree, Tracer):
    return pytree._trace.main
  leaves = jax.tree_util.tree_leaves(pytree)
  return jax.core.find_top_trace(leaves).main
The provided code snippet includes the necessary dependencies for implementing the `current_jax_trace` function. Write a Python function `def current_jax_trace() -> MainTrace` that solves the following problem:
Return the innermost JAX main trace.
Here is the function:
def current_jax_trace() -> MainTrace:
  """Returns the innermost Jax tracer."""
  # An empty pytree has no tracer leaves, so get_top_trace falls through to
  # jax.core.find_top_trace([]), i.e. the currently-active main trace.
  return get_top_trace(())
22,733 | import contextlib
import dataclasses
import threading
import typing as tp
from abc import ABC, abstractmethod
@dataclasses.dataclass
class Object:
  """Rendering config yielded first by ``__nnx_repr__``.

  Restored the stripped ``@dataclasses.dataclass`` decorator: callers in
  this file construct it as ``Object(type=...)``, which requires the
  generated ``__init__``.
  """

  type: tp.Union[str, type]  # type name (or the type itself) to render
  start: str = '('  # opening delimiter after the type name
  end: str = ')'  # closing delimiter
  value_sep: str = '='  # separator between an attr key and its value
  elem_indent: str = ' '  # extra indent applied to each attribute line
  empty_repr: str = ''  # rendered between delimiters when there are no attrs
@dataclasses.dataclass
class Attr:
  """A single attribute row yielded by ``__nnx_repr__``.

  Restored the stripped ``@dataclasses.dataclass`` decorator: callers in
  this file construct it as ``Attr(name, value)``, which requires the
  generated ``__init__``.
  """

  key: str  # attribute name
  value: tp.Union[str, tp.Any]  # pre-rendered string, or any value to repr()
  start: str = ''  # optional prefix before the key
  end: str = ''  # optional suffix after the value
class Representable(ABC):
  """Mixin that derives ``__repr__`` from the ``__nnx_repr__`` protocol.

  Subclasses yield one ``Object`` (rendering config) followed by ``Attr``
  items; ``get_repr`` assembles these into the final string.
  """

  __slots__ = ()

  def __nnx_repr__(self) -> tp.Iterator[tp.Union[Object, Attr]]:
    # Subclass responsibility; not decorated abstract in this module.
    raise NotImplementedError

  def __repr__(self) -> str:
    return get_repr(self)
@contextlib.contextmanager
def add_indent(indent: str) -> tp.Iterator[None]:
  """Context manager: push a deeper indentation level for nested reprs.

  Appends the current indent plus ``indent`` to the thread-local indent
  stack and always pops it on exit, even if the body raises.

  Note: the function is a generator with a ``tp.Iterator[None]`` return
  annotation and is used via ``with`` in ``get_repr``; the
  ``@contextlib.contextmanager`` decorator (restored here) is required.
  """
  REPR_CONTEXT.indent_stack.append(REPR_CONTEXT.indent_stack[-1] + indent)
  try:
    yield
  finally:
    REPR_CONTEXT.indent_stack.pop()
def get_indent() -> str:
  """Return the accumulated indentation string for the current nesting level."""
  return REPR_CONTEXT.indent_stack[-1]
def get_repr(obj: Representable) -> str:
  """Render ``obj`` using its ``__nnx_repr__`` stream.

  The stream must yield an ``Object`` (rendering config) first, followed by
  zero or more ``Attr`` items; each attribute is rendered on its own line,
  indented one ``elem_indent`` deeper than the enclosing object.
  """
  if not isinstance(obj, Representable):
    raise TypeError(f'Object {obj!r} is not representable')
  iterator = obj.__nnx_repr__()
  config = next(iterator)
  if not isinstance(config, Object):
    raise TypeError(f'First item must be Config, got {type(config).__name__}')

  def _repr_elem(elem: tp.Any) -> str:
    # Render one Attr as "<indent><start><key><sep><value><end>".
    if not isinstance(elem, Attr):
      raise TypeError(f'Item must be Elem, got {type(elem).__name__}')
    value = elem.value if isinstance(elem.value, str) else repr(elem.value)
    # Re-indent nested multi-line values; Representable values already
    # indent themselves via the shared indent stack.
    if '\n' in value and not isinstance(elem.value, Representable):
      value = value.replace('\n', '\n' + get_indent())
    return (
      f'{get_indent()}{elem.start}{elem.key}{config.value_sep}{value}{elem.end}'
    )

  # Consume the remaining Attr items with the deeper indent active.
  with add_indent(config.elem_indent):
    elems = list(map(_repr_elem, iterator))
  elems = ',\n'.join(elems)
  if elems:
    elems = '\n' + elems + '\n' + get_indent()
  else:
    elems = config.empty_repr

  type_repr = (
    config.type if isinstance(config.type, str) else config.type.__name__
  )
  return f'{type_repr}{config.start}{elems}{config.end}'
22,734 | from __future__ import annotations
import inspect
import typing as tp
import jax
import jax.numpy as jnp
import numpy as np
import optax
from flax.experimental.nnx.nnx import pytreelib
from flax.experimental.nnx.nnx.module import GraphDef, Module
from flax.experimental.nnx.nnx.proxy_caller import ApplyCaller
from flax.experimental.nnx.nnx.rnglib import Rngs
from flax.experimental.nnx.nnx.state import State
The provided code snippet includes necessary dependencies for implementing the `has_keyword_arg` function. Write a Python function `def has_keyword_arg(func: tp.Callable[..., tp.Any], name: str) -> bool` to solve the following problem:
Return True if func has keyword-only arguments with the given name.
Here is the function:
def has_keyword_arg(func: tp.Callable[..., tp.Any], name: str) -> bool:
"""Return True if func has keyword-only arguments with the given name."""
return any(
param.name == name
and param.kind in (param.KEYWORD_ONLY, param.POSITIONAL_OR_KEYWORD)
for param in inspect.signature(func).parameters.values()
) | Return True if func has keyword-only arguments with the given name. |
22,735 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
Node = tp.TypeVar('Node')
Leaf = tp.TypeVar('Leaf')
AuxData = tp.TypeVar('AuxData')
NODE_TYPES: dict[type, 'NodeImpl[tp.Any, tp.Any, tp.Any]'] = {}
class ImmutableNodeImpl(NodeImplBase[Node, Leaf, AuxData]):
unflatten: tp.Callable[[tuple[tuple[str, Leaf], ...], AuxData], Node]
def register_immutable_node_type(
type: type,
flatten: tp.Callable[[Node], tuple[tp.Sequence[tuple[str, Leaf]], AuxData]],
unflatten: tp.Callable[[tuple[tuple[str, Leaf], ...], AuxData], Node],
):
NODE_TYPES[type] = ImmutableNodeImpl(
type=type, flatten=flatten, unflatten=unflatten
) | null |
22,736 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
Node = tp.TypeVar('Node')
Leaf = tp.TypeVar('Leaf')
AuxData = tp.TypeVar('AuxData')
NODE_TYPES: dict[type, 'NodeImpl[tp.Any, tp.Any, tp.Any]'] = {}
class MutableNodeImpl(NodeImplBase[Node, Leaf, AuxData]):
set_key: tp.Callable[[Node, str, Leaf], None]
pop_key: tp.Callable[[Node, str], Leaf]
create_empty: tp.Callable[[AuxData], Node]
clear: tp.Callable[[Node, AuxData], None]
def init(self, node: Node, items: tuple[tuple[str, Leaf], ...]):
for key, value in items:
self.set_key(node, key, value)
def register_mutable_node_type(
type: type,
flatten: tp.Callable[[Node], tuple[tp.Sequence[tuple[str, Leaf]], AuxData]],
set_key: tp.Callable[[Node, str, Leaf], None],
pop_key: tp.Callable[[Node, str], Leaf],
create_empty: tp.Callable[[AuxData], Node],
clear: tp.Callable[[Node, AuxData], None],
):
NODE_TYPES[type] = MutableNodeImpl(
type=type,
flatten=flatten,
set_key=set_key,
pop_key=pop_key,
create_empty=create_empty,
clear=clear,
) | null |
22,737 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
class GraphDef(tp.Generic[Node], reprlib.Representable):
__slots__ = (
'_type',
'_index',
'_attributes',
'_subgraphs',
'_static_fields',
'_variables',
'_metadata',
)
def __init__(
self,
type: tp.Type[Node],
index: int,
attributes: tuple[str, ...],
subgraphs: tp.Iterable[tuple[str, tp.Union['GraphDef[tp.Any]', int]]],
static_fields: tp.Iterable[tuple[str, tp.Any]],
variables: tp.Iterable[tuple[str, VariableDef | int]],
metadata: tp.Any,
):
self._type: type[Node] = type
self._index = index
self._attributes = attributes
self._subgraphs = _HashableMapping(subgraphs)
self._static_fields = _HashableMapping(static_fields)
self._variables = _HashableMapping(variables)
self._metadata = metadata
def __nnx_repr__(self):
yield reprlib.Object(type=type(self))
yield reprlib.Attr('type', self._type.__name__)
yield reprlib.Attr('index', self._index)
yield reprlib.Attr('attributes', self._attributes)
yield reprlib.Attr('subgraphs', _MappingRepr(self._subgraphs))
yield reprlib.Attr('static_fields', _MappingRepr(self._static_fields))
yield reprlib.Attr('variables', _MappingRepr(self._variables))
yield reprlib.Attr('metadata', self._metadata)
def __hash__(self) -> int:
return hash((self._type, self._subgraphs))
def __eq__(self, other: tp.Any) -> bool:
if not isinstance(other, GraphDef):
return False
return self._type == other._type and self._subgraphs == other._subgraphs
def type(self) -> tp.Type[Node]:
return self._type
def index(self) -> int:
return self._index
def attributes(self) -> tuple[str, ...]:
return self._attributes
def subgraphs(self):
return self._subgraphs
def static_fields(self):
return self._static_fields
def variables(self):
return self._variables
def metadata(self) -> tp.Any:
return self._metadata
def merge(self, state: State, /, *states: State) -> Node:
if states:
state = State.merge(state, *states)
return graph_unflatten(self, state)[0]
def apply(
self, state: State, *states: State
) -> ApplyCaller[tuple[State, 'GraphDef[Node]']]:
accessor = DelayedAccessor()
def _apply(
accessor: DelayedAccessor, *args, **kwargs
) -> tuple[tp.Any, tuple[State, GraphDef[Node]]]:
module = self.merge(state, *states)
fn = accessor(module)
out = fn(*args, **kwargs)
return out, graph_flatten(module)[:2]
return CallableProxy(_apply, accessor) # type: ignore
def make_empty(self) -> Node:
return self.merge(State({}))
def _gradphdef_flatten(graphdef: GraphDef[tp.Any]):
return (), (
graphdef._type,
graphdef._index,
graphdef._attributes,
graphdef._subgraphs,
graphdef._static_fields,
graphdef._variables,
graphdef._metadata,
) | null |
22,738 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
Node = tp.TypeVar('Node')
class GraphDef(tp.Generic[Node], reprlib.Representable):
__slots__ = (
'_type',
'_index',
'_attributes',
'_subgraphs',
'_static_fields',
'_variables',
'_metadata',
)
def __init__(
self,
type: tp.Type[Node],
index: int,
attributes: tuple[str, ...],
subgraphs: tp.Iterable[tuple[str, tp.Union['GraphDef[tp.Any]', int]]],
static_fields: tp.Iterable[tuple[str, tp.Any]],
variables: tp.Iterable[tuple[str, VariableDef | int]],
metadata: tp.Any,
):
self._type: type[Node] = type
self._index = index
self._attributes = attributes
self._subgraphs = _HashableMapping(subgraphs)
self._static_fields = _HashableMapping(static_fields)
self._variables = _HashableMapping(variables)
self._metadata = metadata
def __nnx_repr__(self):
yield reprlib.Object(type=type(self))
yield reprlib.Attr('type', self._type.__name__)
yield reprlib.Attr('index', self._index)
yield reprlib.Attr('attributes', self._attributes)
yield reprlib.Attr('subgraphs', _MappingRepr(self._subgraphs))
yield reprlib.Attr('static_fields', _MappingRepr(self._static_fields))
yield reprlib.Attr('variables', _MappingRepr(self._variables))
yield reprlib.Attr('metadata', self._metadata)
def __hash__(self) -> int:
return hash((self._type, self._subgraphs))
def __eq__(self, other: tp.Any) -> bool:
if not isinstance(other, GraphDef):
return False
return self._type == other._type and self._subgraphs == other._subgraphs
def type(self) -> tp.Type[Node]:
return self._type
def index(self) -> int:
return self._index
def attributes(self) -> tuple[str, ...]:
return self._attributes
def subgraphs(self):
return self._subgraphs
def static_fields(self):
return self._static_fields
def variables(self):
return self._variables
def metadata(self) -> tp.Any:
return self._metadata
def merge(self, state: State, /, *states: State) -> Node:
if states:
state = State.merge(state, *states)
return graph_unflatten(self, state)[0]
def apply(
self, state: State, *states: State
) -> ApplyCaller[tuple[State, 'GraphDef[Node]']]:
accessor = DelayedAccessor()
def _apply(
accessor: DelayedAccessor, *args, **kwargs
) -> tuple[tp.Any, tuple[State, GraphDef[Node]]]:
module = self.merge(state, *states)
fn = accessor(module)
out = fn(*args, **kwargs)
return out, graph_flatten(module)[:2]
return CallableProxy(_apply, accessor) # type: ignore
def make_empty(self) -> Node:
return self.merge(State({}))
class Empty:
def __repr__(self):
return 'Empty'
def __eq__(self, other):
return isinstance(other, Empty)
def __hash__(self):
return hash(Empty)
class Variable(tp.Generic[A], reprlib.Representable):
raw_value: A
set_value_hooks: tuple[SetValueHook[A], ...]
get_value_hooks: tuple[GetValueHook[A], ...]
create_value_hooks: tuple[CreateValueHook[A], ...]
add_axis_hooks: tuple[AddAxisHook['Variable[A]'], ...]
remove_axis_hooks: tuple[RemoveAxisHook['Variable[A]'], ...]
_trace_state: tracers.TraceState
def __init__(
self,
value: tp.Union[A, VariableMetadata[A]],
set_value_hooks: tp.Union[
SetValueHook[A], tp.Sequence[SetValueHook[A]]
] = (),
get_value_hooks: tp.Union[
GetValueHook[A], tp.Sequence[GetValueHook[A]]
] = (),
create_value_hooks: tp.Union[
CreateValueHook[A], tp.Sequence[CreateValueHook[A]]
] = (),
add_axis_hooks: tp.Union[
AddAxisHook['Variable[A]'], tp.Sequence[AddAxisHook['Variable[A]']]
] = (),
remove_axis_hooks: tp.Union[
RemoveAxisHook['Variable[A]'],
tp.Sequence[RemoveAxisHook['Variable[A]']],
] = (),
**metadata: tp.Any,
):
vars(self)['_trace_state'] = tracers.TraceState()
if set_value_hooks:
if callable(set_value_hooks):
set_value_hooks = (set_value_hooks,)
else:
set_value_hooks = tuple(set_value_hooks)
else:
set_value_hooks = ()
if get_value_hooks:
if callable(get_value_hooks):
get_value_hooks = (get_value_hooks,)
else:
get_value_hooks = tuple(get_value_hooks)
else:
get_value_hooks = ()
if create_value_hooks:
if callable(create_value_hooks):
create_value_hooks = (create_value_hooks,)
else:
create_value_hooks = tuple(create_value_hooks)
else:
create_value_hooks = ()
if add_axis_hooks:
if callable(add_axis_hooks):
add_axis_hooks = (add_axis_hooks,)
else:
add_axis_hooks = tuple(add_axis_hooks)
else:
add_axis_hooks = ()
if remove_axis_hooks:
if callable(remove_axis_hooks):
remove_axis_hooks = (remove_axis_hooks,)
else:
remove_axis_hooks = tuple(remove_axis_hooks)
else:
remove_axis_hooks = ()
if isinstance(value, VariableMetadata):
value_metadata = dict(value.metadata)
if set_value_hooks and value.set_value_hooks:
set_value_hooks = set_value_hooks + value.set_value_hooks
elif value.set_value_hooks:
set_value_hooks = value.set_value_hooks
if get_value_hooks and value.get_value_hooks:
get_value_hooks = get_value_hooks + value.get_value_hooks
elif value.get_value_hooks:
get_value_hooks = value.get_value_hooks
if create_value_hooks and value.create_value_hooks:
create_value_hooks = create_value_hooks + value.create_value_hooks
elif value.create_value_hooks:
create_value_hooks = value.create_value_hooks
if add_axis_hooks and value.add_axis_hooks:
add_axis_hooks = add_axis_hooks + value.add_axis_hooks
elif value.add_axis_hooks:
add_axis_hooks = value.add_axis_hooks
if remove_axis_hooks and value.remove_axis_hooks:
remove_axis_hooks = remove_axis_hooks + value.remove_axis_hooks
elif value.remove_axis_hooks:
remove_axis_hooks = value.remove_axis_hooks
metadata.update(value_metadata)
value = tp.cast(A, value.raw_value)
if hasattr(self, 'on_get_value'):
on_get_value = getattr(type(self), 'on_get_value')
if on_get_value not in get_value_hooks:
get_value_hooks = (on_get_value, *get_value_hooks)
if hasattr(self, 'on_set_value'):
on_set_value = getattr(type(self), 'on_set_value')
if on_set_value not in set_value_hooks:
set_value_hooks = (on_set_value, *set_value_hooks)
if hasattr(self, 'on_create_value'):
on_create_value = getattr(type(self), 'on_create_value')
if on_create_value not in create_value_hooks:
create_value_hooks = (on_create_value, *create_value_hooks)
if hasattr(self, 'on_add_axis'):
on_add_axis = getattr(type(self), 'on_add_axis')
if on_add_axis not in add_axis_hooks:
add_axis_hooks = (on_add_axis, *add_axis_hooks)
if hasattr(self, 'on_remove_axis'):
on_remove_axis = getattr(type(self), 'on_remove_axis')
if on_remove_axis not in remove_axis_hooks:
remove_axis_hooks = (on_remove_axis, *remove_axis_hooks)
self.raw_value = value
self.get_value_hooks = get_value_hooks
self.set_value_hooks = set_value_hooks
self.create_value_hooks = create_value_hooks
self.add_axis_hooks = add_axis_hooks
self.remove_axis_hooks = remove_axis_hooks
vars(self).update(metadata)
# run create_value hooks
self.raw_value = self.create_value(self.raw_value)
if tp.TYPE_CHECKING:
def __getattr__(self, name: str) -> tp.Any:
...
else:
def __setattr__(self, name: str, value: Any) -> None:
return self._setattr(name, value)
def _setattr(self, name: str, value: tp.Any):
if not self._trace_state.is_valid():
raise ValueError(
'Cannot mutate Variable from a different trace level'
)
object.__setattr__(self, name, value)
def copy_from(self, other: 'Variable[A]') -> None:
if not self.is_equivalent(other):
raise ValueError(
f'Cannot copy from incompatible container, '
f'expected {type(self).__name__}, got {type(other).__name__}'
)
if self is other:
return
vars_dict = vars(self)
vars_dict.clear()
vars_dict.update(vars(other))
def copy_from_def(self, other: 'nnx.graph_utils.VariableDef', /, value: A):
_trace_state = self._trace_state
variable_vars = vars(self)
variable_vars.clear()
variable_vars.update(other.metadata, _trace_state=_trace_state, raw_value=value)
def value(self) -> A:
value = self.raw_value
if self.get_value_hooks:
for hook in self.get_value_hooks:
value = hook(self, value)
return value
def value(self, value: A):
if isinstance(value, Variable):
raise ValueError(
'Cannot set value to a Variable, ' 'use `copy_from` method instead'
)
if self.set_value_hooks:
for hook in self.set_value_hooks:
value = hook(self, value)
self.raw_value = value
def create_value(self, value: A):
for hook in self.create_value_hooks:
value = hook(self, value)
return value
def add_axis(self, axis_name: AxisName, axis_index: AxisIndex):
for hook in self.add_axis_hooks:
hook(self, axis_name, axis_index)
def remove_axis(self, axis_name: AxisName, axis_index: AxisIndex):
for hook in self.remove_axis_hooks:
hook(self, axis_name, axis_index)
def __eq__(self, other: object) -> bool:
return type(self) is type(other) and vars(other) == vars(self)
def replace(self, *, value: B, **kwargs) -> 'Variable[B]':
...
def replace(self, **kwargs) -> 'Variable[A]':
...
def replace(self, **kwargs) -> 'Variable[tp.Any]':
# return `value` if it is a Variable
if 'raw_value' in kwargs and isinstance(
value := kwargs['raw_value'], Variable
):
# remove value from kwargs
kwargs.pop('raw_value')
if not self.is_equivalent(value):
raise ValueError(
'Cannot replace value from incompatible container, '
f'expected {type(self).__name__}, got {type(value).__name__}'
)
# if kwargs aren't empty, recursively call replace
# else return variable value
if kwargs:
return value.replace(**kwargs)
else:
return value
# get and update attributes
attributes = vars(self).copy()
attributes.update(**kwargs)
# return new instance with updated attributes
obj = object.__new__(type(self))
vars(obj).update(attributes)
return obj
def is_equivalent(self, other: tp.Any) -> bool:
return type(self) is type(other)
def copy(self: 'Variable[A]') -> 'Variable[A]':
obj = object.__new__(type(self))
attributes = vars(self).copy()
attributes['_trace_state'] = tracers.TraceState()
vars(obj).update(attributes)
return obj
def __nnx_repr__(self):
yield reprlib.Object(type=type(self))
for name, value in vars(self).items():
if name.endswith('_hooks') or name == "_trace_state":
continue
yield reprlib.Attr(name, repr(value))
def __init_subclass__(cls):
super().__init_subclass__()
jtu.register_pytree_with_keys(
cls,
partial(_variable_flatten, with_keys=True), # type: ignore
partial(_variable_unflatten, cls=cls), # type: ignore
flatten_func=partial(_variable_flatten, with_keys=False), # type: ignore
)
# hooks API
if tp.TYPE_CHECKING:
def on_get_value(self, value: A) -> A:
raise NotImplementedError
def on_set_value(self, value: A) -> A:
raise NotImplementedError
def on_create_value(self, value: A) -> A:
raise NotImplementedError
def on_add_axis(self: V, axis_name: AxisName, axis_index: AxisIndex) -> V:
raise NotImplementedError
def on_remove_axis(
self: V, axis_name: AxisName, axis_index: AxisIndex
) -> V:
raise NotImplementedError
def _graphdef_unflatten(
metadata: tuple[
tp.Type[Node],
int,
tuple[str, ...],
tuple[tuple[str, GraphDef[Node] | int], ...],
tuple[tuple[str, tp.Any], ...],
tuple[tuple[str, Variable[Empty] | int], ...],
tp.Any,
],
_,
) -> GraphDef[Node]:
return GraphDef(*metadata) | null |
22,739 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
Index = int
Node = tp.TypeVar('Node')
class GraphDef(tp.Generic[Node], reprlib.Representable):
__slots__ = (
'_type',
'_index',
'_attributes',
'_subgraphs',
'_static_fields',
'_variables',
'_metadata',
)
def __init__(
self,
type: tp.Type[Node],
index: int,
attributes: tuple[str, ...],
subgraphs: tp.Iterable[tuple[str, tp.Union['GraphDef[tp.Any]', int]]],
static_fields: tp.Iterable[tuple[str, tp.Any]],
variables: tp.Iterable[tuple[str, VariableDef | int]],
metadata: tp.Any,
):
self._type: type[Node] = type
self._index = index
self._attributes = attributes
self._subgraphs = _HashableMapping(subgraphs)
self._static_fields = _HashableMapping(static_fields)
self._variables = _HashableMapping(variables)
self._metadata = metadata
def __nnx_repr__(self):
yield reprlib.Object(type=type(self))
yield reprlib.Attr('type', self._type.__name__)
yield reprlib.Attr('index', self._index)
yield reprlib.Attr('attributes', self._attributes)
yield reprlib.Attr('subgraphs', _MappingRepr(self._subgraphs))
yield reprlib.Attr('static_fields', _MappingRepr(self._static_fields))
yield reprlib.Attr('variables', _MappingRepr(self._variables))
yield reprlib.Attr('metadata', self._metadata)
def __hash__(self) -> int:
return hash((self._type, self._subgraphs))
def __eq__(self, other: tp.Any) -> bool:
if not isinstance(other, GraphDef):
return False
return self._type == other._type and self._subgraphs == other._subgraphs
def type(self) -> tp.Type[Node]:
return self._type
def index(self) -> int:
return self._index
def attributes(self) -> tuple[str, ...]:
return self._attributes
def subgraphs(self):
return self._subgraphs
def static_fields(self):
return self._static_fields
def variables(self):
return self._variables
def metadata(self) -> tp.Any:
return self._metadata
def merge(self, state: State, /, *states: State) -> Node:
if states:
state = State.merge(state, *states)
return graph_unflatten(self, state)[0]
def apply(
self, state: State, *states: State
) -> ApplyCaller[tuple[State, 'GraphDef[Node]']]:
accessor = DelayedAccessor()
def _apply(
accessor: DelayedAccessor, *args, **kwargs
) -> tuple[tp.Any, tuple[State, GraphDef[Node]]]:
module = self.merge(state, *states)
fn = accessor(module)
out = fn(*args, **kwargs)
return out, graph_flatten(module)[:2]
return CallableProxy(_apply, accessor) # type: ignore
def make_empty(self) -> Node:
return self.merge(State({}))
def _graph_unflatten(
graphdef: tp.Union[GraphDef[Node], int],
state: dict[str, Variable[tp.Any] | dict[str, tp.Any]],
index_to_ref: dict[Index, tp.Any],
ref_cache: dict[Index, tp.Any] | None,
) -> Node:
"""Recursive helper for graph_unflatten.
Args:
graphdef: A GraphDef instance or an index to a node in the cache.
state: A mapping from attribute names to variables or subgraphs.
index_to_ref: A mapping from indexes to nodes that have been traversed.
If a node is already in the cache, it won't be traversed again.
ref_cache: A mapping from indexes to existing nodes that can be reused.
When an reference is reused, ``GraphNodeImpl.clear`` is called to leave the
object in an empty state and then filled by the unflatten process, as a result
existing graph nodes are mutated to have the new content/topology
specified by the graphdef.
"""
if isinstance(graphdef, int):
return index_to_ref[graphdef]
if not is_node_type(graphdef.type):
raise RuntimeError(f'Unsupported type: {graphdef.type}, this is a bug.')
if graphdef.index in index_to_ref:
raise RuntimeError(f'GraphDef index {graphdef.index} already used.')
# TODO(cgarciae): why copy here?
state = state.copy()
node_impl = get_node_impl_for_type(graphdef.type)
def _get_children():
new_state: dict[str, tp.Any] = {}
for key in graphdef.attributes:
if key in graphdef.static_fields:
new_state[key] = graphdef.static_fields[key]
elif key not in state:
# if key is not present create an empty types
if key in graphdef.subgraphs:
# if the key is a subgraph we create an empty node
subgraphdef = graphdef.subgraphs[key]
if isinstance(subgraphdef, int):
# subgraph exists, take it from the cache
new_state[key] = index_to_ref[subgraphdef]
else:
# create an empty node and add it to the cache
substate = {}
node = new_state[key] = _graph_unflatten(
subgraphdef, substate, index_to_ref, ref_cache
)
index_to_ref[subgraphdef.index] = node
elif key in graphdef.variables:
variable_def = graphdef.variables[key]
if isinstance(variable_def, int):
# variable exists, take it from the cache
new_state[key] = index_to_ref[variable_def]
else:
# create an empty variable and add it to the cache
if ref_cache is not None and variable_def.index in ref_cache:
node = ref_cache[variable_def.index]
if type(node) != variable_def.type:
raise ValueError(
f'Expected a node of type {variable_def.type.__name__} for '
f'index {variable_def.index}, but got a node of type '
f'{type(node).__name__}.'
)
assert isinstance(node, Variable)
node.copy_from_def(variable_def, EMPTY)
else:
node = variable_def.to_variable(EMPTY)
new_state[key] = node
index_to_ref[variable_def.index] = node
else:
raise RuntimeError(f'Unknown static field: {key!r}')
else:
value = state[key]
if key in graphdef.subgraphs:
if isinstance(value, Variable):
raise ValueError(
f'Expected a subgraph for {key!r}, but got a Variable.'
)
subgraphdef = graphdef.subgraphs[key]
if isinstance(subgraphdef, int):
node = index_to_ref[subgraphdef]
else:
node = new_state[key] = _graph_unflatten(
subgraphdef, value, index_to_ref, ref_cache
)
index_to_ref[subgraphdef.index] = node
elif key in graphdef.variables:
variable_def = graphdef.variables[key]
if isinstance(variable_def, int):
new_state[key] = index_to_ref[variable_def]
else:
if type(value) != variable_def.type:
raise ValueError(
f'Expected a Variable of type {variable_def.type} '
f'for {key!r}, but got a Variable of type {type(value)}.'
)
assert isinstance(value, Variable)
if ref_cache is not None and variable_def.index in ref_cache:
variable = ref_cache[variable_def.index]
if type(variable) != variable_def.type:
raise ValueError(
f'Expected a Variable of type {variable_def.type} for '
f'{key!r}, but got a Variable of type {type(variable)}.'
)
variable.copy_from(value)
else:
assert isinstance(value, Variable)
variable = value.copy()
new_state[key] = variable
index_to_ref[variable_def.index] = variable
for new_key in set(state) - set(graphdef.attributes):
new_state[new_key] = state[new_key]
return new_state
if isinstance(node_impl, MutableNodeImpl):
# we create an empty node first and add it to the index
# this avoids infinite recursion when there is a reference cycle
if ref_cache is not None and graphdef.index in ref_cache:
node = ref_cache[graphdef.index]
if type(node) != graphdef.type:
raise ValueError(
f'Expected a node of type {graphdef.type} for index '
f'{graphdef.index}, but got a node of type {type(node)}.'
)
node_impl.clear(node, graphdef.metadata)
else:
node = node_impl.create_empty(graphdef.metadata)
index_to_ref[graphdef.index] = node
children = _get_children()
node_impl.init(node, tuple(children.items()))
else:
# if the node type does not support the creation of an empty object it means
# that it cannot reference itself, so we can create its children first
children = _get_children()
node = node_impl.unflatten(tuple(children.items()), graphdef.metadata)
index_to_ref[graphdef.index] = node
return node
class State(tp.MutableMapping[Key, tp.Any], reprlib.Representable):
def __init__(
self,
mapping: tp.Union[
tp.Mapping[Key, tp.Any],
tp.Iterator[tp.Tuple[Key, tp.Any]],
],
/,
):
if tp.TYPE_CHECKING:
self._mapping = dict(mapping)
else:
super().__setattr__('_mapping', dict(mapping))
def raw_mapping(self) -> dict[Key, dict[str, tp.Any] | tp.Any]:
return self._mapping
def __getitem__(self, key: Key | int) -> Variable | State:
if isinstance(key, int):
key = str(key)
value = self._mapping[key]
if isinstance(value, Variable):
return value
return State(value)
def __getattr__(self, key: Key) -> Variable | State:
if '_mapping' not in vars(self) or key not in self._mapping:
raise AttributeError(f'No attribute {key} in State')
return self[key]
def __setitem__(self, key: Key | int, value: Variable | State) -> None:
if isinstance(key, int):
key = str(key)
if not isinstance(value, (Variable, State)):
raise ValueError(
f'Trying to set key {key} to a value'
f' that is not a Variable or State, got: {value}.'
)
if isinstance(value, State):
self._mapping[key] = value._mapping
else:
self._mapping[key] = value
__setattr__ = __setitem__
def __delitem__(self, key: Key) -> None:
del self._mapping[key]
def __iter__(self) -> tp.Iterator[Key]:
return iter(self._mapping)
def __len__(self) -> int:
return len(self._mapping)
def __nnx_repr__(self):
yield reprlib.Object(type(self), value_sep=': ', start='({', end='})')
for k, v in self.items():
if isinstance(v, State):
v = NestedStateRepr(v)
yield reprlib.Attr(repr(k), v)
def flat_state(self) -> dict[Key, Variable[Variable]]:
return traverse_util.flatten_dict(self._mapping, sep='/') # type: ignore
def from_flat_path(cls, flat_state: FlatState, /) -> State:
nested_state = traverse_util.unflatten_dict(flat_state, sep='/')
return cls(nested_state)
def split(self, first: filterlib.Filter, /) -> 'State':
...
def split(
self,
first: filterlib.Filter,
second: filterlib.Filter,
/,
*filters: filterlib.Filter,
) -> tp.Tuple['State', ...]:
...
def split(
self, first: filterlib.Filter, /, *filters: filterlib.Filter
) -> tp.Union['State', tp.Tuple['State', ...]]:
filters = (first, *filters)
*states, rest = _split_state(self, *filters)
if rest:
raise ValueError(
'Non-exhaustive filters, got a non-empty remainder: '
f'{list(rest.keys())}.\nUse `...` to match all remaining elements.'
)
if len(states) == 1:
states = states[0]
else:
states = tuple(states)
return states
def extract(
self,
first: filterlib.Filter,
/,
) -> 'State':
...
def extract(
self,
first: filterlib.Filter,
second: filterlib.Filter,
/,
*filters: filterlib.Filter,
) -> tp.Tuple['State', ...]:
...
def extract(
self,
first: filterlib.Filter,
/,
*filters: filterlib.Filter,
) -> tp.Union['State', tp.Tuple['State', ...]]:
*states, _rest = _split_state(self, first, *filters)
assert len(states) == len(filters) + 1
if len(states) == 1:
states = states[0]
else:
states = tuple(states)
return states
def merge(state: 'State', /, *states: 'State') -> 'State':
states = (state, *states)
if len(states) == 1:
return states[0]
new_state: FlatState = {}
for state in states:
new_state.update(state.flat_state())
return State.from_flat_path(new_state)
def __or__(self, other: 'State') -> 'State':
if not other:
return self
return State.merge(self, other)
def __sub__(self, other: 'State') -> 'State':
if not other:
return self
self_flat = self.flat_state()
other_flat = other.flat_state()
diff = {k: v for k, v in self_flat.items() if k not in other_flat}
return State.from_flat_path(diff)
The provided code snippet includes necessary dependencies for implementing the `graph_unflatten` function. Write a Python function `def graph_unflatten( graphdef: GraphDef[Node], state: State, /, *, ref_cache: dict[Index, tp.Any] | None = None, ) -> tuple[Node, dict[Index, tp.Any]]` to solve the following problem:
Unflattens a graphdef into a node with the given state. Args: graphdef: A GraphDef instance. state: A State instance. ref_cache: A mapping from indexes to existing nodes that can be reused. When an reference is reused, ``GraphNodeImpl.clear`` is called to leave the object in an empty state and then filled by the unflatten process, as a result existing graph nodes are mutated to have the new content/topology specified by the graphdef.
Here is the function:
def graph_unflatten(
graphdef: GraphDef[Node],
state: State,
/,
*,
ref_cache: dict[Index, tp.Any] | None = None,
) -> tuple[Node, dict[Index, tp.Any]]:
"""Unflattens a graphdef into a node with the given state.
Args:
graphdef: A GraphDef instance.
state: A State instance.
ref_cache: A mapping from indexes to existing nodes that can be reused.
When an reference is reused, ``GraphNodeImpl.clear`` is called to leave the
object in an empty state and then filled by the unflatten process, as a result
existing graph nodes are mutated to have the new content/topology
specified by the graphdef.
"""
index_to_ref: dict[Index, tp.Any] = {}
node = _graph_unflatten(graphdef, state.raw_mapping, index_to_ref, ref_cache)
return node, index_to_ref | Unflattens a graphdef into a node with the given state. Args: graphdef: A GraphDef instance. state: A State instance. ref_cache: A mapping from indexes to existing nodes that can be reused. When an reference is reused, ``GraphNodeImpl.clear`` is called to leave the object in an empty state and then filled by the unflatten process, as a result existing graph nodes are mutated to have the new content/topology specified by the graphdef. |
22,740 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
Index = int
def _graph_pop(
  node: tp.Any,
  id_to_index: dict[int, Index],
  path_parts: PathParts,
  states: tuple[dict[Path, tp.Any], ...],
  predicates: tuple[filterlib.Predicate, ...],
) -> None:
  """Recursively pop Variables matched by ``predicates`` out of ``node``.

  For each Variable whose '/'-joined path matches a predicate, a copy is
  stored in the ``states`` dict paired with the first matching predicate
  and the key is removed from its parent node. ``id_to_index`` records
  already-visited nodes and variables so shared references are processed
  only once.

  Args:
    node: the graph node to traverse (must satisfy ``is_node``).
    id_to_index: visited-reference registry, mutated in place.
    path_parts: path components leading to ``node``.
    states: one output dict per predicate, mutated in place.
    predicates: filters deciding which variables to pop.

  Raises:
    RuntimeError: if ``node`` is not a graph node.
    ValueError: if a matched key lives on an immutable node.
  """
  if not is_node(node):
    raise RuntimeError(f'Unsupported type: {type(node)}, this is a bug.')

  if id(node) in id_to_index:
    return

  id_to_index[id(node)] = len(id_to_index)
  # node_impl is loop-invariant; compute it once here instead of
  # recomputing it for every matched variable (as the original did).
  node_impl = get_node_impl(node)
  node_dict = node_impl.node_dict(node)

  for name, value in node_dict.items():
    if is_node(value):
      _graph_pop(value, id_to_index, (*path_parts, name), states, predicates)
      continue
    elif not isinstance(value, Variable):
      continue
    elif id(value) in id_to_index:
      # Shared Variable already popped through another path.
      continue

    path = '/'.join((*path_parts, name))
    for state, predicate in zip(states, predicates):
      if predicate(path, value):
        if isinstance(node_impl, ImmutableNodeImpl):
          raise ValueError(
            f'Cannot pop key {name!r} from node of type {type(node).__name__}'
          )
        state[path] = value.copy()
        id_to_index[id(value)] = len(id_to_index)
        node_impl.pop_key(node, name)
        break
    else:
      # No predicate matched: leave the variable in place.
      # NOTE: should we raise an error here?
      pass
class State(tp.MutableMapping[Key, tp.Any], reprlib.Representable):
  """Nested mapping from string keys to ``Variable`` leaves or sub-``State``s.

  Thin wrapper over a plain ``dict`` (``_mapping``) adding attribute-style
  access, filter-based splitting/extraction, merging (``|``) and
  difference (``-``).

  NOTE(review): several decorators appear to have been stripped from this
  listing (``@property`` on ``raw_mapping``, ``@classmethod`` on
  ``from_flat_path``, ``@tp.overload`` on the stacked ``split``/``extract``
  signatures, ``@staticmethod`` on ``merge``) -- confirm against the
  upstream module.
  """

  def __init__(
    self,
    mapping: tp.Union[
      tp.Mapping[Key, tp.Any],
      tp.Iterator[tp.Tuple[Key, tp.Any]],
    ],
    /,
  ):
    if tp.TYPE_CHECKING:
      self._mapping = dict(mapping)
    else:
      # Bypass __setattr__ (aliased to __setitem__ below), which rejects
      # non-Variable/State values, by writing through the base class.
      super().__setattr__('_mapping', dict(mapping))

  def raw_mapping(self) -> dict[Key, dict[str, tp.Any] | tp.Any]:
    # Underlying dict; mutations on it are visible through this State.
    return self._mapping

  def __getitem__(self, key: Key | int) -> Variable | State:
    # Integer keys are normalized to strings (list indices in paths).
    if isinstance(key, int):
      key = str(key)
    value = self._mapping[key]
    if isinstance(value, Variable):
      return value
    # Nested dicts are wrapped on access so callers always see a State.
    return State(value)

  def __getattr__(self, key: Key) -> Variable | State:
    # Attribute access proxies item access for dotted lookup.
    if '_mapping' not in vars(self) or key not in self._mapping:
      raise AttributeError(f'No attribute {key} in State')
    return self[key]

  def __setitem__(self, key: Key | int, value: Variable | State) -> None:
    if isinstance(key, int):
      key = str(key)
    if not isinstance(value, (Variable, State)):
      raise ValueError(
        f'Trying to set key {key} to a value'
        f' that is not a Variable or State, got: {value}.'
      )
    if isinstance(value, State):
      # Store the raw dict, not the State wrapper.
      self._mapping[key] = value._mapping
    else:
      self._mapping[key] = value

  # Attribute assignment behaves exactly like item assignment.
  __setattr__ = __setitem__

  def __delitem__(self, key: Key) -> None:
    del self._mapping[key]

  def __iter__(self) -> tp.Iterator[Key]:
    return iter(self._mapping)

  def __len__(self) -> int:
    return len(self._mapping)

  def __nnx_repr__(self):
    # Pretty-printing hook consumed by reprlib.Representable.
    yield reprlib.Object(type(self), value_sep=': ', start='({', end='})')
    for k, v in self.items():
      if isinstance(v, State):
        v = NestedStateRepr(v)
      yield reprlib.Attr(repr(k), v)

  def flat_state(self) -> dict[Key, Variable[Variable]]:
    # Flatten the nested dict into '/'-joined path keys.
    return traverse_util.flatten_dict(self._mapping, sep='/')  # type: ignore

  def from_flat_path(cls, flat_state: FlatState, /) -> State:
    # Inverse of flat_state: rebuild nesting from '/'-joined paths.
    nested_state = traverse_util.unflatten_dict(flat_state, sep='/')
    return cls(nested_state)

  def split(self, first: filterlib.Filter, /) -> 'State':
    ...

  def split(
    self,
    first: filterlib.Filter,
    second: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Tuple['State', ...]:
    ...

  def split(
    self, first: filterlib.Filter, /, *filters: filterlib.Filter
  ) -> tp.Union['State', tp.Tuple['State', ...]]:
    """Partition into one State per filter; filters must be exhaustive."""
    filters = (first, *filters)
    *states, rest = _split_state(self, *filters)
    if rest:
      raise ValueError(
        'Non-exhaustive filters, got a non-empty remainder: '
        f'{list(rest.keys())}.\nUse `...` to match all remaining elements.'
      )
    # A single filter returns a bare State, not a 1-tuple.
    if len(states) == 1:
      states = states[0]
    else:
      states = tuple(states)
    return states

  def extract(
    self,
    first: filterlib.Filter,
    /,
  ) -> 'State':
    ...

  def extract(
    self,
    first: filterlib.Filter,
    second: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Tuple['State', ...]:
    ...

  def extract(
    self,
    first: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Union['State', tp.Tuple['State', ...]]:
    """Like split, but silently discards the unmatched remainder."""
    *states, _rest = _split_state(self, first, *filters)
    assert len(states) == len(filters) + 1
    if len(states) == 1:
      states = states[0]
    else:
      states = tuple(states)
    return states

  def merge(state: 'State', /, *states: 'State') -> 'State':
    """Union of states; later states win on duplicate flat paths."""
    states = (state, *states)
    if len(states) == 1:
      return states[0]
    new_state: FlatState = {}
    for state in states:
      new_state.update(state.flat_state())
    return State.from_flat_path(new_state)

  def __or__(self, other: 'State') -> 'State':
    if not other:
      return self
    return State.merge(self, other)

  def __sub__(self, other: 'State') -> 'State':
    # Set-like difference on flat paths (values from self are kept).
    if not other:
      return self
    self_flat = self.flat_state()
    other_flat = other.flat_state()
    diff = {k: v for k, v in self_flat.items() if k not in other_flat}
    return State.from_flat_path(diff)
PathParts = Tuple[str, ...]
def graph_pop(
  node: tp.Any,
  filters: tuple[filterlib.Filter, ...],
) -> tuple[State, ...]:
  """Remove and return the variables of ``node`` selected by ``filters``."""
  predicates = tuple(map(filterlib.to_predicate, filters))
  popped: tuple[dict, ...] = tuple({} for _ in predicates)
  _graph_pop(node, {}, (), popped, predicates)
  return tuple(State(entries) for entries in popped)
22,741 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
def is_node(x: tp.Any) -> bool:
  """Return True if ``x`` is a graph node (Variables are never nodes)."""
  if isinstance(x, Variable):
    return False
  return type(x) in NODE_TYPES or is_pytree_node(x)
def _graph_update_dynamic(
  node: tp.Any, state: dict[str, Variable[tp.Any] | dict[str, tp.Any]]
):
  """Recursively write ``state`` into ``node``, updating Variables in place.

  Raises:
    RuntimeError: if ``node`` is not a graph node.
    ValueError: on immutable nodes, or on Variable/subgraph mismatches
      between ``state`` and the node's current contents.
  """
  if not is_node(node):
    raise RuntimeError(f'Unsupported type: {type(node)}')
  node_impl = get_node_impl(node)
  node_dict = node_impl.node_dict(node)
  for key, value in state.items():
    # case 1: new state is being added
    if key not in node_dict:
      if isinstance(node_impl, ImmutableNodeImpl):
        raise ValueError(
          f'Cannot set key {key!r} on immutable node of '
          f'type {type(node).__name__}'
        )
      if isinstance(value, Variable):
        # Copy so the node does not alias the caller's Variable.
        value = value.copy()
      node_impl.set_key(node, key, value)
      continue

    # check values are of the same type
    current_value = node_dict[key]

    # case 2: subgraph is being updated
    if is_node(current_value):
      if isinstance(value, Variable):
        raise ValueError(
          f'Expected a subgraph for {key!r}, but got a Variable: {value!r}'
        )
      _graph_update_dynamic(current_value, value)
    else:
      # case 3: Variable is being updated in place -- mutating
      # current_value means existing references observe the new value.
      if not isinstance(value, Variable):
        raise ValueError(f'Expected a Variable for attribute {key!r}')
      if not isinstance(current_value, Variable):
        raise ValueError(
          f'Trying to update a non-Variable attribute {key!r} with a Variable: '
          f'{value!r}'
        )
      current_value.copy_from(value)
class State(tp.MutableMapping[Key, tp.Any], reprlib.Representable):
  """Nested mapping from string keys to ``Variable`` leaves or sub-``State``s.

  Thin wrapper over a plain ``dict`` (``_mapping``) adding attribute-style
  access, filter-based splitting/extraction, merging (``|``) and
  difference (``-``).

  NOTE(review): several decorators appear to have been stripped from this
  listing (``@property`` on ``raw_mapping``, ``@classmethod`` on
  ``from_flat_path``, ``@tp.overload`` on the stacked ``split``/``extract``
  signatures, ``@staticmethod`` on ``merge``) -- confirm against the
  upstream module.
  """

  def __init__(
    self,
    mapping: tp.Union[
      tp.Mapping[Key, tp.Any],
      tp.Iterator[tp.Tuple[Key, tp.Any]],
    ],
    /,
  ):
    if tp.TYPE_CHECKING:
      self._mapping = dict(mapping)
    else:
      # Bypass __setattr__ (aliased to __setitem__ below), which rejects
      # non-Variable/State values, by writing through the base class.
      super().__setattr__('_mapping', dict(mapping))

  def raw_mapping(self) -> dict[Key, dict[str, tp.Any] | tp.Any]:
    # Underlying dict; mutations on it are visible through this State.
    return self._mapping

  def __getitem__(self, key: Key | int) -> Variable | State:
    # Integer keys are normalized to strings (list indices in paths).
    if isinstance(key, int):
      key = str(key)
    value = self._mapping[key]
    if isinstance(value, Variable):
      return value
    # Nested dicts are wrapped on access so callers always see a State.
    return State(value)

  def __getattr__(self, key: Key) -> Variable | State:
    # Attribute access proxies item access for dotted lookup.
    if '_mapping' not in vars(self) or key not in self._mapping:
      raise AttributeError(f'No attribute {key} in State')
    return self[key]

  def __setitem__(self, key: Key | int, value: Variable | State) -> None:
    if isinstance(key, int):
      key = str(key)
    if not isinstance(value, (Variable, State)):
      raise ValueError(
        f'Trying to set key {key} to a value'
        f' that is not a Variable or State, got: {value}.'
      )
    if isinstance(value, State):
      # Store the raw dict, not the State wrapper.
      self._mapping[key] = value._mapping
    else:
      self._mapping[key] = value

  # Attribute assignment behaves exactly like item assignment.
  __setattr__ = __setitem__

  def __delitem__(self, key: Key) -> None:
    del self._mapping[key]

  def __iter__(self) -> tp.Iterator[Key]:
    return iter(self._mapping)

  def __len__(self) -> int:
    return len(self._mapping)

  def __nnx_repr__(self):
    # Pretty-printing hook consumed by reprlib.Representable.
    yield reprlib.Object(type(self), value_sep=': ', start='({', end='})')
    for k, v in self.items():
      if isinstance(v, State):
        v = NestedStateRepr(v)
      yield reprlib.Attr(repr(k), v)

  def flat_state(self) -> dict[Key, Variable[Variable]]:
    # Flatten the nested dict into '/'-joined path keys.
    return traverse_util.flatten_dict(self._mapping, sep='/')  # type: ignore

  def from_flat_path(cls, flat_state: FlatState, /) -> State:
    # Inverse of flat_state: rebuild nesting from '/'-joined paths.
    nested_state = traverse_util.unflatten_dict(flat_state, sep='/')
    return cls(nested_state)

  def split(self, first: filterlib.Filter, /) -> 'State':
    ...

  def split(
    self,
    first: filterlib.Filter,
    second: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Tuple['State', ...]:
    ...

  def split(
    self, first: filterlib.Filter, /, *filters: filterlib.Filter
  ) -> tp.Union['State', tp.Tuple['State', ...]]:
    """Partition into one State per filter; filters must be exhaustive."""
    filters = (first, *filters)
    *states, rest = _split_state(self, *filters)
    if rest:
      raise ValueError(
        'Non-exhaustive filters, got a non-empty remainder: '
        f'{list(rest.keys())}.\nUse `...` to match all remaining elements.'
      )
    # A single filter returns a bare State, not a 1-tuple.
    if len(states) == 1:
      states = states[0]
    else:
      states = tuple(states)
    return states

  def extract(
    self,
    first: filterlib.Filter,
    /,
  ) -> 'State':
    ...

  def extract(
    self,
    first: filterlib.Filter,
    second: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Tuple['State', ...]:
    ...

  def extract(
    self,
    first: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Union['State', tp.Tuple['State', ...]]:
    """Like split, but silently discards the unmatched remainder."""
    *states, _rest = _split_state(self, first, *filters)
    assert len(states) == len(filters) + 1
    if len(states) == 1:
      states = states[0]
    else:
      states = tuple(states)
    return states

  def merge(state: 'State', /, *states: 'State') -> 'State':
    """Union of states; later states win on duplicate flat paths."""
    states = (state, *states)
    if len(states) == 1:
      return states[0]
    new_state: FlatState = {}
    for state in states:
      new_state.update(state.flat_state())
    return State.from_flat_path(new_state)

  def __or__(self, other: 'State') -> 'State':
    if not other:
      return self
    return State.merge(self, other)

  def __sub__(self, other: 'State') -> 'State':
    # Set-like difference on flat paths (values from self are kept).
    if not other:
      return self
    self_flat = self.flat_state()
    other_flat = other.flat_state()
    diff = {k: v for k, v in self_flat.items() if k not in other_flat}
    return State.from_flat_path(diff)
def graph_update_dynamic(
  node: tp.Any,
  updates: State | tp.Sequence[State],
) -> None:
  """Apply one or more States to ``node``, updating its Variables in place."""
  if not is_node(node):
    raise ValueError(f'Unsupported type: {type(node)}')
  state_list = (updates,) if isinstance(updates, State) else updates
  for current in state_list:
    _graph_update_dynamic(node, current.raw_mapping)
22,742 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
Node = tp.TypeVar('Node')
class _StaticModuleStatus(enum.Enum):
  # Role in which a reference was first seen during a static update; the
  # same object must not appear both as a NEW addition and as an UPDATE.
  NEW = enum.auto()
  UPDATED = enum.auto()
def _graph_update_static(
  node: Node,
  updates: Node,
  cache: dict[int, _StaticModuleStatus],
  status: _StaticModuleStatus,
  path: PathParts,
) -> None:
  """Recursively copy static attributes and new subgraphs from ``updates``.

  Variables are skipped (they belong to the dynamic update). ``cache``
  records, per reference id in ``updates``, whether that reference was
  first seen as a NEW addition or an in-place UPDATE; observing the same
  reference in both roles is an error.

  Raises:
    ValueError: on type mismatch, unsupported node types, NEW/UPDATED
      conflicts, or attempts to mutate an immutable node.
  """
  if type(node) != type(updates):
    raise ValueError(
      f'Trying to update a node with a different type: '
      f'expected {type(node).__name__!r}, '
      f'but got {type(updates).__name__!r}'
    )
  if not is_node(node):
    raise ValueError(f'Unsupported node type: {type(node)}')

  if id(updates) in cache:
    # Already visited: only verify it is being used in a consistent role.
    if cache[id(updates)] != status:
      str_path = '/'.join(path)
      if status is _StaticModuleStatus.NEW:
        raise ValueError(
          f'Trying to add a new node at path {str_path!r} but a'
          ' node with the same reference has been updated'
        )
      else:
        raise ValueError(
          f'Trying to update a node at path {str_path!r} but a new'
          ' node with the same reference has been added'
        )
    return

  cache[id(updates)] = status

  node_impl = get_node_impl(node)
  node_dict = node_impl.node_dict(node)
  updates_dict = node_impl.node_dict(updates)
  for name, value_updates in updates_dict.items():
    # case 1: trying to update a Variable, skip
    if isinstance(value_updates, Variable):
      continue
    elif is_node(value_updates):
      # case 2: updating an existing subgraph
      if name in node_dict:
        _graph_update_static(
          node_dict[name],
          value_updates,
          cache,
          _StaticModuleStatus.UPDATED,
          (*path, name),
        )
      else:
        # case 3: adding a new subgraph
        if isinstance(node_impl, ImmutableNodeImpl):
          raise ValueError(
            f'Cannot set key {name!r} on immutable node of '
            f'type {type(node).__name__}'
          )

        # check if the subgraph is already in the cache
        if id(value_updates) in cache:
          # if it's in the cache, check its status is not NEW
          if cache[id(value_updates)] is not _StaticModuleStatus.NEW:
            raise ValueError(
              f'Trying to add a new node at path {name!r} but a '
              'node with the same reference has been updated'
            )
        else:
          cache[id(value_updates)] = _StaticModuleStatus.NEW

        node_impl.set_key(node, name, value_updates)
    else:  # static field
      if isinstance(node_impl, ImmutableNodeImpl):
        if name in node_dict and node_dict[name] == value_updates:
          # if the value is the same, skip
          continue
        # immutable nodes cannot take a changed static value
        raise ValueError(
          f'Cannot update key {name!r} on immutable node of '
          f'type {type(node).__name__}. Current value is {node_dict[name]!r}, '
          f'new value is {value_updates!r}.'
        )
      node_impl.set_key(node, name, value_updates)
def graph_update_static(node: Node, updates: Node) -> None:
  """Copy static (non-Variable) attributes and new subgraphs from ``updates``."""
  seen: dict[int, _StaticModuleStatus] = {}
  _graph_update_static(node, updates, seen, _StaticModuleStatus.UPDATED, ())
22,743 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
Node = tp.TypeVar('Node')
def graph_flatten(
  x: Node,
) -> tuple[State, GraphDef[Node], tp.Mapping[tp.Any, Index]]:
  """Split ``x`` into its State, GraphDef and ref->index mapping."""
  refs = RefMap[tp.Any, Index]()
  leaves: dict[Path, Variable[tp.Any]] = {}
  graphdef = _graph_flatten((), refs, leaves, x)
  # _graph_flatten returns an int only for repeated references, which
  # cannot happen at the root.
  assert not isinstance(graphdef, int)
  return State.from_flat_path(leaves), graphdef, refs
def clone(node: Node) -> Node:
  """Deep-copy ``node`` by flattening it and merging the pieces back."""
  state, graphdef, _refs = graph_flatten(node)
  return graphdef.merge(state)
22,744 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
def _iter_nodes(
  node: tp.Any, visited: set[int], path_parts: PathParts
) -> tp.Iterator[tuple[Path, tp.Any]]:
  """Depth-first walk yielding each reachable graph node once with its path."""
  if not is_node(node) or id(node) in visited:
    return
  visited.add(id(node))
  yield '/'.join(path_parts), node
  children = get_node_impl(node).node_dict(node)
  for key, child in children.items():
    yield from _iter_nodes(child, visited, (*path_parts, key))
# Path aliases: a path is a '/'-joined string; its components are a tuple.
# The original used the bare name ``Tuple``, which is not imported in this
# module; use the ``tp`` namespace instead.
Path = str
PathParts = tp.Tuple[str, ...]
def iter_nodes(node: tp.Any) -> tp.Iterator[tuple[Path, tp.Any]]:
  """Iterate over all unique graph nodes reachable from ``node``."""
  seen: set[int] = set()
  yield from _iter_nodes(node, seen, ())
22,745 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
A = tp.TypeVar('A')
B = tp.TypeVar('B')
C = tp.TypeVar('C')
def compose_mapping(
  map_ab: tp.Mapping[A, B], map_bc: tp.Mapping[B, C], /
) -> dict[A, C]:
  """Relational composition: ``a -> map_bc[map_ab[a]]``.

  Keys of ``map_ab`` whose image is not a key of ``map_bc`` are dropped.
  """
  composed: dict[A, C] = {}
  for a, b in map_ab.items():
    if b in map_bc:
      composed[a] = map_bc[b]
  return composed
22,746 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
A = tp.TypeVar('A')
B = tp.TypeVar('B')
C = tp.TypeVar('C')
def compose_mapping_reversed(
  map_ab: tp.Mapping[A, B], map_bc: tp.Mapping[B, C], /
) -> dict[C, A]:
  """Inverted composition: ``map_bc[map_ab[a]] -> a``.

  Entries whose intermediate value is not a key of ``map_bc`` are dropped;
  on collisions the entry from the later ``map_ab`` item wins.
  """
  inverted: dict[C, A] = {}
  for a, b in map_ab.items():
    if b in map_bc:
      inverted[map_bc[b]] = a
  return inverted
22,747 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
def _flatten_dict(
node: dict[str, tp.Any],
) -> tuple[tuple[tuple[str, tp.Any], ...], None]:
return tuple(node.items()), None | null |
22,748 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
def _set_key_dict(node: dict[str, tp.Any], key: str, value: tp.Any):
node[key] = value | null |
22,749 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
def _pop_key_dict(node: dict[str, tp.Any], key: str):
return node.pop(key) | null |
22,750 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
def _create_empty_dict(metadata: None) -> dict[str, tp.Any]:
return {} | null |
22,751 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
def _clear_dict(node: dict[str, tp.Any], metadata: None):
node.clear() | null |
22,752 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
def _flatten_list(
node: list[tp.Any],
) -> tuple[tuple[tuple[str, tp.Any], ...], int]:
return tuple((str(i), value) for i, value in enumerate(node)), len(node) | null |
22,753 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
EMPTY = Empty()
def _set_key_list(node: list[tp.Any], key: str, value: tp.Any):
int_key = int(key)
if int_key >= len(node):
node.extend([EMPTY] * (int_key - len(node) + 1))
node[int_key] = value | null |
22,754 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
EMPTY = Empty()
def _pop_key_list(node: list[tp.Any], key: str):
  """Return ``node[key]`` and leave EMPTY in its place (length is preserved)."""
  index = int(key)
  value, node[index] = node[index], EMPTY
  return value
22,755 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
EMPTY = Empty()
def _create_empty_list(length: int) -> list[tp.Any]:
  # Pre-size with the EMPTY sentinel so slots can be filled out of order.
  return [EMPTY for _ in range(length)]
22,756 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
EMPTY = Empty()
def _clear_list(node: list[tp.Any], length: int):
  # Reset in place: keep the same list object, refill with EMPTY sentinels.
  node[:] = [EMPTY] * length
22,757 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
def _flatten_tuple(
node: tuple[tp.Any, ...],
) -> tuple[tuple[tuple[str, tp.Any], ...], int]:
return tuple((str(i), value) for i, value in enumerate(node)), len(node) | null |
22,758 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
EMPTY = Empty()
def _unflatten_tuple(
  items: tuple[tuple[str, tp.Any], ...], length: int
) -> tuple[tp.Any, ...]:
  """Rebuild a tuple of ``length`` slots; unassigned slots stay EMPTY."""
  slots = [EMPTY] * length
  for index_str, value in items:
    slots[int(index_str)] = value
  return tuple(slots)
22,759 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
# Register GraphDef as a pytree so it can cross JAX transform boundaries.
# NOTE(review): `_gradphdef_flatten` looks like a typo for
# `_graphdef_flatten` -- confirm against the flatten function's definition.
jax.tree_util.register_pytree_node(
  GraphDef, _gradphdef_flatten, _graphdef_unflatten
)
# Static values are treated as pytree metadata (no leaves).
jax.tree_util.register_static(Static)
def _key_path_to_str(key: tp.Any) -> str:
if isinstance(key, jax.tree_util.SequenceKey):
return str(key.idx)
elif isinstance(
key, (jax.tree_util.DictKey, jax.tree_util.FlattenedIndexKey)
):
return str(key.key)
elif isinstance(key, jax.tree_util.GetAttrKey):
return key.name
else:
return str(key)
def _flatten_pytree(pytree: tp.Any):
  """Flatten one level of a pytree into (name, child) pairs plus its treedef."""
  # is_leaf stops recursion below the first level: every direct child of
  # `pytree` is treated as a leaf.
  leaves, treedef = jax.tree_util.tree_flatten_with_path(
    pytree, is_leaf=lambda x: x is not pytree
  )
  named_children = tuple(
    (_key_path_to_str(path[0]), child) for path, child in leaves
  )
  return named_children, treedef
22,760 | from __future__ import annotations
import dataclasses
import enum
import typing as tp
import jax
from flax.experimental.nnx.nnx import filterlib, reprlib, tracers
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import EMPTY, Empty, Variable
from flax.typing import Path, PathParts
jax.tree_util.register_pytree_node(
GraphDef, _gradphdef_flatten, _graphdef_unflatten
)
jax.tree_util.register_static(Static)
def _unflatten_pytree(
nodes: tuple[tuple[str, tp.Any], ...], treedef: jax.tree_util.PyTreeDef
):
pytree = treedef.unflatten(value for _, value in nodes)
return pytree | null |
22,761 | from __future__ import annotations
import typing as tp
import jax
import jax.tree_util as jtu
from flax import traverse_util
from flax.experimental.nnx.nnx import filterlib, reprlib
from flax.experimental.nnx.nnx.variables import Variable
from flax.typing import Path
class State(tp.MutableMapping[Key, tp.Any], reprlib.Representable):
  """Nested mapping from string keys to ``Variable`` leaves or sub-``State``s.

  Thin wrapper over a plain ``dict`` (``_mapping``) adding attribute-style
  access, filter-based splitting/extraction, merging (``|``) and
  difference (``-``).

  NOTE(review): several decorators appear to have been stripped from this
  listing (``@property`` on ``raw_mapping``, ``@classmethod`` on
  ``from_flat_path``, ``@tp.overload`` on the stacked ``split``/``extract``
  signatures, ``@staticmethod`` on ``merge``) -- confirm against the
  upstream module.
  """

  def __init__(
    self,
    mapping: tp.Union[
      tp.Mapping[Key, tp.Any],
      tp.Iterator[tp.Tuple[Key, tp.Any]],
    ],
    /,
  ):
    if tp.TYPE_CHECKING:
      self._mapping = dict(mapping)
    else:
      # Bypass __setattr__ (aliased to __setitem__ below), which rejects
      # non-Variable/State values, by writing through the base class.
      super().__setattr__('_mapping', dict(mapping))

  def raw_mapping(self) -> dict[Key, dict[str, tp.Any] | tp.Any]:
    # Underlying dict; mutations on it are visible through this State.
    return self._mapping

  def __getitem__(self, key: Key | int) -> Variable | State:
    # Integer keys are normalized to strings (list indices in paths).
    if isinstance(key, int):
      key = str(key)
    value = self._mapping[key]
    if isinstance(value, Variable):
      return value
    # Nested dicts are wrapped on access so callers always see a State.
    return State(value)

  def __getattr__(self, key: Key) -> Variable | State:
    # Attribute access proxies item access for dotted lookup.
    if '_mapping' not in vars(self) or key not in self._mapping:
      raise AttributeError(f'No attribute {key} in State')
    return self[key]

  def __setitem__(self, key: Key | int, value: Variable | State) -> None:
    if isinstance(key, int):
      key = str(key)
    if not isinstance(value, (Variable, State)):
      raise ValueError(
        f'Trying to set key {key} to a value'
        f' that is not a Variable or State, got: {value}.'
      )
    if isinstance(value, State):
      # Store the raw dict, not the State wrapper.
      self._mapping[key] = value._mapping
    else:
      self._mapping[key] = value

  # Attribute assignment behaves exactly like item assignment.
  __setattr__ = __setitem__

  def __delitem__(self, key: Key) -> None:
    del self._mapping[key]

  def __iter__(self) -> tp.Iterator[Key]:
    return iter(self._mapping)

  def __len__(self) -> int:
    return len(self._mapping)

  def __nnx_repr__(self):
    # Pretty-printing hook consumed by reprlib.Representable.
    yield reprlib.Object(type(self), value_sep=': ', start='({', end='})')
    for k, v in self.items():
      if isinstance(v, State):
        v = NestedStateRepr(v)
      yield reprlib.Attr(repr(k), v)

  def flat_state(self) -> dict[Key, Variable[Variable]]:
    # Flatten the nested dict into '/'-joined path keys.
    return traverse_util.flatten_dict(self._mapping, sep='/')  # type: ignore

  def from_flat_path(cls, flat_state: FlatState, /) -> State:
    # Inverse of flat_state: rebuild nesting from '/'-joined paths.
    nested_state = traverse_util.unflatten_dict(flat_state, sep='/')
    return cls(nested_state)

  def split(self, first: filterlib.Filter, /) -> 'State':
    ...

  def split(
    self,
    first: filterlib.Filter,
    second: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Tuple['State', ...]:
    ...

  def split(
    self, first: filterlib.Filter, /, *filters: filterlib.Filter
  ) -> tp.Union['State', tp.Tuple['State', ...]]:
    """Partition into one State per filter; filters must be exhaustive."""
    filters = (first, *filters)
    *states, rest = _split_state(self, *filters)
    if rest:
      raise ValueError(
        'Non-exhaustive filters, got a non-empty remainder: '
        f'{list(rest.keys())}.\nUse `...` to match all remaining elements.'
      )
    # A single filter returns a bare State, not a 1-tuple.
    if len(states) == 1:
      states = states[0]
    else:
      states = tuple(states)
    return states

  def extract(
    self,
    first: filterlib.Filter,
    /,
  ) -> 'State':
    ...

  def extract(
    self,
    first: filterlib.Filter,
    second: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Tuple['State', ...]:
    ...

  def extract(
    self,
    first: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Union['State', tp.Tuple['State', ...]]:
    """Like split, but silently discards the unmatched remainder."""
    *states, _rest = _split_state(self, first, *filters)
    assert len(states) == len(filters) + 1
    if len(states) == 1:
      states = states[0]
    else:
      states = tuple(states)
    return states

  def merge(state: 'State', /, *states: 'State') -> 'State':
    """Union of states; later states win on duplicate flat paths."""
    states = (state, *states)
    if len(states) == 1:
      return states[0]
    new_state: FlatState = {}
    for state in states:
      new_state.update(state.flat_state())
    return State.from_flat_path(new_state)

  def __or__(self, other: 'State') -> 'State':
    if not other:
      return self
    return State.merge(self, other)

  def __sub__(self, other: 'State') -> 'State':
    # Set-like difference on flat paths (values from self are kept).
    if not other:
      return self
    self_flat = self.flat_state()
    other_flat = other.flat_state()
    diff = {k: v for k, v in self_flat.items() if k not in other_flat}
    return State.from_flat_path(diff)
def _state_flatten_with_keys(x: State):
items = sorted(x._mapping.items(), key=lambda item: item[0])
children = tuple((jtu.DictKey(key), value) for key, value in items)
return children, tuple(x._mapping.keys()) | null |
22,762 | from __future__ import annotations
import typing as tp
import jax
import jax.tree_util as jtu
from flax import traverse_util
from flax.experimental.nnx.nnx import filterlib, reprlib
from flax.experimental.nnx.nnx.variables import Variable
from flax.typing import Path
# NOTE(review): this is a signature-only listing -- every method below is
# missing its body, so this block is not executable as-is. The full
# implementation of this class appears earlier in this file; confirm
# against it before relying on anything here.
class State(tp.MutableMapping[Key, tp.Any], reprlib.Representable):
  def __init__(
    self,
    mapping: tp.Union[
      tp.Mapping[Key, tp.Any],
      tp.Iterator[tp.Tuple[Key, tp.Any]],
    ],
    /,
  ):
  def raw_mapping(self) -> dict[Key, dict[str, tp.Any] | tp.Any]:
  def __getitem__(self, key: Key | int) -> Variable | State:
  def __getattr__(self, key: Key) -> Variable | State:
  def __setitem__(self, key: Key | int, value: Variable | State) -> None:
  def __delitem__(self, key: Key) -> None:
  def __iter__(self) -> tp.Iterator[Key]:
  def __len__(self) -> int:
  def __nnx_repr__(self):
  def flat_state(self) -> dict[Key, Variable[Variable]]:
  def from_flat_path(cls, flat_state: FlatState, /) -> State:
  def split(self, first: filterlib.Filter, /) -> 'State':
  def split(
    self,
    first: filterlib.Filter,
    second: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Tuple['State', ...]:
  def split(
    self, first: filterlib.Filter, /, *filters: filterlib.Filter
  ) -> tp.Union['State', tp.Tuple['State', ...]]:
  def extract(
    self,
    first: filterlib.Filter,
    /,
  ) -> 'State':
  def extract(
    self,
    first: filterlib.Filter,
    second: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Tuple['State', ...]:
  def extract(
    self,
    first: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Union['State', tp.Tuple['State', ...]]:
  def merge(state: 'State', /, *states: 'State') -> 'State':
  def __or__(self, other: 'State') -> 'State':
  def __sub__(self, other: 'State') -> 'State':
# NOTE(review): this is a signature-only listing -- every method below is
# missing its body, so this block is not executable as-is. Confirm against
# the full Variable implementation in flax.experimental.nnx.nnx.variables.
class Variable(tp.Generic[A], reprlib.Representable):
  def __init__(
    self,
    value: tp.Union[A, VariableMetadata[A]],
    set_value_hooks: tp.Union[
      SetValueHook[A], tp.Sequence[SetValueHook[A]]
    ] = (),
    get_value_hooks: tp.Union[
      GetValueHook[A], tp.Sequence[GetValueHook[A]]
    ] = (),
    create_value_hooks: tp.Union[
      CreateValueHook[A], tp.Sequence[CreateValueHook[A]]
    ] = (),
    add_axis_hooks: tp.Union[
      AddAxisHook['Variable[A]'], tp.Sequence[AddAxisHook['Variable[A]']]
    ] = (),
    remove_axis_hooks: tp.Union[
      RemoveAxisHook['Variable[A]'],
      tp.Sequence[RemoveAxisHook['Variable[A]']],
    ] = (),
    **metadata: tp.Any,
  ):
  def __getattr__(self, name: str) -> tp.Any:
  def __setattr__(self, name: str, value: Any) -> None:
  def _setattr(self, name: str, value: tp.Any):
  def copy_from(self, other: 'Variable[A]') -> None:
  def copy_from_def(self, other: 'nnx.graph_utils.VariableDef', /, value: A):
  def value(self) -> A:
  def value(self, value: A):
  def create_value(self, value: A):
  def add_axis(self, axis_name: AxisName, axis_index: AxisIndex):
  def remove_axis(self, axis_name: AxisName, axis_index: AxisIndex):
  def __eq__(self, other: object) -> bool:
  def replace(self, *, value: B, **kwargs) -> 'Variable[B]':
  def replace(self, **kwargs) -> 'Variable[A]':
  def replace(self, **kwargs) -> 'Variable[tp.Any]':
  def is_equivalent(self, other: tp.Any) -> bool:
  def copy(self: 'Variable[A]') -> 'Variable[A]':
  def __nnx_repr__(self):
  def __init_subclass__(cls):
  def on_get_value(self, value: A) -> A:
  def on_set_value(self, value: A) -> A:
  def on_create_value(self, value: A) -> A:
  def on_add_axis(self: V, axis_name: AxisName, axis_index: AxisIndex) -> V:
  def on_remove_axis(
    self: V, axis_name: AxisName, axis_index: AxisIndex
  ) -> V:
# Type alias: a string path addressing an entry in a nested State; flat paths
# are '/'-joined key sequences (see State.flat_state elsewhere in this file).
Path = str
def _state_unflatten(
  static: tp.Tuple[Path, ...] | None,
  leaves: tp.Tuple[Variable, ...] | tuple[dict[str, Variable]],
):
  """Pytree unflatten rule for ``State``.

  ``static`` is ``None`` on the no-keys flatten path (then ``leaves`` is a
  1-tuple holding the raw mapping dict) and a tuple of paths on the
  with-keys flatten path (then ``leaves`` holds the matching Variable
  leaves, one per path).
  """
  # Fix: discriminate on identity with None rather than truthiness. An
  # *empty* State flattened with keys yields static == () and leaves == ();
  # the previous `if static` check sent that case into `State(leaves[0])`
  # and raised IndexError instead of rebuilding an empty State.
  if static is None:
    return State(leaves[0])
  return State(zip(static, leaves))
22,763 | from __future__ import annotations
import typing as tp
import jax
import jax.tree_util as jtu
from flax import traverse_util
from flax.experimental.nnx.nnx import filterlib, reprlib
from flax.experimental.nnx.nnx.variables import Variable
from flax.typing import Path
class State(tp.MutableMapping[Key, tp.Any], reprlib.Representable):
  """A nested, mutable mapping of ``Variable`` leaves keyed by string paths.

  Sub-mappings are stored as plain dicts and re-wrapped as ``State`` views on
  access, so nested ``State`` objects share (do not copy) the underlying
  storage. Attribute access/assignment is aliased to item access/assignment.

  NOTE(review): upstream flax decorates several methods here
  (``@property raw_mapping``, ``@classmethod from_flat_path``,
  ``@tp.overload`` stubs for ``split``/``extract``, ``@staticmethod merge``);
  those decorators appear stripped in this copy, so as written the later
  ``split``/``extract`` definitions simply shadow the earlier ``...`` stubs.
  Confirm against upstream before relying on this file. ``Key``,
  ``NestedStateRepr``, ``FlatState`` and ``_split_state`` are defined
  elsewhere in the file.
  """

  def __init__(
    self,
    mapping: tp.Union[
      tp.Mapping[Key, tp.Any],
      tp.Iterator[tp.Tuple[Key, tp.Any]],
    ],
    /,
  ):
    # __setattr__ is aliased to __setitem__ below, so the backing dict must
    # be installed through the base class to avoid recursing into item
    # assignment.
    if tp.TYPE_CHECKING:
      self._mapping = dict(mapping)
    else:
      super().__setattr__('_mapping', dict(mapping))

  # Expose the backing dict without copying.
  # NOTE(review): presumably an @property upstream — confirm.
  def raw_mapping(self) -> dict[Key, dict[str, tp.Any] | tp.Any]:
    return self._mapping

  def __getitem__(self, key: Key | int) -> Variable | State:
    # Integer keys (e.g. list indices) are normalized to strings, matching
    # how __setitem__ stores them.
    if isinstance(key, int):
      key = str(key)
    value = self._mapping[key]
    if isinstance(value, Variable):
      return value
    # Sub-dicts come back wrapped in a State view sharing the same storage.
    return State(value)

  def __getattr__(self, key: Key) -> Variable | State:
    # Attribute access falls back to item access; the explicit '_mapping'
    # guard keeps code paths that bypass __init__ (e.g. copying) from
    # recursing here.
    if '_mapping' not in vars(self) or key not in self._mapping:
      raise AttributeError(f'No attribute {key} in State')
    return self[key]

  def __setitem__(self, key: Key | int, value: Variable | State) -> None:
    if isinstance(key, int):
      key = str(key)
    if not isinstance(value, (Variable, State)):
      raise ValueError(
        f'Trying to set key {key} to a value'
        f' that is not a Variable or State, got: {value}.'
      )
    # Assigning a State stores its raw dict (shared, not copied).
    if isinstance(value, State):
      self._mapping[key] = value._mapping
    else:
      self._mapping[key] = value

  # Attribute assignment is item assignment: state.foo = v == state['foo'] = v.
  __setattr__ = __setitem__

  def __delitem__(self, key: Key) -> None:
    del self._mapping[key]

  def __iter__(self) -> tp.Iterator[Key]:
    return iter(self._mapping)

  def __len__(self) -> int:
    return len(self._mapping)

  def __nnx_repr__(self):
    # Pretty-printing protocol consumed by reprlib.Representable.
    yield reprlib.Object(type(self), value_sep=': ', start='({', end='})')
    for k, v in self.items():
      if isinstance(v, State):
        v = NestedStateRepr(v)
      yield reprlib.Attr(repr(k), v)

  def flat_state(self) -> dict[Key, Variable[Variable]]:
    # Flatten the nested mapping into {'a/b/c': leaf, ...} ('/'-joined paths).
    return traverse_util.flatten_dict(self._mapping, sep='/')  # type: ignore

  # Inverse of flat_state.
  # NOTE(review): takes ``cls`` — presumably a @classmethod upstream; confirm.
  def from_flat_path(cls, flat_state: FlatState, /) -> State:
    nested_state = traverse_util.unflatten_dict(flat_state, sep='/')
    return cls(nested_state)

  # Overload stub (decorator appears stripped); shadowed by the def below.
  def split(self, first: filterlib.Filter, /) -> 'State':
    ...

  # Overload stub (decorator appears stripped); shadowed by the def below.
  def split(
    self,
    first: filterlib.Filter,
    second: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Tuple['State', ...]:
    ...

  def split(
    self, first: filterlib.Filter, /, *filters: filterlib.Filter
  ) -> tp.Union['State', tp.Tuple['State', ...]]:
    # Exhaustive partition: every leaf must match some filter, otherwise
    # raise. Returns one State per filter, unwrapped when there is only one.
    filters = (first, *filters)
    *states, rest = _split_state(self, *filters)
    if rest:
      raise ValueError(
        'Non-exhaustive filters, got a non-empty remainder: '
        f'{list(rest.keys())}.\nUse `...` to match all remaining elements.'
      )
    if len(states) == 1:
      states = states[0]
    else:
      states = tuple(states)
    return states

  # Overload stub (decorator appears stripped); shadowed by the def below.
  def extract(
    self,
    first: filterlib.Filter,
    /,
  ) -> 'State':
    ...

  # Overload stub (decorator appears stripped); shadowed by the def below.
  def extract(
    self,
    first: filterlib.Filter,
    second: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Tuple['State', ...]:
    ...

  def extract(
    self,
    first: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Union['State', tp.Tuple['State', ...]]:
    # Like split, but silently discards leaves that match no filter.
    *states, _rest = _split_state(self, first, *filters)
    assert len(states) == len(filters) + 1
    if len(states) == 1:
      states = states[0]
    else:
      states = tuple(states)
    return states

  # NOTE(review): no ``self`` — presumably a @staticmethod upstream; confirm.
  def merge(state: 'State', /, *states: 'State') -> 'State':
    # Merge by flat path; later states win on duplicate paths.
    states = (state, *states)
    if len(states) == 1:
      return states[0]
    new_state: FlatState = {}
    for state in states:
      new_state.update(state.flat_state())
    return State.from_flat_path(new_state)

  def __or__(self, other: 'State') -> 'State':
    if not other:
      return self
    return State.merge(self, other)

  def __sub__(self, other: 'State') -> 'State':
    # Keep only the flat paths present in self but absent from other.
    if not other:
      return self
    self_flat = self.flat_state()
    other_flat = other.flat_state()
    diff = {k: v for k, v in self_flat.items() if k not in other_flat}
    return State.from_flat_path(diff)
def _state_flatten(x: State):
return (x._mapping,), None | null |
22,764 | from __future__ import annotations
import typing as tp
import jax
import jax.tree_util as jtu
from flax import traverse_util
from flax.experimental.nnx.nnx import filterlib, reprlib
from flax.experimental.nnx.nnx.variables import Variable
from flax.typing import Path
# A flattened State: '/'-joined path -> Variable leaf (see State.flat_state).
# NOTE(review): ``Variable[Variable]`` reads odd — upstream may intend
# ``Variable[tp.Any]``; confirm.
FlatState = dict[Path, Variable[Variable]]
class State(tp.MutableMapping[Key, tp.Any], reprlib.Representable):
  """A nested, mutable mapping of ``Variable`` leaves keyed by string paths.

  Sub-mappings are stored as plain dicts and re-wrapped as ``State`` views on
  access, so nested ``State`` objects share (do not copy) the underlying
  storage. Attribute access/assignment is aliased to item access/assignment.

  NOTE(review): upstream flax decorates several methods here
  (``@property raw_mapping``, ``@classmethod from_flat_path``,
  ``@tp.overload`` stubs for ``split``/``extract``, ``@staticmethod merge``);
  those decorators appear stripped in this copy, so as written the later
  ``split``/``extract`` definitions simply shadow the earlier ``...`` stubs.
  Confirm against upstream before relying on this file. ``Key`` and
  ``NestedStateRepr`` are defined elsewhere in the file.
  """

  def __init__(
    self,
    mapping: tp.Union[
      tp.Mapping[Key, tp.Any],
      tp.Iterator[tp.Tuple[Key, tp.Any]],
    ],
    /,
  ):
    # __setattr__ is aliased to __setitem__ below, so the backing dict must
    # be installed through the base class to avoid recursing into item
    # assignment.
    if tp.TYPE_CHECKING:
      self._mapping = dict(mapping)
    else:
      super().__setattr__('_mapping', dict(mapping))

  # Expose the backing dict without copying.
  # NOTE(review): presumably an @property upstream — confirm.
  def raw_mapping(self) -> dict[Key, dict[str, tp.Any] | tp.Any]:
    return self._mapping

  def __getitem__(self, key: Key | int) -> Variable | State:
    # Integer keys (e.g. list indices) are normalized to strings, matching
    # how __setitem__ stores them.
    if isinstance(key, int):
      key = str(key)
    value = self._mapping[key]
    if isinstance(value, Variable):
      return value
    # Sub-dicts come back wrapped in a State view sharing the same storage.
    return State(value)

  def __getattr__(self, key: Key) -> Variable | State:
    # Attribute access falls back to item access; the explicit '_mapping'
    # guard keeps code paths that bypass __init__ (e.g. copying) from
    # recursing here.
    if '_mapping' not in vars(self) or key not in self._mapping:
      raise AttributeError(f'No attribute {key} in State')
    return self[key]

  def __setitem__(self, key: Key | int, value: Variable | State) -> None:
    if isinstance(key, int):
      key = str(key)
    if not isinstance(value, (Variable, State)):
      raise ValueError(
        f'Trying to set key {key} to a value'
        f' that is not a Variable or State, got: {value}.'
      )
    # Assigning a State stores its raw dict (shared, not copied).
    if isinstance(value, State):
      self._mapping[key] = value._mapping
    else:
      self._mapping[key] = value

  # Attribute assignment is item assignment: state.foo = v == state['foo'] = v.
  __setattr__ = __setitem__

  def __delitem__(self, key: Key) -> None:
    del self._mapping[key]

  def __iter__(self) -> tp.Iterator[Key]:
    return iter(self._mapping)

  def __len__(self) -> int:
    return len(self._mapping)

  def __nnx_repr__(self):
    # Pretty-printing protocol consumed by reprlib.Representable.
    yield reprlib.Object(type(self), value_sep=': ', start='({', end='})')
    for k, v in self.items():
      if isinstance(v, State):
        v = NestedStateRepr(v)
      yield reprlib.Attr(repr(k), v)

  def flat_state(self) -> dict[Key, Variable[Variable]]:
    # Flatten the nested mapping into {'a/b/c': leaf, ...} ('/'-joined paths).
    return traverse_util.flatten_dict(self._mapping, sep='/')  # type: ignore

  # Inverse of flat_state.
  # NOTE(review): takes ``cls`` — presumably a @classmethod upstream; confirm.
  def from_flat_path(cls, flat_state: FlatState, /) -> State:
    nested_state = traverse_util.unflatten_dict(flat_state, sep='/')
    return cls(nested_state)

  # Overload stub (decorator appears stripped); shadowed by the def below.
  def split(self, first: filterlib.Filter, /) -> 'State':
    ...

  # Overload stub (decorator appears stripped); shadowed by the def below.
  def split(
    self,
    first: filterlib.Filter,
    second: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Tuple['State', ...]:
    ...

  def split(
    self, first: filterlib.Filter, /, *filters: filterlib.Filter
  ) -> tp.Union['State', tp.Tuple['State', ...]]:
    # Exhaustive partition: every leaf must match some filter, otherwise
    # raise. Returns one State per filter, unwrapped when there is only one.
    filters = (first, *filters)
    *states, rest = _split_state(self, *filters)
    if rest:
      raise ValueError(
        'Non-exhaustive filters, got a non-empty remainder: '
        f'{list(rest.keys())}.\nUse `...` to match all remaining elements.'
      )
    if len(states) == 1:
      states = states[0]
    else:
      states = tuple(states)
    return states

  # Overload stub (decorator appears stripped); shadowed by the def below.
  def extract(
    self,
    first: filterlib.Filter,
    /,
  ) -> 'State':
    ...

  # Overload stub (decorator appears stripped); shadowed by the def below.
  def extract(
    self,
    first: filterlib.Filter,
    second: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Tuple['State', ...]:
    ...

  def extract(
    self,
    first: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Union['State', tp.Tuple['State', ...]]:
    # Like split, but silently discards leaves that match no filter.
    *states, _rest = _split_state(self, first, *filters)
    assert len(states) == len(filters) + 1
    if len(states) == 1:
      states = states[0]
    else:
      states = tuple(states)
    return states

  # NOTE(review): no ``self`` — presumably a @staticmethod upstream; confirm.
  def merge(state: 'State', /, *states: 'State') -> 'State':
    # Merge by flat path; later states win on duplicate paths.
    states = (state, *states)
    if len(states) == 1:
      return states[0]
    new_state: FlatState = {}
    for state in states:
      new_state.update(state.flat_state())
    return State.from_flat_path(new_state)

  def __or__(self, other: 'State') -> 'State':
    if not other:
      return self
    return State.merge(self, other)

  def __sub__(self, other: 'State') -> 'State':
    # Keep only the flat paths present in self but absent from other.
    if not other:
      return self
    self_flat = self.flat_state()
    other_flat = other.flat_state()
    diff = {k: v for k, v in self_flat.items() if k not in other_flat}
    return State.from_flat_path(diff)
def _split_state(
  state: State,
  *filters: filterlib.Filter,
) -> tp.Tuple[State, ...]:
  """Partition ``state`` into one State per filter, plus a remainder.

  Each flat leaf is assigned to the *first* filter whose predicate matches
  it; leaves matching no filter land in the final (remainder) State. The
  result therefore has ``len(filters) + 1`` elements. ``...`` is only legal
  as the last filter.
  """
  last_idx = len(filters) - 1
  for idx, candidate in enumerate(filters):
    if candidate is ... and idx != last_idx:
      raise ValueError(
        'Ellipsis `...` can only be used as the last filter, '
        f'got it at index {idx}.'
      )

  predicates = tuple(filterlib.to_predicate(f) for f in filters)
  # One bucket per predicate plus a trailing remainder bucket.
  buckets: list[FlatState] = [{} for _ in range(len(predicates) + 1)]
  for path, leaf in state.flat_state().items():
    # First matching predicate claims the leaf; default to the remainder.
    target = next(
      (buckets[i] for i, p in enumerate(predicates) if p(path, leaf)),
      buckets[-1],
    )
    target[path] = leaf

  return tuple(State.from_flat_path(bucket) for bucket in buckets)
22,765 | import dataclasses
import typing as tp
from typing import Any
from flax import linen
from flax.experimental.nnx.nnx import variables as variableslib
from flax.experimental.nnx.nnx.module import GraphDef, Module
from flax.experimental.nnx.nnx.rnglib import Rngs
from flax.experimental.nnx.nnx.state import State
# Type variable bound to Module, used to type constructors/returns precisely.
M = tp.TypeVar('M', bound=Module)
class Functional(tp.Generic[M]):
  """Functional (graphdef/state) wrapper around a Module class.

  Holds a module class plus deferred constructor arguments; ``init`` builds
  the module, caches its ``GraphDef`` and returns the initial ``State``, and
  ``apply`` runs calls through the cached graphdef.

  NOTE(review): ``functional(...)`` below constructs this as
  ``Functional(cls, None, args, kwargs)`` with positional arguments, which
  implies a ``@dataclasses.dataclass`` decorator that appears stripped in
  this copy — confirm against upstream.
  """

  module_type: tp.Type[M]             # Module subclass to instantiate
  graphdef: tp.Optional[GraphDef[M]]  # cached by init(); None until then
  args: tuple[tp.Any, ...]            # deferred positional constructor args
  kwargs: dict[str, tp.Any]           # deferred keyword constructor args

  def init(self, *, rngs: tp.Optional[Rngs] = None) -> State:
    # Instantiate the module (forwarding rngs only when given), split it
    # into (state, graphdef), cache the graphdef for apply(), return state.
    kwargs = {}
    if rngs is not None:
      kwargs['rngs'] = rngs
    module = self.module_type(*self.args, **self.kwargs, **kwargs)
    state, graphdef = module.split()
    self.graphdef = graphdef
    return state

  def apply(self, *states: tp.Any):
    # Requires init() to have been called first to populate graphdef.
    assert self.graphdef is not None
    return self.graphdef.apply(*states)
def functional(cls: tp.Type[M]) -> tp.Callable[..., Functional[M]]:
  """Return a factory producing ``Functional`` wrappers for ``cls``.

  The factory captures the constructor arguments without instantiating the
  module; instantiation is deferred to ``Functional.init``.
  """

  def _make_functional(*args: tp.Any, **kwargs: tp.Any) -> Functional[M]:
    return Functional(cls, None, args, kwargs)

  return _make_functional
22,766 | from __future__ import annotations
import dataclasses
import typing as tp
from abc import ABCMeta
from copy import deepcopy
from functools import partial
import jax
import jax.tree_util as jtu
import numpy as np
import typing_extensions as tpe
from flax.experimental.nnx.nnx import (
errors,
filterlib,
graph_utils,
ids,
reprlib,
tracers,
)
from flax.experimental.nnx.nnx import variables as variableslib
from flax.experimental.nnx.nnx.graph_utils import GraphDef
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.rnglib import Rngs
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import Variable
from flax.typing import Path
class Module(reprlib.Representable, metaclass=ModuleMeta):
  """Base class for NNX modules: mutable Python objects whose ``Variable``
  attributes form a traversable graph that can be split into a static
  ``GraphDef`` plus one or more dynamic ``State`` objects and merged back.

  NOTE(review): several names used here are defined elsewhere in the file
  (``ModuleMeta``, ``ModuleState``, ``M``, ``Updates``, ``SEEN_MODULES_REPR``,
  ``tuple_reduce``, ``tuple_init``, ``A``, ``B``, the ``_module_graph_*``
  helpers and ``_module_unflatten``). ``init``, ``create_abstract`` and
  ``partial_init`` take ``cls`` and read as ``@classmethod``s, and the
  ``...``-bodied ``split``/``extract``/``pop`` defs read as ``@tp.overload``
  stubs; the decorators appear stripped in this copy, so as written later
  defs shadow earlier ones. Confirm against upstream.
  """

  if tp.TYPE_CHECKING:
    # Per-instance bookkeeping (stable id + trace state); presumably
    # installed by ModuleMeta — confirm.
    _module__state: ModuleState

  if not tp.TYPE_CHECKING:

    def __setattr__(self, name: str, value: Any) -> None:
      self._setattr(name, value)

  def _setattr(self, name: str, value: tp.Any) -> None:
    # Guard attribute mutation: reject writes from a different JAX trace
    # level, and reject raw arrays/State values (which must be wrapped in a
    # Variable to be tracked as dynamic state).
    if not self._module__state.trace_state.is_valid():
      raise errors.TraceContextError(
        'Cannot mutate Module from different trace level'
      )
    if isinstance(value, (jax.Array, np.ndarray, State)):
      raise ValueError(
        f"Trying to assign a '{type(value).__name__}' to the Module"
        f" attribute '{name}'. This is not supported. Non-hashable "
        'objects are not valid static state in JAX. Please wrap '
        'the value in a Variable type instead.'
      )
    object.__setattr__(self, name, value)

  def __deepcopy__(self: M, memo=None) -> M:
    # Deep-copy via split/merge so shared references within the module graph
    # are handled by the graph machinery rather than copied naively.
    state, graphdef = self.split()
    graphdef = deepcopy(graphdef)
    state = deepcopy(state)
    return graphdef.merge(state)

  def __hash__(self) -> int:
    # Hash by the stable per-instance id, not by (mutable) contents.
    return hash(self._module__state.id)

  def __nnx_repr__(self):
    # Pretty-printer with cycle/sharing protection: SEEN_MODULES_REPR records
    # module ids for the duration of the outermost repr; revisited modules
    # print as '...'.
    global SEEN_MODULES_REPR
    if SEEN_MODULES_REPR is None:
      SEEN_MODULES_REPR = set()
      clear_seen = True
    else:
      clear_seen = False
    if self._module__state.id in SEEN_MODULES_REPR:
      yield reprlib.Object(type=type(self), empty_repr='...')
      return
    yield reprlib.Object(type=type(self))
    SEEN_MODULES_REPR.add(self._module__state.id)
    try:
      for name, value in vars(self).items():
        # Show submodules, and public attributes that are not Variables.
        if isinstance(value, Module) or (
          not isinstance(value, Variable) and not name.startswith('_')
        ):
          yield reprlib.Attr(name, repr(value))
    finally:
      if clear_seen:
        SEEN_MODULES_REPR = None

  # Construct and immediately split into (state, graphdef).
  # NOTE(review): takes ``cls`` — presumably a @classmethod upstream; confirm.
  def init(cls: type[M], *args, **kwargs) -> tuple[State, GraphDef[M]]:
    return cls(*args, **kwargs).split()

  # Build the module under jax.eval_shape so its variables are shape/dtype
  # abstractions with no real buffers.
  # NOTE(review): takes ``cls`` — presumably a @classmethod upstream; confirm.
  def create_abstract(cls: type[M]) -> type[M]:
    def lift_rngs(kwargs: dict[str, tp.Any]):
      # Re-wrap a forked rng mapping back into an Rngs inside the traced fn.
      if 'rngs' in kwargs and isinstance(rngs := kwargs['rngs'], tp.Mapping):
        kwargs['rngs'] = Rngs(rngs)
      return kwargs

    def _create_abstract(accessor: DelayedAccessor, *args, **kwargs):
      constructor = accessor(cls)
      # Fork an Rngs into a plain mapping before crossing the eval_shape
      # boundary; lift_rngs reconstructs it inside.
      if 'rngs' in kwargs and isinstance(rngs := kwargs['rngs'], Rngs):
        kwargs['rngs'] = rngs.fork()
      state, graphdef = jax.eval_shape(
        lambda: constructor(*args, **lift_rngs(kwargs)).split()
      )
      return graphdef.merge(state)

    return CallableProxy(_create_abstract)  # type: ignore

  # NOTE(review): takes ``cls`` — presumably a @classmethod upstream; confirm.
  def partial_init(cls: type[M], state: State, *states: State) -> type[M]:
    """Creates a constructor that initializes the Module with the given state.

    ``partial_init`` takes one or more States and returns a constructor that
    uses ``jax.jit`` to initialize the Module and update its state with the
    given States. Its semantically equivalent to::

      module = MyModule(*args, **kwargs)
      module.update(state, *states)

    However, thanks to dead code elimination the resulting constructor will
    only initialize the subset of ``Variable``'s that were part of the given
    state(s).

    Example::

      >>> import jax.numpy as jnp
      >>> import jax
      >>> from flax.experimental import nnx
      ...
      >>> bias = jax.random.normal(jax.random.key(0), (4,))
      >>> state = nnx.State({'bias': bias}) # in reality load it from a checkpoint
      >>> linear = nnx.Linear.partial_init(state)(2, 4, rngs=nnx.Rngs(1))
      >>> y = linear(jnp.ones((1, 2)))
      ...
      >>> assert jnp.allclose(linear.bias, bias)
      >>> assert y.shape == (1, 4)

    Args:
      state: The State to initialize the Module with.
      *states: Additional States to initialize the Module with.

    Returns:
      A constructor that initializes the Module with the given States.
    """
    states = (state, *states)

    def lift_rngs(kwargs: dict[str, tp.Any]):
      # Same rng round-trip as in create_abstract, but across a jit boundary.
      if 'rngs' in kwargs and isinstance(rngs := kwargs['rngs'], tp.Mapping):
        kwargs['rngs'] = Rngs(rngs)
      return kwargs

    def _partial_init(accessor: DelayedAccessor, *args, **kwargs):
      constructor: tp.Callable[[], M] = accessor(cls)
      if 'rngs' in kwargs and isinstance(rngs := kwargs['rngs'], Rngs):
        kwargs['rngs'] = rngs.fork()

      def _partial_init_constructor():
        # Build, overwrite with the provided states, split — all under jit
        # so initializers of overwritten Variables become dead code.
        module = constructor(*args, **lift_rngs(kwargs))
        module.update(*states)
        return module.split()

      graphdef: GraphDef[M]
      state: State
      state, graphdef = jax.jit(_partial_init_constructor)()
      module = graphdef.merge(state)
      return module

    return CallableProxy(_partial_init)  # type: ignore

  def clone(self: M) -> M:
    # Round-trip through split/merge to produce an independent copy.
    return merge(self.split())

  # Overload stub (decorator appears stripped); shadowed by the def below.
  def split(self: M) -> tuple[State, GraphDef[M]]:
    ...

  # Overload stub (decorator appears stripped); shadowed by the def below.
  def split(self: M, first: filterlib.Filter, /) -> tuple[State, GraphDef[M]]:
    ...

  # Overload stub (decorator appears stripped); shadowed by the def below.
  def split(
    self: M,
    first: filterlib.Filter,
    second: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tuple[State, tpe.Unpack[tuple[State, ...]], GraphDef[M]]:
    ...

  def split(
    self: M, *filters: filterlib.Filter
  ) -> tuple[State, tpe.Unpack[tuple[State, ...]], GraphDef[M]]:
    # Flatten into (state, graphdef); optionally partition the state by the
    # given filters (State.split is exhaustive and raises on a remainder).
    state, graphdef, _ = graph_utils.graph_flatten(self)
    if len(filters) == 0:
      states = (state,)
    elif len(filters) == 1:
      states = (state.split(filters[0]),)
    else:
      states = state.split(filters[0], filters[1], *filters[2:])
    return *states, graphdef

  def get_state(self) -> State:
    # Convenience: full split, keep only the dynamic state.
    state, _ = self.split()
    return state

  def get_graphdef(self: M) -> GraphDef[M]:
    # Convenience: full split, keep only the static graphdef.
    _, graphdef = self.split()
    return graphdef

  # Overload stub (decorator appears stripped); shadowed by the def below.
  def extract(self, first: filterlib.Filter, /) -> State:
    ...

  # Overload stub (decorator appears stripped); shadowed by the def below.
  def extract(
    self,
    first: filterlib.Filter,
    second: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tuple[State, ...]:
    ...

  def extract(
    self,
    first: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Union[State, tuple[State, ...]]:
    # Non-exhaustive filtering of the full state (unmatched leaves dropped).
    state = self.get_state()
    if len(filters) == 0:
      states = state.extract(first)
    else:
      states = state.extract(first, filters[0], *filters[1:])
    return states

  # Overload stub (decorator appears stripped); shadowed by the def below.
  def pop(
    self,
    filter: filterlib.Filter,
    /,
  ) -> State:
    ...

  # Overload stub (decorator appears stripped); shadowed by the def below.
  def pop(
    self,
    filter: filterlib.Filter,
    filter2: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tuple[State, ...]:
    ...

  def pop(
    self, *filters: filterlib.Filter
  ) -> tp.Union[State, tuple[State, ...]]:
    # Remove matching leaves from the module in place and return them, one
    # State per filter (unwrapped for a single filter).
    if len(filters) == 0:
      raise ValueError('Expected at least one filter')
    states = graph_utils.graph_pop(self, filters)
    if len(states) == 1:
      return states[0]
    else:
      return states

  def apply(self: M) -> ApplyCaller[M]:
    # Functional call proxy: clones first so the original module is never
    # mutated; the accessed method runs on the clone and (out, clone) is
    # returned.
    def _apply(accessor: DelayedAccessor, *args, **kwargs) -> tuple[tp.Any, M]:
      module = self.clone()
      fn = accessor(module)
      out = fn(*args, **kwargs)
      return out, module

    return CallableProxy(_apply)  # type: ignore

  def update(self: M, update: Updates[M], /, *updates: Updates[M]) -> None:
    # Update this module in place from any mix of States, GraphDefs and
    # Modules (at most one GraphDef/Module is allowed among the updates).
    updates = (update, *updates)

    def _states_and_moduledef(
      updates,
    ) -> tuple[list[State], tp.Optional[Module]]:
      # Collect State leaves and the (optional) single structural update.
      leaves = jax.tree_util.tree_leaves(
        updates, is_leaf=lambda x: isinstance(x, (GraphDef, State))
      )
      states: list[State] = []
      module: tp.Optional[Module] = None
      for leaf in leaves:
        if isinstance(leaf, (Module, GraphDef)):
          if module is not None:
            raise ValueError(
              'Expected only one GraphDef or Module in the updates'
            )
          if isinstance(leaf, Module):
            module = leaf
            states.append(leaf.get_state())
          else:
            module = leaf.make_empty()
        elif isinstance(leaf, State):
          states.append(leaf)
        else:
          raise ValueError(
            'Expected a GraphDef, Module or State, got'
            f' {type(leaf).__name__}'
          )
      return states, module

    states, module_update = _states_and_moduledef(updates)
    # Apply the structural (static) update first, then the dynamic leaves.
    if module_update is not None:
      graph_utils.graph_update_static(self, module_update)
    if states:
      graph_utils.graph_update_dynamic(self, states)

  def sow(
    self,
    variable_type: tp.Type[variableslib.Variable[tp.Any]],
    name: str,
    value: A,
    reduce_fn: tp.Callable[[B, A], B] = tuple_reduce,
    init_fn: tp.Callable[[], B] = tuple_init,  # type: ignore
  ) -> None:
    # Accumulate `value` into the Variable attribute `name`: create it from
    # init_fn on first use, otherwise fold the new value in with reduce_fn
    # (defaults accumulate values into a growing tuple). The existing
    # attribute must be a Variable of exactly `variable_type`.
    if hasattr(self, name):
      variable = getattr(self, name)
      if not isinstance(variable, variableslib.Variable):
        raise ValueError(
          f"Expected '{name}' to be a Variable, got {type(variable).__name__}"
        )
      elif type(variable) != variable_type:
        raise ValueError(
          f"Expected '{name}' to be of type '{variable_type.__name__}', "
          f"got '{type(variable).__name__}'"
        )
      variable.raw_value = reduce_fn(variable.raw_value, value)
    else:
      reduced_value = reduce_fn(init_fn(), value)
      setattr(self, name, variable_type(reduced_value))

  def modules(self) -> tp.Iterator[tuple[Path, Module]]:
    # Yield (path, module) for every Module node in the graph (including
    # the current module — see set_attributes' docstring).
    for path, value in graph_utils.iter_nodes(self):
      if isinstance(value, Module):
        yield path, value

  def set_attributes(
    self,
    *filters: filterlib.Filter,
    raise_if_not_found: bool = True,
    **attributes: tp.Any,
  ) -> None:
    """Sets the attributes of nested Modules including the current Module.

    If the attribute is not found in the Module, it is ignored.

    Example::

      >>> from flax.experimental import nnx
      ...
      >>> class Block(nnx.Module):
      ...   def __init__(self, din, dout, *, rngs: nnx.Rngs):
      ...     self.linear = nnx.Linear(din, dout, rngs=rngs)
      ...     self.dropout = nnx.Dropout(0.5, deterministic=False)
      ...     self.batch_norm = nnx.BatchNorm(10, use_running_average=False, rngs=rngs)
      ...
      >>> block = Block(2, 5, rngs=nnx.Rngs(0))
      >>> block.dropout.deterministic, block.batch_norm.use_running_average
      (False, False)
      >>> block.set_attributes(deterministic=True, use_running_average=True)
      >>> block.dropout.deterministic, block.batch_norm.use_running_average
      (True, True)

    ``Filter``'s can be used to set the attributes of specific Modules::

      >>> block = Block(2, 5, rngs=nnx.Rngs(0))
      >>> block.set_attributes(nnx.Dropout, deterministic=True, use_running_average=True)
      >>> # Only the dropout will be modified
      >>> block.dropout.deterministic, block.batch_norm.use_running_average
      (True, False)

    Args:
      *filters: Filters to select the Modules to set the attributes of.
      raise_if_not_found: If True (default), raises a ValueError if at least
        one attribute instance is not found in one of the selected Modules.
      **attributes: The attributes to set.
    """
    remaining_attributes = set(attributes.keys())
    # No filters means "all modules".
    if not filters:
      filters = (True,)
    predicates = tuple(map(filterlib.to_predicate, filters))
    for path, module in self.modules():
      for predicate in predicates:
        if predicate(path, module):
          for name, value in attributes.items():
            if hasattr(module, name):
              if name in remaining_attributes:
                remaining_attributes.remove(name)
              setattr(module, name, value)
          # First matching predicate handles this module.
          break
    if remaining_attributes and raise_if_not_found:
      raise ValueError(
        f'Could not find at least one instance of the following attributes: {remaining_attributes}'
      )

  def __init_subclass__(cls, experimental_pytree: bool = False) -> None:
    # Register every Module subclass with the graph machinery; optionally
    # also register it as a JAX pytree (experimental opt-in).
    super().__init_subclass__()
    graph_utils.register_mutable_node_type(
      type=cls,
      flatten=_module_graph_flatten,
      set_key=_module_graph_set_key,
      pop_key=_module_graph_pop_key,
      create_empty=_module_graph_create_empty,
      clear=_module_graph_clear,
    )
    if experimental_pytree:
      jtu.register_pytree_with_keys(
        cls,
        partial(_module_flatten, with_keys=True),
        _module_unflatten,
        flatten_func=partial(_module_flatten, with_keys=False),
      )
def _module_flatten(module: Module, *, with_keys: bool):
state, graphdef = module.split()
variables = state.raw_mapping
paths = tuple(variables.keys())
if with_keys:
children = tuple(
(jtu.DictKey(path), variable) for path, variable in variables.items()
)
else:
children = tuple(variables.values())
return children, (paths, graphdef) | null |
22,767 | from __future__ import annotations
import dataclasses
import typing as tp
from abc import ABCMeta
from copy import deepcopy
from functools import partial
import jax
import jax.tree_util as jtu
import numpy as np
import typing_extensions as tpe
from flax.experimental.nnx.nnx import (
errors,
filterlib,
graph_utils,
ids,
reprlib,
tracers,
)
from flax.experimental.nnx.nnx import variables as variableslib
from flax.experimental.nnx.nnx.graph_utils import GraphDef
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.rnglib import Rngs
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import Variable
from flax.typing import Path
# Type variable bound to Module (string forward reference), used to type
# constructors/returns precisely.
M = tp.TypeVar('M', bound='Module')
def merge(
  state_and_def: tuple[tpe.Unpack[tuple[State, ...]], GraphDef[M]],
) -> M:
  """Rebuild a module from a ``(*states, graphdef)`` tuple.

  Inverse of ``Module.split``: the final element is the ``GraphDef`` and
  every preceding element is a ``State`` merged into it.
  """
  graphdef = state_and_def[-1]
  states = state_and_def[:-1]
  return graphdef.merge(*states)
class GraphDef(tp.Generic[Node], reprlib.Representable):
  """Static, hashable description of a graph node (e.g. a Module's structure).

  Pairs with ``State`` (the dynamic leaves): ``merge`` rebuilds a live node
  from states, and ``apply`` runs a method functionally, returning the
  output alongside the updated (state, graphdef).

  NOTE(review): ``Node``, ``VariableDef``, ``_HashableMapping``,
  ``_MappingRepr``, ``graph_flatten`` and ``graph_unflatten`` are defined
  elsewhere in the file, and the plain accessor methods below (``type``
  through ``metadata``) read like ``@property`` definitions whose decorators
  were stripped in this copy — confirm against upstream.
  """

  __slots__ = (
    '_type',
    '_index',
    '_attributes',
    '_subgraphs',
    '_static_fields',
    '_variables',
    '_metadata',
  )

  def __init__(
    self,
    type: tp.Type[Node],
    index: int,
    attributes: tuple[str, ...],
    subgraphs: tp.Iterable[tuple[str, tp.Union['GraphDef[tp.Any]', int]]],
    static_fields: tp.Iterable[tuple[str, tp.Any]],
    variables: tp.Iterable[tuple[str, VariableDef | int]],
    metadata: tp.Any,
  ):
    # Iterable inputs are frozen into _HashableMapping so the instance
    # remains hashable (see __hash__/__eq__ below).
    self._type: type[Node] = type
    self._index = index
    self._attributes = attributes
    self._subgraphs = _HashableMapping(subgraphs)
    self._static_fields = _HashableMapping(static_fields)
    self._variables = _HashableMapping(variables)
    self._metadata = metadata

  def __nnx_repr__(self):
    # Pretty-printing protocol consumed by reprlib.Representable.
    yield reprlib.Object(type=type(self))
    yield reprlib.Attr('type', self._type.__name__)
    yield reprlib.Attr('index', self._index)
    yield reprlib.Attr('attributes', self._attributes)
    yield reprlib.Attr('subgraphs', _MappingRepr(self._subgraphs))
    yield reprlib.Attr('static_fields', _MappingRepr(self._static_fields))
    yield reprlib.Attr('variables', _MappingRepr(self._variables))
    yield reprlib.Attr('metadata', self._metadata)

  def __hash__(self) -> int:
    # Identity considers only _type and _subgraphs; the other fields are
    # ignored. NOTE(review): confirm this asymmetry is intentional upstream.
    return hash((self._type, self._subgraphs))

  def __eq__(self, other: tp.Any) -> bool:
    # Must stay consistent with __hash__: same two fields only.
    if not isinstance(other, GraphDef):
      return False
    return self._type == other._type and self._subgraphs == other._subgraphs

  # Plain accessors over the slots.
  # NOTE(review): presumably @property definitions upstream — confirm.
  def type(self) -> tp.Type[Node]:
    return self._type

  def index(self) -> int:
    return self._index

  def attributes(self) -> tuple[str, ...]:
    return self._attributes

  def subgraphs(self):
    return self._subgraphs

  def static_fields(self):
    return self._static_fields

  def variables(self):
    return self._variables

  def metadata(self) -> tp.Any:
    return self._metadata

  def merge(self, state: State, /, *states: State) -> Node:
    # Merge the given states (later wins per flat path) and unflatten into a
    # live node; graph_unflatten returns extra data we discard here.
    if states:
      state = State.merge(state, *states)
    return graph_unflatten(self, state)[0]

  def apply(
    self, state: State, *states: State
  ) -> ApplyCaller[tuple[State, 'GraphDef[Node]']]:
    # Returns a proxy so that graphdef.apply(state).some_method(*args)
    # merges a fresh node, invokes the method, and returns
    # (out, (state, graphdef)) reflecting any mutations.
    accessor = DelayedAccessor()

    def _apply(
      accessor: DelayedAccessor, *args, **kwargs
    ) -> tuple[tp.Any, tuple[State, GraphDef[Node]]]:
      module = self.merge(state, *states)
      fn = accessor(module)
      out = fn(*args, **kwargs)
      return out, graph_flatten(module)[:2]

    return CallableProxy(_apply, accessor)  # type: ignore

  def make_empty(self) -> Node:
    # Build a node with no dynamic leaves (structure only).
    return self.merge(State({}))
class State(tp.MutableMapping[Key, tp.Any], reprlib.Representable):
  """A nested, mutable mapping of ``Variable`` leaves keyed by string paths.

  Sub-mappings are stored as plain dicts and re-wrapped as ``State`` views on
  access, so nested ``State`` objects share (do not copy) the underlying
  storage. Attribute access/assignment is aliased to item access/assignment.

  NOTE(review): upstream flax decorates several methods here
  (``@property raw_mapping``, ``@classmethod from_flat_path``,
  ``@tp.overload`` stubs for ``split``/``extract``, ``@staticmethod merge``);
  those decorators appear stripped in this copy, so as written the later
  ``split``/``extract`` definitions simply shadow the earlier ``...`` stubs.
  Confirm against upstream before relying on this file. ``Key``,
  ``NestedStateRepr``, ``FlatState``, ``traverse_util`` and ``_split_state``
  are defined/imported elsewhere in the file.
  """

  def __init__(
    self,
    mapping: tp.Union[
      tp.Mapping[Key, tp.Any],
      tp.Iterator[tp.Tuple[Key, tp.Any]],
    ],
    /,
  ):
    # __setattr__ is aliased to __setitem__ below, so the backing dict must
    # be installed through the base class to avoid recursing into item
    # assignment.
    if tp.TYPE_CHECKING:
      self._mapping = dict(mapping)
    else:
      super().__setattr__('_mapping', dict(mapping))

  # Expose the backing dict without copying.
  # NOTE(review): presumably an @property upstream — confirm.
  def raw_mapping(self) -> dict[Key, dict[str, tp.Any] | tp.Any]:
    return self._mapping

  def __getitem__(self, key: Key | int) -> Variable | State:
    # Integer keys (e.g. list indices) are normalized to strings, matching
    # how __setitem__ stores them.
    if isinstance(key, int):
      key = str(key)
    value = self._mapping[key]
    if isinstance(value, Variable):
      return value
    # Sub-dicts come back wrapped in a State view sharing the same storage.
    return State(value)

  def __getattr__(self, key: Key) -> Variable | State:
    # Attribute access falls back to item access; the explicit '_mapping'
    # guard keeps code paths that bypass __init__ (e.g. copying) from
    # recursing here.
    if '_mapping' not in vars(self) or key not in self._mapping:
      raise AttributeError(f'No attribute {key} in State')
    return self[key]

  def __setitem__(self, key: Key | int, value: Variable | State) -> None:
    if isinstance(key, int):
      key = str(key)
    if not isinstance(value, (Variable, State)):
      raise ValueError(
        f'Trying to set key {key} to a value'
        f' that is not a Variable or State, got: {value}.'
      )
    # Assigning a State stores its raw dict (shared, not copied).
    if isinstance(value, State):
      self._mapping[key] = value._mapping
    else:
      self._mapping[key] = value

  # Attribute assignment is item assignment: state.foo = v == state['foo'] = v.
  __setattr__ = __setitem__

  def __delitem__(self, key: Key) -> None:
    del self._mapping[key]

  def __iter__(self) -> tp.Iterator[Key]:
    return iter(self._mapping)

  def __len__(self) -> int:
    return len(self._mapping)

  def __nnx_repr__(self):
    # Pretty-printing protocol consumed by reprlib.Representable.
    yield reprlib.Object(type(self), value_sep=': ', start='({', end='})')
    for k, v in self.items():
      if isinstance(v, State):
        v = NestedStateRepr(v)
      yield reprlib.Attr(repr(k), v)

  def flat_state(self) -> dict[Key, Variable[Variable]]:
    # Flatten the nested mapping into {'a/b/c': leaf, ...} ('/'-joined paths).
    return traverse_util.flatten_dict(self._mapping, sep='/')  # type: ignore

  # Inverse of flat_state.
  # NOTE(review): takes ``cls`` — presumably a @classmethod upstream; confirm.
  def from_flat_path(cls, flat_state: FlatState, /) -> State:
    nested_state = traverse_util.unflatten_dict(flat_state, sep='/')
    return cls(nested_state)

  # Overload stub (decorator appears stripped); shadowed by the def below.
  def split(self, first: filterlib.Filter, /) -> 'State':
    ...

  # Overload stub (decorator appears stripped); shadowed by the def below.
  def split(
    self,
    first: filterlib.Filter,
    second: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Tuple['State', ...]:
    ...

  def split(
    self, first: filterlib.Filter, /, *filters: filterlib.Filter
  ) -> tp.Union['State', tp.Tuple['State', ...]]:
    # Exhaustive partition: every leaf must match some filter, otherwise
    # raise. Returns one State per filter, unwrapped when there is only one.
    filters = (first, *filters)
    *states, rest = _split_state(self, *filters)
    if rest:
      raise ValueError(
        'Non-exhaustive filters, got a non-empty remainder: '
        f'{list(rest.keys())}.\nUse `...` to match all remaining elements.'
      )
    if len(states) == 1:
      states = states[0]
    else:
      states = tuple(states)
    return states

  # Overload stub (decorator appears stripped); shadowed by the def below.
  def extract(
    self,
    first: filterlib.Filter,
    /,
  ) -> 'State':
    ...

  # Overload stub (decorator appears stripped); shadowed by the def below.
  def extract(
    self,
    first: filterlib.Filter,
    second: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Tuple['State', ...]:
    ...

  def extract(
    self,
    first: filterlib.Filter,
    /,
    *filters: filterlib.Filter,
  ) -> tp.Union['State', tp.Tuple['State', ...]]:
    # Like split, but silently discards leaves that match no filter.
    *states, _rest = _split_state(self, first, *filters)
    assert len(states) == len(filters) + 1
    if len(states) == 1:
      states = states[0]
    else:
      states = tuple(states)
    return states

  # NOTE(review): no ``self`` — presumably a @staticmethod upstream; confirm.
  def merge(state: 'State', /, *states: 'State') -> 'State':
    # Merge by flat path; later states win on duplicate paths.
    states = (state, *states)
    if len(states) == 1:
      return states[0]
    new_state: FlatState = {}
    for state in states:
      new_state.update(state.flat_state())
    return State.from_flat_path(new_state)

  def __or__(self, other: 'State') -> 'State':
    if not other:
      return self
    return State.merge(self, other)

  def __sub__(self, other: 'State') -> 'State':
    # Keep only the flat paths present in self but absent from other.
    if not other:
      return self
    self_flat = self.flat_state()
    other_flat = other.flat_state()
    diff = {k: v for k, v in self_flat.items() if k not in other_flat}
    return State.from_flat_path(diff)
class Variable(tp.Generic[A], reprlib.Representable):
raw_value: A
set_value_hooks: tuple[SetValueHook[A], ...]
get_value_hooks: tuple[GetValueHook[A], ...]
create_value_hooks: tuple[CreateValueHook[A], ...]
add_axis_hooks: tuple[AddAxisHook['Variable[A]'], ...]
remove_axis_hooks: tuple[RemoveAxisHook['Variable[A]'], ...]
_trace_state: tracers.TraceState
def __init__(
self,
value: tp.Union[A, VariableMetadata[A]],
set_value_hooks: tp.Union[
SetValueHook[A], tp.Sequence[SetValueHook[A]]
] = (),
get_value_hooks: tp.Union[
GetValueHook[A], tp.Sequence[GetValueHook[A]]
] = (),
create_value_hooks: tp.Union[
CreateValueHook[A], tp.Sequence[CreateValueHook[A]]
] = (),
add_axis_hooks: tp.Union[
AddAxisHook['Variable[A]'], tp.Sequence[AddAxisHook['Variable[A]']]
] = (),
remove_axis_hooks: tp.Union[
RemoveAxisHook['Variable[A]'],
tp.Sequence[RemoveAxisHook['Variable[A]']],
] = (),
**metadata: tp.Any,
):
vars(self)['_trace_state'] = tracers.TraceState()
if set_value_hooks:
if callable(set_value_hooks):
set_value_hooks = (set_value_hooks,)
else:
set_value_hooks = tuple(set_value_hooks)
else:
set_value_hooks = ()
if get_value_hooks:
if callable(get_value_hooks):
get_value_hooks = (get_value_hooks,)
else:
get_value_hooks = tuple(get_value_hooks)
else:
get_value_hooks = ()
if create_value_hooks:
if callable(create_value_hooks):
create_value_hooks = (create_value_hooks,)
else:
create_value_hooks = tuple(create_value_hooks)
else:
create_value_hooks = ()
if add_axis_hooks:
if callable(add_axis_hooks):
add_axis_hooks = (add_axis_hooks,)
else:
add_axis_hooks = tuple(add_axis_hooks)
else:
add_axis_hooks = ()
if remove_axis_hooks:
if callable(remove_axis_hooks):
remove_axis_hooks = (remove_axis_hooks,)
else:
remove_axis_hooks = tuple(remove_axis_hooks)
else:
remove_axis_hooks = ()
if isinstance(value, VariableMetadata):
value_metadata = dict(value.metadata)
if set_value_hooks and value.set_value_hooks:
set_value_hooks = set_value_hooks + value.set_value_hooks
elif value.set_value_hooks:
set_value_hooks = value.set_value_hooks
if get_value_hooks and value.get_value_hooks:
get_value_hooks = get_value_hooks + value.get_value_hooks
elif value.get_value_hooks:
get_value_hooks = value.get_value_hooks
if create_value_hooks and value.create_value_hooks:
create_value_hooks = create_value_hooks + value.create_value_hooks
elif value.create_value_hooks:
create_value_hooks = value.create_value_hooks
if add_axis_hooks and value.add_axis_hooks:
add_axis_hooks = add_axis_hooks + value.add_axis_hooks
elif value.add_axis_hooks:
add_axis_hooks = value.add_axis_hooks
if remove_axis_hooks and value.remove_axis_hooks:
remove_axis_hooks = remove_axis_hooks + value.remove_axis_hooks
elif value.remove_axis_hooks:
remove_axis_hooks = value.remove_axis_hooks
metadata.update(value_metadata)
value = tp.cast(A, value.raw_value)
if hasattr(self, 'on_get_value'):
on_get_value = getattr(type(self), 'on_get_value')
if on_get_value not in get_value_hooks:
get_value_hooks = (on_get_value, *get_value_hooks)
if hasattr(self, 'on_set_value'):
on_set_value = getattr(type(self), 'on_set_value')
if on_set_value not in set_value_hooks:
set_value_hooks = (on_set_value, *set_value_hooks)
if hasattr(self, 'on_create_value'):
on_create_value = getattr(type(self), 'on_create_value')
if on_create_value not in create_value_hooks:
create_value_hooks = (on_create_value, *create_value_hooks)
if hasattr(self, 'on_add_axis'):
on_add_axis = getattr(type(self), 'on_add_axis')
if on_add_axis not in add_axis_hooks:
add_axis_hooks = (on_add_axis, *add_axis_hooks)
if hasattr(self, 'on_remove_axis'):
on_remove_axis = getattr(type(self), 'on_remove_axis')
if on_remove_axis not in remove_axis_hooks:
remove_axis_hooks = (on_remove_axis, *remove_axis_hooks)
self.raw_value = value
self.get_value_hooks = get_value_hooks
self.set_value_hooks = set_value_hooks
self.create_value_hooks = create_value_hooks
self.add_axis_hooks = add_axis_hooks
self.remove_axis_hooks = remove_axis_hooks
vars(self).update(metadata)
# run create_value hooks
self.raw_value = self.create_value(self.raw_value)
# Attribute plumbing: at type-check time advertise arbitrary metadata access
# via __getattr__; at runtime route every attribute write through _setattr so
# mutation is rejected once the Variable escapes its creation trace level.
if tp.TYPE_CHECKING:

  def __getattr__(self, name: str) -> tp.Any:
    ...

else:

  def __setattr__(self, name: str, value: Any) -> None:
    return self._setattr(name, value)
def _setattr(self, name: str, value: tp.Any):
  """Set ``name`` on this Variable, rejecting writes from a foreign trace.

  Raises:
    ValueError: if the Variable's trace state is no longer valid.
  """
  if self._trace_state.is_valid():
    object.__setattr__(self, name, value)
  else:
    raise ValueError(
      'Cannot mutate Variable from a different trace level'
    )
def copy_from(self, other: 'Variable[A]') -> None:
  """Overwrite this Variable's entire state in place with ``other``'s.

  Args:
    other: a Variable of the exact same concrete type.

  Raises:
    ValueError: if ``other`` is not an equivalent (same-type) container.
  """
  if not self.is_equivalent(other):
    raise ValueError(
      f'Cannot copy from incompatible container, '
      f'expected {type(self).__name__}, got {type(other).__name__}'
    )
  if self is other:
    return  # nothing to copy
  # Keep our own trace state (consistent with copy_from_def): adopting the
  # source's trace state would tie this Variable's mutability checks to the
  # wrong trace level.
  trace_state = self._trace_state
  vars_dict = vars(self)
  vars_dict.clear()
  vars_dict.update(vars(other), _trace_state=trace_state)
def copy_from_def(self, other: 'nnx.graph_utils.VariableDef', /, value: A):
  """Reset this Variable from a VariableDef's metadata plus a raw value.

  The Variable's own trace state is preserved across the reset.
  """
  trace_state = self._trace_state  # must survive the clear below
  state = vars(self)
  state.clear()
  state.update(other.metadata, _trace_state=trace_state, raw_value=value)
def value(self) -> A:
  """Return the stored value with every get-value hook applied in order.

  NOTE(review): likely a ``@property`` getter in the original source — the
  decorator is not visible in this extract; confirm before relying on it.
  """
  result = self.raw_value
  for hook in self.get_value_hooks:
    result = hook(self, result)
  return result
def value(self, value: A):
  """Store ``value`` after applying every set-value hook in order.

  NOTE(review): likely the ``@value.setter`` counterpart of the getter above
  — the decorator is not visible in this extract; confirm.

  Raises:
    ValueError: when ``value`` is itself a Variable (use ``copy_from``).
  """
  if isinstance(value, Variable):
    raise ValueError(
      'Cannot set value to a Variable, ' 'use `copy_from` method instead'
    )
  for hook in self.set_value_hooks:
    value = hook(self, value)
  self.raw_value = value
def create_value(self, value: A):
  """Pipe ``value`` through every create-value hook and return the result."""
  for fn in self.create_value_hooks:
    value = fn(self, value)
  return value
def add_axis(self, axis_name: AxisName, axis_index: AxisIndex):
  """Invoke each add-axis hook with the given axis name and index."""
  for fn in self.add_axis_hooks:
    fn(self, axis_name, axis_index)
def remove_axis(self, axis_name: AxisName, axis_index: AxisIndex):
  """Invoke each remove-axis hook with the given axis name and index."""
  for fn in self.remove_axis_hooks:
    fn(self, axis_name, axis_index)
def __eq__(self, other: object) -> bool:
  """Equal iff same concrete Variable type and identical attribute dicts."""
  if type(self) is not type(other):
    return False
  return vars(self) == vars(other)
# NOTE(review): the first two defs are presumably @tp.overload-decorated in
# the original source — decorators not visible in this extract; confirm.
def replace(self, *, value: B, **kwargs) -> 'Variable[B]':
  ...

def replace(self, **kwargs) -> 'Variable[A]':
  ...

def replace(self, **kwargs) -> 'Variable[tp.Any]':
  """Return a copy of this Variable with the given attributes replaced.

  If ``raw_value`` is itself an equivalent Variable, that Variable is used
  as the result (with any remaining kwargs applied to it recursively).
  """
  if 'raw_value' in kwargs:
    candidate = kwargs['raw_value']
    if isinstance(candidate, Variable):
      del kwargs['raw_value']
      if not self.is_equivalent(candidate):
        raise ValueError(
          'Cannot replace value from incompatible container, '
          f'expected {type(self).__name__}, got {type(candidate).__name__}'
        )
      # apply any remaining updates to the provided Variable, else return it
      return candidate.replace(**kwargs) if kwargs else candidate
  # build a new instance sharing our attributes, overridden by kwargs
  new_self = object.__new__(type(self))
  vars(new_self).update(vars(self), **kwargs)
  return new_self
def is_equivalent(self, other: tp.Any) -> bool:
  """True when ``other`` is the exact same Variable subclass as ``self``."""
  return type(other) is type(self)
def copy(self: 'Variable[A]') -> 'Variable[A]':
  """Shallow-copy this Variable, giving the clone a fresh trace state."""
  clone = object.__new__(type(self))
  vars(clone).update(vars(self), _trace_state=tracers.TraceState())
  return clone
def __nnx_repr__(self):
  """Yield repr parts, hiding hook tuples and the internal trace state."""
  yield reprlib.Object(type=type(self))
  for key, val in vars(self).items():
    if not key.endswith('_hooks') and key != '_trace_state':
      yield reprlib.Attr(key, repr(val))
def __init_subclass__(cls):
  """Automatically register every Variable subclass as a JAX pytree node."""
  super().__init_subclass__()
  jtu.register_pytree_with_keys(
    cls,
    partial(_variable_flatten, with_keys=True),  # type: ignore
    partial(_variable_unflatten, cls=cls),  # type: ignore
    flatten_func=partial(_variable_flatten, with_keys=False),  # type: ignore
  )
# hooks API
if tp.TYPE_CHECKING:
  # Typing-only declarations of the optional per-subclass hook methods.
  # When a subclass defines one of these, __init__ prepends it to the
  # corresponding *_hooks tuple so it runs before user-supplied hooks.

  def on_get_value(self, value: A) -> A:
    raise NotImplementedError

  def on_set_value(self, value: A) -> A:
    raise NotImplementedError

  def on_create_value(self, value: A) -> A:
    raise NotImplementedError

  def on_add_axis(self: V, axis_name: AxisName, axis_index: AxisIndex) -> V:
    raise NotImplementedError

  def on_remove_axis(
    self: V, axis_name: AxisName, axis_index: AxisIndex
  ) -> V:
    raise NotImplementedError
# Type alias: a state path is represented as a plain string.
Path = str
def _module_unflatten(
  paths_moduledef: tuple[tuple[Path, ...], GraphDef[M]],
  variables: tuple[Variable[tp.Any], ...],
) -> M:
  """Pytree unflatten: rebuild a Module by merging leaf Variables, keyed by
  their paths, back into the static GraphDef."""
  paths, graphdef = paths_moduledef
  return graphdef.merge(State(zip(paths, variables)))
22,768 | from __future__ import annotations
import dataclasses
import typing as tp
from abc import ABCMeta
from copy import deepcopy
from functools import partial
import jax
import jax.tree_util as jtu
import numpy as np
import typing_extensions as tpe
from flax.experimental.nnx.nnx import (
errors,
filterlib,
graph_utils,
ids,
reprlib,
tracers,
)
from flax.experimental.nnx.nnx import variables as variableslib
from flax.experimental.nnx.nnx.graph_utils import GraphDef
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.rnglib import Rngs
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import Variable
from flax.typing import Path
class Module(reprlib.Representable, metaclass=ModuleMeta):
  """Base class for NNX modules.

  Mutable state lives in Variable attributes; raw arrays and States may not
  be assigned directly (see ``_setattr``). Instances are identified by
  ``_module__state.id`` and guarded by its trace state.
  """

  if tp.TYPE_CHECKING:
    # Internal bookkeeping (unique id + trace state). Annotation only —
    # presumably installed at construction time by ModuleMeta; TODO confirm.
    _module__state: ModuleState
# Route attribute writes through _setattr at runtime only (skipped under
# TYPE_CHECKING so static analyzers see plain attribute assignment).
if not tp.TYPE_CHECKING:

  def __setattr__(self, name: str, value: Any) -> None:
    self._setattr(name, value)

def _setattr(self, name: str, value: tp.Any) -> None:
  """Validate and set an attribute on the Module.

  Raises:
    errors.TraceContextError: if the Module escaped its creation trace level.
    ValueError: when assigning a raw array or State directly; such values
      must be wrapped in a Variable type.
  """
  if not self._module__state.trace_state.is_valid():
    raise errors.TraceContextError(
      'Cannot mutate Module from different trace level'
    )
  if isinstance(value, (jax.Array, np.ndarray, State)):
    raise ValueError(
      f"Trying to assign a '{type(value).__name__}' to the Module"
      f" attribute '{name}'. This is not supported. Non-hashable "
      'objects are not valid static state in JAX. Please wrap '
      'the value in a Variable type instead.'
    )
  object.__setattr__(self, name, value)
def __deepcopy__(self: M, memo=None) -> M:
  """Deep-copy by splitting into (state, graphdef), copying both, merging."""
  state, graphdef = self.split()
  return deepcopy(graphdef).merge(deepcopy(state))
def __hash__(self) -> int:
  """Hash by the module's unique id, not by its (mutable) contents."""
  module_id = self._module__state.id
  return hash(module_id)
def __nnx_repr__(self):
  """Yield repr parts; a global set of seen module ids breaks cycles when
  the same module appears more than once in the graph."""
  global SEEN_MODULES_REPR
  if SEEN_MODULES_REPR is None:
    # outermost call: create the guard set and remember to clear it
    SEEN_MODULES_REPR = set()
    clear_seen = True
  else:
    clear_seen = False
  if self._module__state.id in SEEN_MODULES_REPR:
    # already rendered higher up: elide to avoid infinite recursion
    yield reprlib.Object(type=type(self), empty_repr='...')
    return
  yield reprlib.Object(type=type(self))
  SEEN_MODULES_REPR.add(self._module__state.id)
  try:
    for name, value in vars(self).items():
      # show submodules and public non-Variable attributes
      if isinstance(value, Module) or (
        not isinstance(value, Variable) and not name.startswith('_')
      ):
        yield reprlib.Attr(name, repr(value))
  finally:
    if clear_seen:
      SEEN_MODULES_REPR = None
def init(cls: type[M], *args, **kwargs) -> tuple[State, GraphDef[M]]:
  """Construct ``cls(*args, **kwargs)`` and return its (state, graphdef) split.

  NOTE(review): presumably decorated as ``@classmethod`` in the original
  source — the decorator is not visible in this extract.
  """
  return cls(*args, **kwargs).split()
def create_abstract(cls: type[M]) -> type[M]:
  """Return a constructor proxy that builds the module under jax.eval_shape,
  so initialization is traced for structure only, without real computation.

  NOTE(review): presumably a ``@classmethod`` in the original — decorator
  not visible in this extract.
  """

  def lift_rngs(kwargs: dict[str, tp.Any]):
    # rngs may arrive forked as a plain mapping; rewrap as Rngs inside the trace
    if 'rngs' in kwargs and isinstance(rngs := kwargs['rngs'], tp.Mapping):
      kwargs['rngs'] = Rngs(rngs)
    return kwargs

  def _create_abstract(accessor: DelayedAccessor, *args, **kwargs):
    constructor = accessor(cls)
    if 'rngs' in kwargs and isinstance(rngs := kwargs['rngs'], Rngs):
      # fork outside the trace so eval_shape sees plain mapping data
      kwargs['rngs'] = rngs.fork()
    state, graphdef = jax.eval_shape(
      lambda: constructor(*args, **lift_rngs(kwargs)).split()
    )
    return graphdef.merge(state)

  return CallableProxy(_create_abstract)  # type: ignore
def partial_init(cls: type[M], state: State, *states: State) -> type[M]:
  """Creates a constructor that initializes the Module with the given state.

  ``partial_init`` takes one or more States and returns a constructor that uses
  ``jax.jit`` to initialize the Module and update its state with the given
  States. It's semantically equivalent to::

    module = MyModule(*args, **kwargs)
    module.update(state, *states)

  However, thanks to dead code elimination the resulting constructor will only
  initialize the subset of ``Variable``'s that were part of the given state(s).

  Example::

    >>> import jax.numpy as jnp
    >>> import jax
    >>> from flax.experimental import nnx
    ...
    >>> bias = jax.random.normal(jax.random.key(0), (4,))
    >>> state = nnx.State({'bias': bias})  # in reality load it from a checkpoint
    >>> linear = nnx.Linear.partial_init(state)(2, 4, rngs=nnx.Rngs(1))
    >>> y = linear(jnp.ones((1, 2)))
    ...
    >>> assert jnp.allclose(linear.bias, bias)
    >>> assert y.shape == (1, 4)

  Args:
    state: The State to initialize the Module with.
    *states: Additional States to initialize the Module with.

  Returns:
    A constructor that initializes the Module with the given States.

  NOTE(review): presumably a ``@classmethod`` in the original — decorator
  not visible in this extract.
  """
  states = (state, *states)

  def lift_rngs(kwargs: dict[str, tp.Any]):
    # rngs may arrive forked as a plain mapping; rewrap as Rngs inside jit
    if 'rngs' in kwargs and isinstance(rngs := kwargs['rngs'], tp.Mapping):
      kwargs['rngs'] = Rngs(rngs)
    return kwargs

  def _partial_init(accessor: DelayedAccessor, *args, **kwargs):
    constructor: tp.Callable[[], M] = accessor(cls)
    if 'rngs' in kwargs and isinstance(rngs := kwargs['rngs'], Rngs):
      # fork outside the jit so the traced function sees plain mapping data
      kwargs['rngs'] = rngs.fork()

    def _partial_init_constructor():
      # construct, overwrite with the provided states, return the split;
      # jit's dead code elimination drops unneeded initialization work
      module = constructor(*args, **lift_rngs(kwargs))
      module.update(*states)
      return module.split()

    graphdef: GraphDef[M]
    state: State
    state, graphdef = jax.jit(_partial_init_constructor)()
    module = graphdef.merge(state)
    return module

  return CallableProxy(_partial_init)  # type: ignore
def clone(self: M) -> M:
  """Return an independent copy of this module built by split + merge."""
  split_parts = self.split()
  return merge(split_parts)
# NOTE(review): the first two defs are presumably @tp.overload-decorated in
# the original source — decorators not visible in this extract; confirm.
def split(self: M) -> tuple[State, GraphDef[M]]:
  ...

def split(self: M, first: filterlib.Filter, /) -> tuple[State, GraphDef[M]]:
  ...

def split(
  self: M,
  first: filterlib.Filter,
  second: filterlib.Filter,
  /,
  *filters: filterlib.Filter,
) -> tuple[State, tpe.Unpack[tuple[State, ...]], GraphDef[M]]:
  ...

def split(
  self: M, *filters: filterlib.Filter
) -> tuple[State, tpe.Unpack[tuple[State, ...]], GraphDef[M]]:
  """Flatten into one or more States (partitioned by filters) + a GraphDef."""
  state, graphdef, _ = graph_utils.graph_flatten(self)
  if len(filters) == 0:
    # no partitioning: single State with everything
    states = (state,)
  elif len(filters) == 1:
    states = (state.split(filters[0]),)
  else:
    states = state.split(filters[0], filters[1], *filters[2:])
  return *states, graphdef
def get_state(self) -> State:
  """Return only the State half of a full split."""
  state, _graphdef = self.split()
  return state
def get_graphdef(self: M) -> GraphDef[M]:
  """Return only the GraphDef half of a full split."""
  _state, graphdef = self.split()
  return graphdef
# NOTE(review): the first two defs are presumably @tp.overload-decorated in
# the original source — decorators not visible in this extract; confirm.
def extract(self, first: filterlib.Filter, /) -> State:
  ...

def extract(
  self,
  first: filterlib.Filter,
  second: filterlib.Filter,
  /,
  *filters: filterlib.Filter,
) -> tuple[State, ...]:
  ...

def extract(
  self,
  first: filterlib.Filter,
  /,
  *filters: filterlib.Filter,
) -> tp.Union[State, tuple[State, ...]]:
  """Return the sub-State(s) of this module selected by the filter(s)."""
  state = self.get_state()
  if filters:
    return state.extract(first, filters[0], *filters[1:])
  return state.extract(first)
# NOTE(review): the first two defs are presumably @tp.overload-decorated in
# the original source — decorators not visible in this extract; confirm.
def pop(
  self,
  filter: filterlib.Filter,
  /,
) -> State:
  ...

def pop(
  self,
  filter: filterlib.Filter,
  filter2: filterlib.Filter,
  /,
  *filters: filterlib.Filter,
) -> tuple[State, ...]:
  ...

def pop(
  self, *filters: filterlib.Filter
) -> tp.Union[State, tuple[State, ...]]:
  """Remove and return the State(s) matching the given filter(s) in place."""
  if not filters:
    raise ValueError('Expected at least one filter')
  states = graph_utils.graph_pop(self, filters)
  # single filter -> single State; multiple filters -> tuple of States
  return states[0] if len(states) == 1 else states
def apply(self: M) -> ApplyCaller[M]:
  """Return a proxy that runs a method on a clone and yields (output, clone),
  leaving ``self`` unmodified."""

  def _apply(accessor: DelayedAccessor, *args, **kwargs) -> tuple[tp.Any, M]:
    module = self.clone()  # work on a copy so self is untouched
    fn = accessor(module)
    out = fn(*args, **kwargs)
    return out, module

  return CallableProxy(_apply)  # type: ignore
def update(self: M, update: Updates[M], /, *updates: Updates[M]) -> None:
  """Update this module in place from States, GraphDefs and/or Modules.

  At most one GraphDef/Module may appear among the updates; it updates the
  static structure, while every State updates Variable values.
  """
  updates = (update, *updates)

  def _states_and_moduledef(
    updates,
  ) -> tuple[list[State], tp.Optional[Module]]:
    # flatten possibly-nested updates, keeping GraphDef/State leaves intact
    leaves = jax.tree_util.tree_leaves(
      updates, is_leaf=lambda x: isinstance(x, (GraphDef, State))
    )
    states: list[State] = []
    module: tp.Optional[Module] = None
    for leaf in leaves:
      if isinstance(leaf, (Module, GraphDef)):
        if module is not None:
          raise ValueError(
            'Expected only one GraphDef or Module in the updates'
          )
        if isinstance(leaf, Module):
          # a Module carries both structure and state
          module = leaf
          states.append(leaf.get_state())
        else:
          module = leaf.make_empty()
      elif isinstance(leaf, State):
        states.append(leaf)
      else:
        raise ValueError(
          'Expected a GraphDef, Module or State, got'
          f' {type(leaf).__name__}'
        )
    return states, module

  states, module_update = _states_and_moduledef(updates)
  if module_update is not None:
    graph_utils.graph_update_static(self, module_update)
  if states:
    graph_utils.graph_update_dynamic(self, states)
def sow(
  self,
  variable_type: tp.Type[variableslib.Variable[tp.Any]],
  name: str,
  value: A,
  reduce_fn: tp.Callable[[B, A], B] = tuple_reduce,
  init_fn: tp.Callable[[], B] = tuple_init,  # type: ignore
) -> None:
  """Accumulate ``value`` into the Variable attribute ``name``.

  Creates the attribute as ``variable_type(reduce_fn(init_fn(), value))`` on
  first use; afterwards folds each new value in with ``reduce_fn``. The
  existing attribute must be exactly of type ``variable_type``.
  """
  if not hasattr(self, name):
    # first sow: seed from init_fn and wrap in the requested Variable type
    setattr(self, name, variable_type(reduce_fn(init_fn(), value)))
    return
  variable = getattr(self, name)
  if not isinstance(variable, variableslib.Variable):
    raise ValueError(
      f"Expected '{name}' to be a Variable, got {type(variable).__name__}"
    )
  if type(variable) != variable_type:
    raise ValueError(
      f"Expected '{name}' to be of type '{variable_type.__name__}', "
      f"got '{type(variable).__name__}'"
    )
  variable.raw_value = reduce_fn(variable.raw_value, value)
def modules(self) -> tp.Iterator[tuple[Path, Module]]:
  """Yield (path, module) for every Module node in the graph, incl. self."""
  for path, node in graph_utils.iter_nodes(self):
    if isinstance(node, Module):
      yield path, node
def set_attributes(
  self,
  *filters: filterlib.Filter,
  raise_if_not_found: bool = True,
  **attributes: tp.Any,
) -> None:
  """Sets the attributes of nested Modules including the current Module.

  If the attribute is not found in the Module, it is ignored.

  Example::

    >>> from flax.experimental import nnx
    ...
    >>> class Block(nnx.Module):
    ...   def __init__(self, din, dout, *, rngs: nnx.Rngs):
    ...     self.linear = nnx.Linear(din, dout, rngs=rngs)
    ...     self.dropout = nnx.Dropout(0.5, deterministic=False)
    ...     self.batch_norm = nnx.BatchNorm(10, use_running_average=False, rngs=rngs)
    ...
    >>> block = Block(2, 5, rngs=nnx.Rngs(0))
    >>> block.dropout.deterministic, block.batch_norm.use_running_average
    (False, False)
    >>> block.set_attributes(deterministic=True, use_running_average=True)
    >>> block.dropout.deterministic, block.batch_norm.use_running_average
    (True, True)

  ``Filter``'s can be used to set the attributes of specific Modules::

    >>> block = Block(2, 5, rngs=nnx.Rngs(0))
    >>> block.set_attributes(nnx.Dropout, deterministic=True, use_running_average=True)
    >>> # Only the dropout will be modified
    >>> block.dropout.deterministic, block.batch_norm.use_running_average
    (True, False)

  Args:
    *filters: Filters to select the Modules to set the attributes of.
    raise_if_not_found: If True (default), raises a ValueError if at least one attribute
      instance is not found in one of the selected Modules.
    **attributes: The attributes to set.
  """
  remaining_attributes = set(attributes.keys())
  if not filters:
    filters = (True,)  # default: match every Module
  predicates = tuple(map(filterlib.to_predicate, filters))
  for path, module in self.modules():
    for predicate in predicates:
      if predicate(path, module):
        for name, value in attributes.items():
          if hasattr(module, name):
            if name in remaining_attributes:
              remaining_attributes.remove(name)
            setattr(module, name, value)
        break  # first matching predicate wins for this module
  if remaining_attributes and raise_if_not_found:
    raise ValueError(
      f'Could not find at least one instance of the following attributes: {remaining_attributes}'
    )
def __init_subclass__(cls, experimental_pytree: bool = False) -> None:
  """Register every Module subclass as a mutable graph node type and,
  when ``experimental_pytree=True``, also as a JAX pytree."""
  super().__init_subclass__()
  graph_utils.register_mutable_node_type(
    type=cls,
    flatten=_module_graph_flatten,
    set_key=_module_graph_set_key,
    pop_key=_module_graph_pop_key,
    create_empty=_module_graph_create_empty,
    clear=_module_graph_clear,
  )
  if experimental_pytree:
    jtu.register_pytree_with_keys(
      cls,
      partial(_module_flatten, with_keys=True),
      _module_unflatten,
      flatten_func=partial(_module_flatten, with_keys=False),
    )
def _module_graph_flatten(module: Module):
nodes = tuple(
(name, value)
for name, value in vars(module).items()
if name != '_module__state'
)
return nodes, type(module) | null |
22,769 | from __future__ import annotations
import dataclasses
import typing as tp
from abc import ABCMeta
from copy import deepcopy
from functools import partial
import jax
import jax.tree_util as jtu
import numpy as np
import typing_extensions as tpe
from flax.experimental.nnx.nnx import (
errors,
filterlib,
graph_utils,
ids,
reprlib,
tracers,
)
from flax.experimental.nnx.nnx import variables as variableslib
from flax.experimental.nnx.nnx.graph_utils import GraphDef
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.rnglib import Rngs
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import Variable
from flax.typing import Path
class Module(reprlib.Representable, metaclass=ModuleMeta):
if tp.TYPE_CHECKING:
_module__state: ModuleState
if not tp.TYPE_CHECKING:
def __setattr__(self, name: str, value: Any) -> None:
self._setattr(name, value)
def _setattr(self, name: str, value: tp.Any) -> None:
if not self._module__state.trace_state.is_valid():
raise errors.TraceContextError(
'Cannot mutate Module from different trace level'
)
if isinstance(value, (jax.Array, np.ndarray, State)):
raise ValueError(
f"Trying to assign a '{type(value).__name__}' to the Module"
f" attribute '{name}'. This is not supported. Non-hashable "
'objects are not valid static state in JAX. Please wrap '
'the value in a Variable type instead.'
)
object.__setattr__(self, name, value)
def __deepcopy__(self: M, memo=None) -> M:
state, graphdef = self.split()
graphdef = deepcopy(graphdef)
state = deepcopy(state)
return graphdef.merge(state)
def __hash__(self) -> int:
return hash(self._module__state.id)
def __nnx_repr__(self):
global SEEN_MODULES_REPR
if SEEN_MODULES_REPR is None:
SEEN_MODULES_REPR = set()
clear_seen = True
else:
clear_seen = False
if self._module__state.id in SEEN_MODULES_REPR:
yield reprlib.Object(type=type(self), empty_repr='...')
return
yield reprlib.Object(type=type(self))
SEEN_MODULES_REPR.add(self._module__state.id)
try:
for name, value in vars(self).items():
if isinstance(value, Module) or (
not isinstance(value, Variable) and not name.startswith('_')
):
yield reprlib.Attr(name, repr(value))
finally:
if clear_seen:
SEEN_MODULES_REPR = None
def init(cls: type[M], *args, **kwargs) -> tuple[State, GraphDef[M]]:
return cls(*args, **kwargs).split()
def create_abstract(cls: type[M]) -> type[M]:
def lift_rngs(kwargs: dict[str, tp.Any]):
if 'rngs' in kwargs and isinstance(rngs := kwargs['rngs'], tp.Mapping):
kwargs['rngs'] = Rngs(rngs)
return kwargs
def _create_abstract(accessor: DelayedAccessor, *args, **kwargs):
constructor = accessor(cls)
if 'rngs' in kwargs and isinstance(rngs := kwargs['rngs'], Rngs):
kwargs['rngs'] = rngs.fork()
state, graphdef = jax.eval_shape(
lambda: constructor(*args, **lift_rngs(kwargs)).split()
)
return graphdef.merge(state)
return CallableProxy(_create_abstract) # type: ignore
def partial_init(cls: type[M], state: State, *states: State) -> type[M]:
"""Creates a constuctor that initializes the Module with the given state.
``partial_init`` takes one or more States and returns a constructor that uses
``jax.jit`` to initialize the Module and update its state with the given
States. Its semantically equivalent to::
module = MyModule(*args, **kwargs)
module.update(state, *states)
However, thanks to dead code elimination the resulting constructor will only
initialize the subset of ``Variable``'s that were part of the given state(s).
Example::
>>> import jax.numpy as jnp
>>> import jax
>>> from flax.experimental import nnx
...
>>> bias = jax.random.normal(jax.random.key(0), (4,))
>>> state = nnx.State({'bias': bias}) # in reality load it from a checkpoint
>>> linear = nnx.Linear.partial_init(state)(2, 4, rngs=nnx.Rngs(1))
>>> y = linear(jnp.ones((1, 2)))
...
>>> assert jnp.allclose(linear.bias, bias)
>>> assert y.shape == (1, 4)
Args:
state: The State to initialize the Module with.
*states: Additional States to initialize the Module with.
Returns:
A constructor that initializes the Module with the given States.
"""
states = (state, *states)
def lift_rngs(kwargs: dict[str, tp.Any]):
if 'rngs' in kwargs and isinstance(rngs := kwargs['rngs'], tp.Mapping):
kwargs['rngs'] = Rngs(rngs)
return kwargs
def _partial_init(accessor: DelayedAccessor, *args, **kwargs):
constructor: tp.Callable[[], M] = accessor(cls)
if 'rngs' in kwargs and isinstance(rngs := kwargs['rngs'], Rngs):
kwargs['rngs'] = rngs.fork()
def _partial_init_constructor():
module = constructor(*args, **lift_rngs(kwargs))
module.update(*states)
return module.split()
graphdef: GraphDef[M]
state: State
state, graphdef = jax.jit(_partial_init_constructor)()
module = graphdef.merge(state)
return module
return CallableProxy(_partial_init) # type: ignore
def clone(self: M) -> M:
return merge(self.split())
def split(self: M) -> tuple[State, GraphDef[M]]:
...
def split(self: M, first: filterlib.Filter, /) -> tuple[State, GraphDef[M]]:
...
def split(
self: M,
first: filterlib.Filter,
second: filterlib.Filter,
/,
*filters: filterlib.Filter,
) -> tuple[State, tpe.Unpack[tuple[State, ...]], GraphDef[M]]:
...
def split(
self: M, *filters: filterlib.Filter
) -> tuple[State, tpe.Unpack[tuple[State, ...]], GraphDef[M]]:
state, graphdef, _ = graph_utils.graph_flatten(self)
if len(filters) == 0:
states = (state,)
elif len(filters) == 1:
states = (state.split(filters[0]),)
else:
states = state.split(filters[0], filters[1], *filters[2:])
return *states, graphdef
def get_state(self) -> State:
state, _ = self.split()
return state
def get_graphdef(self: M) -> GraphDef[M]:
_, graphdef = self.split()
return graphdef
def extract(self, first: filterlib.Filter, /) -> State:
...
def extract(
self,
first: filterlib.Filter,
second: filterlib.Filter,
/,
*filters: filterlib.Filter,
) -> tuple[State, ...]:
...
def extract(
self,
first: filterlib.Filter,
/,
*filters: filterlib.Filter,
) -> tp.Union[State, tuple[State, ...]]:
state = self.get_state()
if len(filters) == 0:
states = state.extract(first)
else:
states = state.extract(first, filters[0], *filters[1:])
return states
def pop(
self,
filter: filterlib.Filter,
/,
) -> State:
...
def pop(
self,
filter: filterlib.Filter,
filter2: filterlib.Filter,
/,
*filters: filterlib.Filter,
) -> tuple[State, ...]:
...
def pop(
self, *filters: filterlib.Filter
) -> tp.Union[State, tuple[State, ...]]:
if len(filters) == 0:
raise ValueError('Expected at least one filter')
states = graph_utils.graph_pop(self, filters)
if len(states) == 1:
return states[0]
else:
return states
def apply(self: M) -> ApplyCaller[M]:
def _apply(accessor: DelayedAccessor, *args, **kwargs) -> tuple[tp.Any, M]:
module = self.clone()
fn = accessor(module)
out = fn(*args, **kwargs)
return out, module
return CallableProxy(_apply) # type: ignore
def update(self: M, update: Updates[M], /, *updates: Updates[M]) -> None:
updates = (update, *updates)
def _states_and_moduledef(
updates,
) -> tuple[list[State], tp.Optional[Module]]:
leaves = jax.tree_util.tree_leaves(
updates, is_leaf=lambda x: isinstance(x, (GraphDef, State))
)
states: list[State] = []
module: tp.Optional[Module] = None
for leaf in leaves:
if isinstance(leaf, (Module, GraphDef)):
if module is not None:
raise ValueError(
'Expected only one GraphDef or Module in the updates'
)
if isinstance(leaf, Module):
module = leaf
states.append(leaf.get_state())
else:
module = leaf.make_empty()
elif isinstance(leaf, State):
states.append(leaf)
else:
raise ValueError(
'Expected a GraphDef, Module or State, got'
f' {type(leaf).__name__}'
)
return states, module
states, module_update = _states_and_moduledef(updates)
if module_update is not None:
graph_utils.graph_update_static(self, module_update)
if states:
graph_utils.graph_update_dynamic(self, states)
def sow(
self,
variable_type: tp.Type[variableslib.Variable[tp.Any]],
name: str,
value: A,
reduce_fn: tp.Callable[[B, A], B] = tuple_reduce,
init_fn: tp.Callable[[], B] = tuple_init, # type: ignore
) -> None:
if hasattr(self, name):
variable = getattr(self, name)
if not isinstance(variable, variableslib.Variable):
raise ValueError(
f"Expected '{name}' to be a Variable, got {type(variable).__name__}"
)
elif type(variable) != variable_type:
raise ValueError(
f"Expected '{name}' to be of type '{variable_type.__name__}', "
f"got '{type(variable).__name__}'"
)
variable.raw_value = reduce_fn(variable.raw_value, value)
else:
reduced_value = reduce_fn(init_fn(), value)
setattr(self, name, variable_type(reduced_value))
def modules(self) -> tp.Iterator[tuple[Path, Module]]:
for path, value in graph_utils.iter_nodes(self):
if isinstance(value, Module):
yield path, value
def set_attributes(
self,
*filters: filterlib.Filter,
raise_if_not_found: bool = True,
**attributes: tp.Any,
) -> None:
"""Sets the attributes of nested Modules including the current Module.
If the attribute is not found in the Module, it is ignored.
Example::
>>> from flax.experimental import nnx
...
>>> class Block(nnx.Module):
... def __init__(self, din, dout, *, rngs: nnx.Rngs):
... self.linear = nnx.Linear(din, dout, rngs=rngs)
... self.dropout = nnx.Dropout(0.5, deterministic=False)
... self.batch_norm = nnx.BatchNorm(10, use_running_average=False, rngs=rngs)
...
>>> block = Block(2, 5, rngs=nnx.Rngs(0))
>>> block.dropout.deterministic, block.batch_norm.use_running_average
(False, False)
>>> block.set_attributes(deterministic=True, use_running_average=True)
>>> block.dropout.deterministic, block.batch_norm.use_running_average
(True, True)
``Filter``'s can be used to set the attributes of specific Modules::
>>> block = Block(2, 5, rngs=nnx.Rngs(0))
>>> block.set_attributes(nnx.Dropout, deterministic=True, use_running_average=True)
>>> # Only the dropout will be modified
>>> block.dropout.deterministic, block.batch_norm.use_running_average
(True, False)
Args:
*filters: Filters to select the Modules to set the attributes of.
raise_if_not_found: If True (default), raises a ValueError if at least one attribute
instance is not found in one of the selected Modules.
**attributes: The attributes to set.
"""
remaining_attributes = set(attributes.keys())
if not filters:
filters = (True,)
predicates = tuple(map(filterlib.to_predicate, filters))
for path, module in self.modules():
for predicate in predicates:
if predicate(path, module):
for name, value in attributes.items():
if hasattr(module, name):
if name in remaining_attributes:
remaining_attributes.remove(name)
setattr(module, name, value)
break
if remaining_attributes and raise_if_not_found:
raise ValueError(
f'Could not find at least one instance of the following attributes: {remaining_attributes}'
)
def __init_subclass__(cls, experimental_pytree: bool = False) -> None:
super().__init_subclass__()
graph_utils.register_mutable_node_type(
type=cls,
flatten=_module_graph_flatten,
set_key=_module_graph_set_key,
pop_key=_module_graph_pop_key,
create_empty=_module_graph_create_empty,
clear=_module_graph_clear,
)
if experimental_pytree:
jtu.register_pytree_with_keys(
cls,
partial(_module_flatten, with_keys=True),
_module_unflatten,
flatten_func=partial(_module_flatten, with_keys=False),
)
class Variable(tp.Generic[A], reprlib.Representable):
  """A mutable box holding a leaf value plus hook pipelines and metadata."""

  # The stored value, before get-value hooks are applied.
  raw_value: A
  # Hook pipelines, each applied in order by the corresponding operation.
  set_value_hooks: tuple[SetValueHook[A], ...]
  get_value_hooks: tuple[GetValueHook[A], ...]
  create_value_hooks: tuple[CreateValueHook[A], ...]
  add_axis_hooks: tuple[AddAxisHook['Variable[A]'], ...]
  remove_axis_hooks: tuple[RemoveAxisHook['Variable[A]'], ...]
  # Guards mutation once the Variable escapes its creation trace level.
  _trace_state: tracers.TraceState
def __init__(
  self,
  value: tp.Union[A, VariableMetadata[A]],
  set_value_hooks: tp.Union[
    SetValueHook[A], tp.Sequence[SetValueHook[A]]
  ] = (),
  get_value_hooks: tp.Union[
    GetValueHook[A], tp.Sequence[GetValueHook[A]]
  ] = (),
  create_value_hooks: tp.Union[
    CreateValueHook[A], tp.Sequence[CreateValueHook[A]]
  ] = (),
  add_axis_hooks: tp.Union[
    AddAxisHook['Variable[A]'], tp.Sequence[AddAxisHook['Variable[A]']]
  ] = (),
  remove_axis_hooks: tp.Union[
    RemoveAxisHook['Variable[A]'],
    tp.Sequence[RemoveAxisHook['Variable[A]']],
  ] = (),
  **metadata: tp.Any,
):
  """Initialize the Variable with ``value``, hook pipelines and metadata.

  ``value`` may be a plain value or a ``VariableMetadata`` wrapper, in which
  case its hooks are appended after the ones passed here and its metadata is
  merged in. If the subclass defines ``on_*`` hook methods they are
  prepended so they run first. Finally the create-value hooks are applied
  to produce the stored ``raw_value``.
  """
  # Write through vars() to bypass __setattr__: _trace_state must exist
  # before _setattr's trace-level check can run.
  vars(self)['_trace_state'] = tracers.TraceState()

  def as_hook_tuple(hooks) -> tuple:
    # Normalize a hook argument: falsy -> empty tuple, single callable ->
    # 1-tuple, any other sequence -> tuple.
    if not hooks:
      return ()
    return (hooks,) if callable(hooks) else tuple(hooks)

  set_value_hooks = as_hook_tuple(set_value_hooks)
  get_value_hooks = as_hook_tuple(get_value_hooks)
  create_value_hooks = as_hook_tuple(create_value_hooks)
  add_axis_hooks = as_hook_tuple(add_axis_hooks)
  remove_axis_hooks = as_hook_tuple(remove_axis_hooks)

  if isinstance(value, VariableMetadata):
    # Merge hooks and metadata carried by the VariableMetadata wrapper;
    # hooks passed directly to __init__ run before the wrapper's hooks.
    def merged(local: tuple, from_value: tuple) -> tuple:
      return local + from_value if local else from_value

    set_value_hooks = merged(set_value_hooks, value.set_value_hooks)
    get_value_hooks = merged(get_value_hooks, value.get_value_hooks)
    create_value_hooks = merged(create_value_hooks, value.create_value_hooks)
    add_axis_hooks = merged(add_axis_hooks, value.add_axis_hooks)
    remove_axis_hooks = merged(remove_axis_hooks, value.remove_axis_hooks)
    metadata.update(dict(value.metadata))
    value = tp.cast(A, value.raw_value)

  def with_class_hook(hooks: tuple, method_name: str) -> tuple:
    # If the subclass defines an on_* hook method, ensure it runs first.
    if hasattr(self, method_name):
      method = getattr(type(self), method_name)
      if method not in hooks:
        return (method, *hooks)
    return hooks

  get_value_hooks = with_class_hook(get_value_hooks, 'on_get_value')
  set_value_hooks = with_class_hook(set_value_hooks, 'on_set_value')
  create_value_hooks = with_class_hook(create_value_hooks, 'on_create_value')
  add_axis_hooks = with_class_hook(add_axis_hooks, 'on_add_axis')
  remove_axis_hooks = with_class_hook(remove_axis_hooks, 'on_remove_axis')

  self.raw_value = value
  self.get_value_hooks = get_value_hooks
  self.set_value_hooks = set_value_hooks
  self.create_value_hooks = create_value_hooks
  self.add_axis_hooks = add_axis_hooks
  self.remove_axis_hooks = remove_axis_hooks
  vars(self).update(metadata)
  # run create_value hooks
  self.raw_value = self.create_value(self.raw_value)
if tp.TYPE_CHECKING:
def __getattr__(self, name: str) -> tp.Any:
...
else:
def __setattr__(self, name: str, value: Any) -> None:
return self._setattr(name, value)
def _setattr(self, name: str, value: tp.Any):
if not self._trace_state.is_valid():
raise ValueError(
'Cannot mutate Variable from a different trace level'
)
object.__setattr__(self, name, value)
def copy_from(self, other: 'Variable[A]') -> None:
if not self.is_equivalent(other):
raise ValueError(
f'Cannot copy from incompatible container, '
f'expected {type(self).__name__}, got {type(other).__name__}'
)
if self is other:
return
vars_dict = vars(self)
vars_dict.clear()
vars_dict.update(vars(other))
def copy_from_def(self, other: 'nnx.graph_utils.VariableDef', /, value: A):
_trace_state = self._trace_state
variable_vars = vars(self)
variable_vars.clear()
variable_vars.update(other.metadata, _trace_state=_trace_state, raw_value=value)
def value(self) -> A:
value = self.raw_value
if self.get_value_hooks:
for hook in self.get_value_hooks:
value = hook(self, value)
return value
def value(self, value: A):
if isinstance(value, Variable):
raise ValueError(
'Cannot set value to a Variable, ' 'use `copy_from` method instead'
)
if self.set_value_hooks:
for hook in self.set_value_hooks:
value = hook(self, value)
self.raw_value = value
def create_value(self, value: A):
for hook in self.create_value_hooks:
value = hook(self, value)
return value
def add_axis(self, axis_name: AxisName, axis_index: AxisIndex):
  """Notify all registered ``add_axis`` hooks of a newly added axis."""
  for hook in self.add_axis_hooks:
    hook(self, axis_name, axis_index)
def remove_axis(self, axis_name: AxisName, axis_index: AxisIndex):
  """Notify all registered ``remove_axis`` hooks of a removed axis."""
  for hook in self.remove_axis_hooks:
    hook(self, axis_name, axis_index)
def __eq__(self, other: object) -> bool:
  # Equal iff exact same Variable subclass and identical attribute dicts
  # (raw_value, hook tuples and any metadata all participate).
  return type(self) is type(other) and vars(other) == vars(self)
# NOTE(review): the two stub signatures below look like typing overloads —
# the @tp.overload decorators appear lost in extraction; confirm upstream.
def replace(self, *, value: B, **kwargs) -> 'Variable[B]':
  ...
def replace(self, **kwargs) -> 'Variable[A]':
  ...
def replace(self, **kwargs) -> 'Variable[tp.Any]':
  """Return a new Variable with the given attributes replaced.

  If ``raw_value`` is itself an equivalent Variable, that Variable is used
  as the replacement target (recursively when more kwargs remain).
  """
  # return `value` if it is a Variable
  if 'raw_value' in kwargs and isinstance(
    value := kwargs['raw_value'], Variable
  ):
    # remove value from kwargs
    kwargs.pop('raw_value')
    if not self.is_equivalent(value):
      raise ValueError(
        'Cannot replace value from incompatible container, '
        f'expected {type(self).__name__}, got {type(value).__name__}'
      )
    # if kwargs aren't empty, recursively call replace
    # else return variable value
    if kwargs:
      return value.replace(**kwargs)
    else:
      return value
  # get and update attributes
  attributes = vars(self).copy()
  attributes.update(**kwargs)
  # return new instance with updated attributes
  obj = object.__new__(type(self))
  vars(obj).update(attributes)
  return obj
def is_equivalent(self, other: tp.Any) -> bool:
  """True when ``other`` is exactly the same Variable subclass."""
  return type(self) is type(other)
def copy(self: 'Variable[A]') -> 'Variable[A]':
  """Return a shallow copy of this variable with a fresh trace state."""
  clone = object.__new__(type(self))
  state = vars(self).copy()
  state['_trace_state'] = tracers.TraceState()
  vars(clone).update(state)
  return clone
def __nnx_repr__(self):
  """Yield repr parts; hook tuples and the trace state are elided."""
  yield reprlib.Object(type=type(self))
  for name, value in vars(self).items():
    if name.endswith('_hooks') or name == "_trace_state":
      continue
    yield reprlib.Attr(name, repr(value))
def __init_subclass__(cls):
  """Register every Variable subclass as a JAX pytree node (with key paths)."""
  super().__init_subclass__()
  jtu.register_pytree_with_keys(
    cls,
    partial(_variable_flatten, with_keys=True),  # type: ignore
    partial(_variable_unflatten, cls=cls),  # type: ignore
    flatten_func=partial(_variable_flatten, with_keys=False),  # type: ignore
  )
# hooks API
if tp.TYPE_CHECKING:
  # Typing-only stubs describing the optional hook methods subclasses may
  # define; __init__ collects any that exist into the *_hooks tuples.
  def on_get_value(self, value: A) -> A:
    raise NotImplementedError
  def on_set_value(self, value: A) -> A:
    raise NotImplementedError
  def on_create_value(self, value: A) -> A:
    raise NotImplementedError
  def on_add_axis(self: V, axis_name: AxisName, axis_index: AxisIndex) -> V:
    raise NotImplementedError
  def on_remove_axis(
    self: V, axis_name: AxisName, axis_index: AxisIndex
  ) -> V:
    raise NotImplementedError
def _module_graph_set_key(module: Module, name: str, value: tp.Any):
  """Graph-utils setter: update an existing Variable in place, else setattr."""
  if (
    hasattr(module, name)
    and isinstance(variable := getattr(module, name), Variable)
    and isinstance(value, Variable)
  ):
    # Preserve the existing Variable object's identity; only its contents
    # are replaced.
    variable.copy_from(value)
  else:
    setattr(module, name, value)
22,770 | from __future__ import annotations
import dataclasses
import typing as tp
from abc import ABCMeta
from copy import deepcopy
from functools import partial
import jax
import jax.tree_util as jtu
import numpy as np
import typing_extensions as tpe
from flax.experimental.nnx.nnx import (
errors,
filterlib,
graph_utils,
ids,
reprlib,
tracers,
)
from flax.experimental.nnx.nnx import variables as variableslib
from flax.experimental.nnx.nnx.graph_utils import GraphDef
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.rnglib import Rngs
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import Variable
from flax.typing import Path
class Module(reprlib.Representable, metaclass=ModuleMeta):
def __setattr__(self, name: str, value: Any) -> None:
def _setattr(self, name: str, value: tp.Any) -> None:
def __deepcopy__(self: M, memo=None) -> M:
def __hash__(self) -> int:
def __nnx_repr__(self):
def init(cls: type[M], *args, **kwargs) -> tuple[State, GraphDef[M]]:
def create_abstract(cls: type[M]) -> type[M]:
def lift_rngs(kwargs: dict[str, tp.Any]):
def _create_abstract(accessor: DelayedAccessor, *args, **kwargs):
def partial_init(cls: type[M], state: State, *states: State) -> type[M]:
def lift_rngs(kwargs: dict[str, tp.Any]):
def _partial_init(accessor: DelayedAccessor, *args, **kwargs):
def _partial_init_constructor():
def clone(self: M) -> M:
def split(self: M) -> tuple[State, GraphDef[M]]:
def split(self: M, first: filterlib.Filter, /) -> tuple[State, GraphDef[M]]:
def split(
self: M,
first: filterlib.Filter,
second: filterlib.Filter,
/,
*filters: filterlib.Filter,
) -> tuple[State, tpe.Unpack[tuple[State, ...]], GraphDef[M]]:
def split(
self: M, *filters: filterlib.Filter
) -> tuple[State, tpe.Unpack[tuple[State, ...]], GraphDef[M]]:
def get_state(self) -> State:
def get_graphdef(self: M) -> GraphDef[M]:
def extract(self, first: filterlib.Filter, /) -> State:
def extract(
self,
first: filterlib.Filter,
second: filterlib.Filter,
/,
*filters: filterlib.Filter,
) -> tuple[State, ...]:
def extract(
self,
first: filterlib.Filter,
/,
*filters: filterlib.Filter,
) -> tp.Union[State, tuple[State, ...]]:
def pop(
self,
filter: filterlib.Filter,
/,
) -> State:
def pop(
self,
filter: filterlib.Filter,
filter2: filterlib.Filter,
/,
*filters: filterlib.Filter,
) -> tuple[State, ...]:
def pop(
self, *filters: filterlib.Filter
) -> tp.Union[State, tuple[State, ...]]:
def apply(self: M) -> ApplyCaller[M]:
def _apply(accessor: DelayedAccessor, *args, **kwargs) -> tuple[tp.Any, M]:
def update(self: M, update: Updates[M], /, *updates: Updates[M]) -> None:
def _states_and_moduledef(
updates,
) -> tuple[list[State], tp.Optional[Module]]:
def sow(
self,
variable_type: tp.Type[variableslib.Variable[tp.Any]],
name: str,
value: A,
reduce_fn: tp.Callable[[B, A], B] = tuple_reduce,
init_fn: tp.Callable[[], B] = tuple_init, # type: ignore
) -> None:
def modules(self) -> tp.Iterator[tuple[Path, Module]]:
def set_attributes(
self,
*filters: filterlib.Filter,
raise_if_not_found: bool = True,
**attributes: tp.Any,
) -> None:
def __init_subclass__(cls, experimental_pytree: bool = False) -> None:
def _module_graph_pop_key(module: Module, name: str):
  """Graph-utils helper: remove and return attribute ``name`` from ``module``."""
  return vars(module).pop(name)
22,771 | from __future__ import annotations
import dataclasses
import typing as tp
from abc import ABCMeta
from copy import deepcopy
from functools import partial
import jax
import jax.tree_util as jtu
import numpy as np
import typing_extensions as tpe
from flax.experimental.nnx.nnx import (
errors,
filterlib,
graph_utils,
ids,
reprlib,
tracers,
)
from flax.experimental.nnx.nnx import variables as variableslib
from flax.experimental.nnx.nnx.graph_utils import GraphDef
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.rnglib import Rngs
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import Variable
from flax.typing import Path
M = tp.TypeVar('M', bound='Module')
class ModuleState(reprlib.Representable):
  """Per-module bookkeeping: the JAX trace level and a stable identity."""

  __slots__ = ('_trace_state', '_id')
  def __init__(self):
    self._trace_state = tracers.TraceState()
    self._id = ids.uuid()
  # NOTE(review): trace_state and id are presumably @property accessors —
  # the decorators appear lost in extraction; confirm upstream.
  def trace_state(self) -> tracers.TraceState:
    return self._trace_state
  def id(self) -> ids.UUID:
    return self._id
  def __nnx_repr__(self):
    yield reprlib.Object(type(self))
    yield reprlib.Attr('trace_state', self._trace_state)
def _module_graph_create_empty(cls: tp.Type[M]) -> M:
  """Allocate a Module of type ``cls`` without running ``__init__``."""
  module = object.__new__(cls)
  vars(module).update(_module__state=ModuleState())
  return module
22,772 | from __future__ import annotations
import dataclasses
import typing as tp
from abc import ABCMeta
from copy import deepcopy
from functools import partial
import jax
import jax.tree_util as jtu
import numpy as np
import typing_extensions as tpe
from flax.experimental.nnx.nnx import (
errors,
filterlib,
graph_utils,
ids,
reprlib,
tracers,
)
from flax.experimental.nnx.nnx import variables as variableslib
from flax.experimental.nnx.nnx.graph_utils import GraphDef
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.rnglib import Rngs
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import Variable
from flax.typing import Path
M = tp.TypeVar('M', bound='Module')
class Module(reprlib.Representable, metaclass=ModuleMeta):
if tp.TYPE_CHECKING:
  _module__state: ModuleState
if not tp.TYPE_CHECKING:
  # Installed only at runtime so type checkers see normal assignment.
  # NOTE(review): bare `Any` — presumably `tp.Any`; confirm the import.
  def __setattr__(self, name: str, value: Any) -> None:
    self._setattr(name, value)
def _setattr(self, name: str, value: tp.Any) -> None:
  """Checked setattr: enforces trace-level safety and rejects raw arrays."""
  if not self._module__state.trace_state.is_valid():
    raise errors.TraceContextError(
      'Cannot mutate Module from different trace level'
    )
  # Raw arrays/States are unhashable and would be treated as static state;
  # they must be wrapped in a Variable instead.
  if isinstance(value, (jax.Array, np.ndarray, State)):
    raise ValueError(
      f"Trying to assign a '{type(value).__name__}' to the Module"
      f" attribute '{name}'. This is not supported. Non-hashable "
      'objects are not valid static state in JAX. Please wrap '
      'the value in a Variable type instead.'
    )
  object.__setattr__(self, name, value)
def __deepcopy__(self: M, memo=None) -> M:
  """Deep-copy via split/merge so shared graph structure is handled."""
  state, graphdef = self.split()
  graphdef = deepcopy(graphdef)
  state = deepcopy(state)
  return graphdef.merge(state)
def __hash__(self) -> int:
  # Identity-based hash: modules hash by their unique id, not contents.
  return hash(self._module__state.id)
def __nnx_repr__(self):
  """Yield repr parts, guarding against reference cycles via a global set."""
  global SEEN_MODULES_REPR
  if SEEN_MODULES_REPR is None:
    # Outermost repr call owns (and later clears) the seen-set.
    SEEN_MODULES_REPR = set()
    clear_seen = True
  else:
    clear_seen = False
  if self._module__state.id in SEEN_MODULES_REPR:
    yield reprlib.Object(type=type(self), empty_repr='...')
    return
  yield reprlib.Object(type=type(self))
  SEEN_MODULES_REPR.add(self._module__state.id)
  try:
    for name, value in vars(self).items():
      # Show submodules, and non-Variable public attributes.
      if isinstance(value, Module) or (
        not isinstance(value, Variable) and not name.startswith('_')
      ):
        yield reprlib.Attr(name, repr(value))
  finally:
    if clear_seen:
      SEEN_MODULES_REPR = None
# NOTE(review): presumably a @classmethod — decorator lost in extraction.
def init(cls: type[M], *args, **kwargs) -> tuple[State, GraphDef[M]]:
  """Construct the module and immediately split it into (State, GraphDef)."""
  return cls(*args, **kwargs).split()
# NOTE(review): presumably a @classmethod — decorator lost in extraction.
def create_abstract(cls: type[M]) -> type[M]:
  """Return a proxy constructor that builds the module under jax.eval_shape,
  producing abstract (shape/dtype only) parameters."""
  def lift_rngs(kwargs: dict[str, tp.Any]):
    # Rebuild an Rngs object from the forked mapping inside eval_shape.
    if 'rngs' in kwargs and isinstance(rngs := kwargs['rngs'], tp.Mapping):
      kwargs['rngs'] = Rngs(rngs)
    return kwargs
  def _create_abstract(accessor: DelayedAccessor, *args, **kwargs):
    constructor = accessor(cls)
    # Fork Rngs outside the traced function so keys remain concrete.
    if 'rngs' in kwargs and isinstance(rngs := kwargs['rngs'], Rngs):
      kwargs['rngs'] = rngs.fork()
    state, graphdef = jax.eval_shape(
      lambda: constructor(*args, **lift_rngs(kwargs)).split()
    )
    return graphdef.merge(state)
  return CallableProxy(_create_abstract)  # type: ignore
# NOTE(review): presumably a @classmethod — decorator lost in extraction.
def partial_init(cls: type[M], state: State, *states: State) -> type[M]:
  """Creates a constructor that initializes the Module with the given state.
  ``partial_init`` takes one or more States and returns a constructor that uses
  ``jax.jit`` to initialize the Module and update its state with the given
  States. It's semantically equivalent to::
    module = MyModule(*args, **kwargs)
    module.update(state, *states)
  However, thanks to dead code elimination the resulting constructor will only
  initialize the subset of ``Variable``'s that were part of the given state(s).
  Example::
    >>> import jax.numpy as jnp
    >>> import jax
    >>> from flax.experimental import nnx
    ...
    >>> bias = jax.random.normal(jax.random.key(0), (4,))
    >>> state = nnx.State({'bias': bias}) # in reality load it from a checkpoint
    >>> linear = nnx.Linear.partial_init(state)(2, 4, rngs=nnx.Rngs(1))
    >>> y = linear(jnp.ones((1, 2)))
    ...
    >>> assert jnp.allclose(linear.bias, bias)
    >>> assert y.shape == (1, 4)
  Args:
    state: The State to initialize the Module with.
    *states: Additional States to initialize the Module with.
  Returns:
    A constructor that initializes the Module with the given States.
  """
  states = (state, *states)
  def lift_rngs(kwargs: dict[str, tp.Any]):
    # Rebuild an Rngs object from the forked mapping inside the jit.
    if 'rngs' in kwargs and isinstance(rngs := kwargs['rngs'], tp.Mapping):
      kwargs['rngs'] = Rngs(rngs)
    return kwargs
  def _partial_init(accessor: DelayedAccessor, *args, **kwargs):
    constructor: tp.Callable[[], M] = accessor(cls)
    if 'rngs' in kwargs and isinstance(rngs := kwargs['rngs'], Rngs):
      kwargs['rngs'] = rngs.fork()
    def _partial_init_constructor():
      # Construct, overwrite with the provided states, and split; jit's DCE
      # skips initializing Variables that the states fully overwrite.
      module = constructor(*args, **lift_rngs(kwargs))
      module.update(*states)
      return module.split()
    graphdef: GraphDef[M]
    state: State
    state, graphdef = jax.jit(_partial_init_constructor)()
    module = graphdef.merge(state)
    return module
  return CallableProxy(_partial_init)  # type: ignore
def clone(self: M) -> M:
  """Return an independent copy of this module via split + merge."""
  return merge(self.split())
# NOTE(review): the three stub signatures below look like typing overloads —
# the @tp.overload decorators appear lost in extraction; confirm upstream.
def split(self: M) -> tuple[State, GraphDef[M]]:
  ...
def split(self: M, first: filterlib.Filter, /) -> tuple[State, GraphDef[M]]:
  ...
def split(
  self: M,
  first: filterlib.Filter,
  second: filterlib.Filter,
  /,
  *filters: filterlib.Filter,
) -> tuple[State, tpe.Unpack[tuple[State, ...]], GraphDef[M]]:
  ...
def split(
  self: M, *filters: filterlib.Filter
) -> tuple[State, tpe.Unpack[tuple[State, ...]], GraphDef[M]]:
  """Flatten into one State per filter (or a single State) plus a GraphDef."""
  state, graphdef, _ = graph_utils.graph_flatten(self)
  if len(filters) == 0:
    states = (state,)
  elif len(filters) == 1:
    states = (state.split(filters[0]),)
  else:
    states = state.split(filters[0], filters[1], *filters[2:])
  return *states, graphdef
def get_state(self) -> 'State':
  """Return this module's ``State``, discarding the ``GraphDef``."""
  state, _graphdef = self.split()
  return state
def get_graphdef(self: 'M') -> 'GraphDef[M]':
  """Return this module's ``GraphDef``, discarding the ``State``."""
  _state, graphdef = self.split()
  return graphdef
# NOTE(review): the first two signatures below look like typing overloads —
# the @tp.overload decorators appear lost in extraction; confirm upstream.
def extract(self, first: filterlib.Filter, /) -> State:
  ...
def extract(
  self,
  first: filterlib.Filter,
  second: filterlib.Filter,
  /,
  *filters: filterlib.Filter,
) -> tuple[State, ...]:
  ...
def extract(
  self,
  first: filterlib.Filter,
  /,
  *filters: filterlib.Filter,
) -> tp.Union[State, tuple[State, ...]]:
  """Return the sub-State(s) selected by the filter(s); non-destructive."""
  state = self.get_state()
  if len(filters) == 0:
    states = state.extract(first)
  else:
    states = state.extract(first, filters[0], *filters[1:])
  return states
# NOTE(review): the first two signatures below look like typing overloads —
# the @tp.overload decorators appear lost in extraction; confirm upstream.
def pop(
  self,
  filter: filterlib.Filter,
  /,
) -> State:
  ...
def pop(
  self,
  filter: filterlib.Filter,
  filter2: filterlib.Filter,
  /,
  *filters: filterlib.Filter,
) -> tuple[State, ...]:
  ...
def pop(
  self, *filters: filterlib.Filter
) -> tp.Union[State, tuple[State, ...]]:
  """Remove and return the State(s) matching the filters (mutates self)."""
  if len(filters) == 0:
    raise ValueError('Expected at least one filter')
  states = graph_utils.graph_pop(self, filters)
  if len(states) == 1:
    # Single filter: unwrap for convenience.
    return states[0]
  else:
    return states
def apply(self: M) -> ApplyCaller[M]:
  """Return a proxy that runs a method on a clone, yielding (output, clone).

  The original module is left untouched; all mutations land on the clone.
  """
  def _apply(accessor: DelayedAccessor, *args, **kwargs) -> tuple[tp.Any, M]:
    module = self.clone()
    fn = accessor(module)
    out = fn(*args, **kwargs)
    return out, module
  return CallableProxy(_apply)  # type: ignore
def update(self: M, update: Updates[M], /, *updates: Updates[M]) -> None:
  """Update this module in place from States and/or one GraphDef/Module.

  At most one GraphDef or Module may appear across all updates; its static
  structure is applied first, then every State is applied dynamically.
  """
  updates = (update, *updates)
  def _states_and_moduledef(
    updates,
  ) -> tuple[list[State], tp.Optional[Module]]:
    # Flatten arbitrary pytrees of updates, stopping at GraphDef/State.
    leaves = jax.tree_util.tree_leaves(
      updates, is_leaf=lambda x: isinstance(x, (GraphDef, State))
    )
    states: list[State] = []
    module: tp.Optional[Module] = None
    for leaf in leaves:
      if isinstance(leaf, (Module, GraphDef)):
        if module is not None:
          raise ValueError(
            'Expected only one GraphDef or Module in the updates'
          )
        if isinstance(leaf, Module):
          # A full Module contributes both its structure and its state.
          module = leaf
          states.append(leaf.get_state())
        else:
          module = leaf.make_empty()
      elif isinstance(leaf, State):
        states.append(leaf)
      else:
        raise ValueError(
          'Expected a GraphDef, Module or State, got'
          f' {type(leaf).__name__}'
        )
    return states, module
  states, module_update = _states_and_moduledef(updates)
  if module_update is not None:
    graph_utils.graph_update_static(self, module_update)
  if states:
    graph_utils.graph_update_dynamic(self, states)
def sow(
  self,
  variable_type: tp.Type[variableslib.Variable[tp.Any]],
  name: str,
  value: A,
  reduce_fn: tp.Callable[[B, A], B] = tuple_reduce,
  init_fn: tp.Callable[[], B] = tuple_init,  # type: ignore
) -> None:
  """Accumulate ``value`` into attribute ``name``, creating it on first use.

  Existing values are combined with ``reduce_fn`` (by default appended to a
  tuple); an existing attribute must already be a Variable of exactly
  ``variable_type``.
  """
  if hasattr(self, name):
    variable = getattr(self, name)
    if not isinstance(variable, variableslib.Variable):
      raise ValueError(
        f"Expected '{name}' to be a Variable, got {type(variable).__name__}"
      )
    elif type(variable) != variable_type:
      raise ValueError(
        f"Expected '{name}' to be of type '{variable_type.__name__}', "
        f"got '{type(variable).__name__}'"
      )
    variable.raw_value = reduce_fn(variable.raw_value, value)
  else:
    reduced_value = reduce_fn(init_fn(), value)
    setattr(self, name, variable_type(reduced_value))
def modules(self) -> tp.Iterator[tuple[Path, Module]]:
  """Yield (path, submodule) for every Module node reachable from self."""
  for path, value in graph_utils.iter_nodes(self):
    if isinstance(value, Module):
      yield path, value
def set_attributes(
  self,
  *filters: filterlib.Filter,
  raise_if_not_found: bool = True,
  **attributes: tp.Any,
) -> None:
  """Sets the attributes of nested Modules including the current Module.
  If the attribute is not found in the Module, it is ignored.
  Example::
    >>> from flax.experimental import nnx
    ...
    >>> class Block(nnx.Module):
    ...   def __init__(self, din, dout, *, rngs: nnx.Rngs):
    ...     self.linear = nnx.Linear(din, dout, rngs=rngs)
    ...     self.dropout = nnx.Dropout(0.5, deterministic=False)
    ...     self.batch_norm = nnx.BatchNorm(10, use_running_average=False, rngs=rngs)
    ...
    >>> block = Block(2, 5, rngs=nnx.Rngs(0))
    >>> block.dropout.deterministic, block.batch_norm.use_running_average
    (False, False)
    >>> block.set_attributes(deterministic=True, use_running_average=True)
    >>> block.dropout.deterministic, block.batch_norm.use_running_average
    (True, True)
  ``Filter``'s can be used to set the attributes of specific Modules::
    >>> block = Block(2, 5, rngs=nnx.Rngs(0))
    >>> block.set_attributes(nnx.Dropout, deterministic=True, use_running_average=True)
    >>> # Only the dropout will be modified
    >>> block.dropout.deterministic, block.batch_norm.use_running_average
    (True, False)
  Args:
    *filters: Filters to select the Modules to set the attributes of.
    raise_if_not_found: If True (default), raises a ValueError if at least one attribute
      instance is not found in one of the selected Modules.
    **attributes: The attributes to set.
  """
  remaining_attributes = set(attributes.keys())
  if not filters:
    # No filters means: apply to every module in the graph.
    filters = (True,)
  predicates = tuple(map(filterlib.to_predicate, filters))
  for path, module in self.modules():
    for predicate in predicates:
      if predicate(path, module):
        for name, value in attributes.items():
          if hasattr(module, name):
            if name in remaining_attributes:
              remaining_attributes.remove(name)
            setattr(module, name, value)
        # Each module is handled at most once, by its first matching filter.
        break
  if remaining_attributes and raise_if_not_found:
    raise ValueError(
      f'Could not find at least one instance of the following attributes: {remaining_attributes}'
    )
def __init_subclass__(cls, experimental_pytree: bool = False) -> None:
  """Register each Module subclass with graph_utils, optionally as a pytree."""
  super().__init_subclass__()
  graph_utils.register_mutable_node_type(
    type=cls,
    flatten=_module_graph_flatten,
    set_key=_module_graph_set_key,
    pop_key=_module_graph_pop_key,
    create_empty=_module_graph_create_empty,
    clear=_module_graph_clear,
  )
  if experimental_pytree:
    # Opt-in: make instances usable directly inside jax transforms.
    jtu.register_pytree_with_keys(
      cls,
      partial(_module_flatten, with_keys=True),
      _module_unflatten,
      flatten_func=partial(_module_flatten, with_keys=False),
    )
def _module_graph_clear(module: 'Module', cls: 'tp.Type[M]'):
  """Drop every attribute of ``module`` except its internal ModuleState."""
  preserved = module._module__state
  attrs = vars(module)
  attrs.clear()
  attrs['_module__state'] = preserved
22,773 | from __future__ import annotations
import dataclasses
import typing as tp
from abc import ABCMeta
from copy import deepcopy
from functools import partial
import jax
import jax.tree_util as jtu
import numpy as np
import typing_extensions as tpe
from flax.experimental.nnx.nnx import (
errors,
filterlib,
graph_utils,
ids,
reprlib,
tracers,
)
from flax.experimental.nnx.nnx import variables as variableslib
from flax.experimental.nnx.nnx.graph_utils import GraphDef
from flax.experimental.nnx.nnx.proxy_caller import (
ApplyCaller,
CallableProxy,
DelayedAccessor,
)
from flax.experimental.nnx.nnx.rnglib import Rngs
from flax.experimental.nnx.nnx.state import State
from flax.experimental.nnx.nnx.variables import Variable
from flax.typing import Path
A = tp.TypeVar('A')
The provided code snippet includes necessary dependencies for implementing the `first_from` function. Write a Python function `def first_from(*args: tp.Optional[A], error_msg: str) -> A` to solve the following problem:
Return the first non-None argument. If all arguments are None, raise a ValueError with the given error message. Args: *args: the arguments to check error_msg: the error message to raise if all arguments are None Returns: The first non-None argument.
Here is the function:
def first_from(*args: 'tp.Optional[A]', error_msg: str) -> 'A':
  """Return the first non-None argument.

  If all arguments are None, raise a ValueError with the given error message.

  Args:
    *args: the arguments to check
    error_msg: the error message to raise if all arguments are None

  Returns:
    The first non-None argument.
  """
  missing = object()  # sentinel distinct from every possible argument
  found = next((candidate for candidate in args if candidate is not None), missing)
  if found is missing:
    raise ValueError(error_msg)
  return found
22,774 | import collections
import dataclasses
import os
import input_pipeline
import jax
import jax.numpy as jnp
import models
import numpy as np
import optax
import temperature_sampler
import tensorflow as tf
import utils
from absl import logging
from clu import metric_writers, periodic_actions
from configs import default
from jax import random
from jax.sharding import Mesh, NamedSharding
from jax.sharding import PartitionSpec as P
from utils import HasCache, TrainState
from flax import linen as nn
from flax.experimental import nnx
from flax.training import checkpoints, common_utils
The provided code snippet includes necessary dependencies for implementing the `per_host_sum_pmap` function. Write a Python function `def per_host_sum_pmap(in_tree)` to solve the following problem:
Execute psum on in_tree's leaves over one device per host.
Here is the function:
def per_host_sum_pmap(in_tree):
  """Execute psum on in_tree's leaves over one device per host."""
  # Group devices by host and keep a single representative per host so the
  # cross-host psum is not inflated by per-host device counts.
  host2devices = collections.defaultdict(list)
  for d in jax.devices():
    host2devices[d.process_index].append(d)
  devices = [host2devices[k][0] for k in host2devices]
  host_psum = jax.pmap(lambda x: jax.lax.psum(x, 'i'), 'i', devices=devices)
  def pre_pmap(xs):
    # Add the leading device axis of size 1 that pmap expects.
    return jax.tree_util.tree_map(
      lambda x: jnp.broadcast_to(x, (1,) + x.shape), xs
    )
  def post_pmap(xs):
    # Strip the device axis again.
    return jax.tree_util.tree_map(lambda x: x[0], xs)
  return post_pmap(host_psum(pre_pmap(in_tree)))
22,775 | import collections
import dataclasses
import os
import input_pipeline
import jax
import jax.numpy as jnp
import models
import numpy as np
import optax
import temperature_sampler
import tensorflow as tf
import utils
from absl import logging
from clu import metric_writers, periodic_actions
from configs import default
from jax import random
from jax.sharding import Mesh, NamedSharding
from jax.sharding import PartitionSpec as P
from utils import HasCache, TrainState
from flax import linen as nn
from flax.experimental import nnx
from flax.training import checkpoints, common_utils
def create_learning_rate_schedule(learning_rate: float, warmup_steps: int):
  """Creates a rsqrt schedule with linear warmup."""
  # Linear ramp 0 -> learning_rate over warmup_steps, then inverse-sqrt
  # decay (rsqrt_schedule, defined elsewhere) shifted so the two pieces
  # join continuously at the boundary.
  return optax.join_schedules(
    [
      optax.linear_schedule(
        init_value=0,
        end_value=learning_rate,
        transition_steps=warmup_steps,
      ),
      rsqrt_schedule(init_value=learning_rate, shift=warmup_steps),
    ],
    boundaries=[warmup_steps],
  )
def train_step(
  state: TrainState,
  batch,
  learning_rate_fn,
  label_smoothing=0.0,
  dropout_rng=None,
):
  """Perform a single training step."""
  # X_position and X_segmentation are needed only when using "packed examples"
  # where multiple sequences are packed into the same example with this
  # metadata.
  # if such features are not present they are ignored and the example is treated
  # like a normal, unpacked sequence example.
  train_keys = ['inputs', 'inputs_position', 'inputs_segmentation']
  (inputs, inputs_positions, inputs_segmentation) = (
    batch.get(k, None) for k in train_keys
  )
  # Tokens with id > 0 get weight 1; padding (id 0) is masked out of the loss.
  weights = jnp.where(inputs > 0, 1, 0).astype(jnp.float32)
  # Derive a fresh per-step dropout key from the base key and step counter.
  dropout_rng = jax.random.fold_in(dropout_rng, state.step)
  def loss_fn(params):
    """loss function used for training."""
    module = state.graphdef.merge(params)
    module.set_attributes(deterministic=False, decode=False)
    logits = module(
      inputs,
      inputs_positions=inputs_positions,
      inputs_segmentation=inputs_segmentation,
      rngs=nnx.Rngs(dropout=dropout_rng),
    )
    loss, weight_sum = compute_weighted_cross_entropy(
      logits, inputs, weights, label_smoothing
    )
    mean_loss = loss / weight_sum
    return mean_loss, logits
  step = state.step
  lr = learning_rate_fn(step)
  grad_fn = jax.value_and_grad(loss_fn, has_aux=True)
  (_, logits), grads = grad_fn(state.params)
  new_state = state.apply_gradients(grads=grads)
  metrics = compute_metrics(logits, inputs, weights)
  metrics['learning_rate'] = lr
  return new_state, metrics
def eval_step(
  params: nnx.State,
  batch,
  static: nnx.GraphDef[models.TransformerLM],
  label_smoothing=0.0,
):
  """Calculate evaluation metrics on a batch."""
  inputs = batch['inputs']
  # Mask padding tokens (id 0) out of the metrics.
  weights = jnp.where(inputs > 0, 1.0, 0.0)
  module = static.merge(params)
  # Evaluation: no dropout, no incremental decoding.
  module.set_attributes(deterministic=True, decode=False)
  logits = module(inputs)
  return compute_metrics(logits, inputs, weights, label_smoothing)
def predict_step(
  inputs,
  params: nnx.State,
  rngkey: jax.Array,
  static: nnx.GraphDef[models.TransformerLM],
  eos_id: int,
  max_decode_len: int,
  config: models.TransformerConfig,
  temperature: float,
  top_k: int,
):
  """Predict language model on a batch."""
  module = static.merge(params)
  # TODO(cgarciae): check how pytorch does this.
  # Initialize the autoregressive decode cache on every layer that has one.
  for _path, m in module.modules():
    if isinstance(m, HasCache):
      input_shape = (inputs.shape[0], max_decode_len, config.emb_dim)
      m.init_cache(input_shape, dtype=config.dtype)
  cache = module.extract(nnx.Cache)
  def tokens_ids_to_logits(flat_ids, cache: nnx.State):
    """Token slice to logits from decoder model."""
    # --> [batch * beam, 1, vocab]
    module = static.merge(params, cache)
    module.set_attributes(deterministic=True, decode=True)
    logits = module(flat_ids)
    # Extract the updated cache so the sampler can thread it forward.
    cache = module.extract(nnx.Cache)
    # Remove singleton sequence-length dimension:
    # [batch, 1, vocab] --> [batch, vocab]
    logits = logits.squeeze(axis=1)
    return logits, cache
  # Using the above-defined single-step decoder function, run a
  # beam search over possible sequences given input encoding.
  seqs = temperature_sampler.temperature_sample(
    inputs,
    cache,
    tokens_ids_to_logits,
    rngkey,
    temperature=temperature,
    topk=top_k,
    eos_token=eos_id,
  )
  return seqs
def evaluate(
  *,
  jit_eval_step,
  state: TrainState,
  eval_ds: tf.data.Dataset,
  num_eval_steps: int,
):
  """Evaluate the target and return a dictionary with the metrics."""
  logging.info('Gathering evaluation metrics.')
  eval_metrics = []
  eval_iter = iter(eval_ds)  # pytype: disable=wrong-arg-types
  for _, eval_batch in zip(range(num_eval_steps), eval_iter):
    eval_batch = jax.tree_util.tree_map(lambda x: x._numpy(), eval_batch)  # pylint: disable=protected-access
    metrics = jit_eval_step(state.params, eval_batch, state.graphdef)
    eval_metrics.append(metrics)
  # Sum each metric across batches, then normalize by the summed denominator.
  eval_metrics = common_utils.stack_forest(eval_metrics)
  eval_metrics_sums = jax.tree_util.tree_map(jnp.sum, eval_metrics)
  eval_denominator = eval_metrics_sums.pop('denominator')
  eval_summary = jax.tree_util.tree_map(
    lambda x: x / eval_denominator,  # pylint: disable=cell-var-from-loop
    eval_metrics_sums,
  )
  return eval_summary
def generate_prediction(
  *,
  jit_pred_step,
  static: nnx.GraphDef[models.TransformerLM],
  params: nnx.State,
  tokenized_prompts,
  eos_id,
  inference_rng,
  decode_tokens,
  config: default.Config,
  model_config: models.TransformerConfig,
):
  """Generate text from the prompt."""
  n_devices = jax.local_device_count()
  logging.info('Generating text.')
  predictions = []
  # Use batch of prompts provided by user.
  for pred_batch in jnp.array_split(
    tokenized_prompts, int(np.ceil(len(tokenized_prompts) / n_devices))
  ):
    cur_pred_batch_size = pred_batch.shape[0]
    if cur_pred_batch_size % n_devices:
      # Pad the batch up to a multiple of the device count so it shards evenly.
      padded_size = int(np.ceil(cur_pred_batch_size / n_devices) * n_devices)
      pred_batch = jax.tree_util.tree_map(
        lambda x: pad_examples(x, padded_size), pred_batch
      )  # pylint: disable=cell-var-from-loop
    pred_batch = common_utils.shard(pred_batch)
    inference_rng, sub_rng = random.split(inference_rng)
    inference_rngs = random.split(sub_rng, n_devices)
    predicted = jit_pred_step(
      pred_batch,
      params,
      inference_rngs,
      static,
      eos_id,
      config.max_predict_length,
      model_config,
      config.sampling_temperature,
      config.sampling_top_k,
    )
    predicted = tohost(predicted)
    # Iterate through non-padding examples of batch.
    for s in predicted[:cur_pred_batch_size]:
      prediction = decode_tokens(s)
      logging.info('Sample: %s', str(prediction))
      predictions.append(prediction)
  # Save generated texts for tensorboard.
  exemplars = ''
  for prediction in predictions:
    exemplars += f'{prediction}\n\n'
  return exemplars
The provided code snippet includes necessary dependencies for implementing the `train_and_evaluate` function. Write a Python function `def train_and_evaluate(config: default.Config, workdir: str)` to solve the following problem:
Runs a training and evaluation loop. Args: config: Configuration to use. workdir: Working directory for checkpoints and TF summaries. If this contains a checkpoint, training will be resumed from the latest checkpoint.
Here is the function:
def train_and_evaluate(config: default.Config, workdir: str):
"""Runs a training and evaluation loop.
Args:
config: Configuration to use.
workdir: Working directory for checkpoints and TF summaries. If this
contains checkpoint training will be resumed from the latest checkpoint.
"""
tf.io.gfile.makedirs(workdir)
vocab_path = config.vocab_path
if vocab_path is None:
vocab_path = os.path.join(workdir, 'sentencepiece_model')
config.vocab_path = vocab_path
tf.io.gfile.makedirs(os.path.split(vocab_path)[0])
# Load Dataset
# ---------------------------------------------------------------------------
logging.info('Initializing dataset.')
train_ds, eval_ds, _, encoder = input_pipeline.get_datasets(
n_devices=jax.local_device_count(), config=config, vocab_path=vocab_path
)
train_iter = iter(train_ds)
vocab_size = int(encoder.vocab_size())
eos_id = temperature_sampler.EOS_ID # Default Sentencepiece EOS token.
def decode_tokens(toks):
valid_toks = toks[: np.argmax(toks == eos_id) + 1].astype(np.int32)
return encoder.detokenize(valid_toks).numpy().decode('utf-8')
def encode_strings(strs, max_len):
tokenized_batch = np.zeros((len(strs), max_len), np.int32)
for i, s in enumerate(strs):
toks = encoder.tokenize(s).numpy()
# Remove EOS token in prompt.
tokenized_batch[i, : toks.shape[0] - 1] = toks[:-1]
return tokenized_batch
tokenized_prompts = encode_strings(
[config.prompts], config.max_predict_length
)
logging.info('Initializing model, optimizer, and step functions.')
# Build Model and Optimizer
# ---------------------------------------------------------------------------
model_config = models.TransformerConfig(
vocab_size=vocab_size,
output_vocab_size=vocab_size,
logits_via_embedding=config.logits_via_embedding,
dtype=jnp.bfloat16 if config.use_bfloat16 else jnp.float32,
emb_dim=config.emb_dim,
num_heads=config.num_heads,
num_layers=config.num_layers,
qkv_dim=config.qkv_dim,
mlp_dim=config.mlp_dim,
max_len=max(config.max_target_length, config.max_eval_target_length),
dropout_rate=config.dropout_rate,
attention_dropout_rate=config.attention_dropout_rate,
kernel_init=nn.initializers.xavier_uniform(),
bias_init=nn.initializers.normal(stddev=1e-6),
axis_rules=config.axis_rules,
)
# Mesh definition
devices_array = utils.create_device_mesh(config)
mesh = Mesh(devices_array, config.mesh_axes)
# print(mesh.shape)
# exit()
start_step = 0
rng = jax.random.PRNGKey(config.seed)
rng, init_rng = jax.random.split(rng)
rng, inference_rng = random.split(rng)
def constructor(config: models.TransformerConfig, key: jax.Array):
return models.TransformerLM(config, rngs=nnx.Rngs(params=key))
learning_rate_fn = create_learning_rate_schedule(
learning_rate=config.learning_rate, warmup_steps=config.warmup_steps
)
optimizer = optax.adamw(
learning_rate_fn,
b1=0.9,
b2=0.98,
eps=1e-9,
weight_decay=config.weight_decay,
)
state, state_sharding = utils.setup_initial_state(
constructor, optimizer, model_config, init_rng, mesh
)
data_sharding = NamedSharding(mesh, P(config.data_sharding))
if config.restore_checkpoints:
# Restore unreplicated optimizer + model state from last checkpoint.
state = checkpoints.restore_checkpoint(workdir, state)
# Grab last step.
start_step = int(state.step)
writer = metric_writers.create_default_writer(
workdir, just_logging=jax.process_index() > 0
)
if start_step == 0:
writer.write_hparams(dataclasses.asdict(config))
# compile multidevice versions of train/eval/predict step fn.
jit_train_step = jax.jit(
train_step,
in_shardings=(
state_sharding,
data_sharding,
None,
), # type: ignore
out_shardings=(state_sharding, None), # type: ignore
static_argnums=(2, 3),
donate_argnums=0,
)
jit_eval_step = jax.jit(
eval_step,
in_shardings=(
state_sharding.params,
data_sharding,
), # type: ignore
out_shardings=None, # type: ignore
static_argnums=(2, 3),
)
# Since the inputs and rngkey args for predict_step will be batched,
# we must vmap them, otherwise the global arrays will be seen in each device
jit_pred_step = jax.jit(
jax.vmap(
predict_step,
in_axes=(
0,
jax.tree_util.tree_map(lambda x: None, state.params),
0,
None,
None,
None,
None,
None,
None,
),
),
in_shardings=(
data_sharding,
state_sharding.params,
data_sharding,
), # type: ignore
out_shardings=data_sharding, # type: ignore
static_argnums=tuple(range(3, 9)),
)
# Main Train Loop
# ---------------------------------------------------------------------------
# We init the first set of dropout PRNG keys, but update it afterwards inside
# the main pmap'd training update for performance.
dropout_rngs = rng
logging.info('Starting training loop.')
hooks = []
report_progress = periodic_actions.ReportProgress(
num_train_steps=config.num_train_steps, writer=writer
)
if jax.process_index() == 0:
hooks += [
report_progress,
periodic_actions.Profile(logdir=workdir, num_profile_steps=5),
]
train_metrics = []
with metric_writers.ensure_flushes(writer):
for step in range(start_step, config.num_train_steps):
is_last_step = step == config.num_train_steps - 1
# Shard data to devices and do a training step.
with jax.profiler.StepTraceAnnotation('train', step_num=step):
batch = next(train_iter)
batch = jax.tree_map(lambda x: jnp.asarray(x), batch)
state, metrics = jit_train_step(
state, batch, learning_rate_fn, 0.0, dropout_rngs
)
train_metrics.append(metrics)
# Quick indication that training is happening.
logging.log_first_n(logging.INFO, 'Finished training step %d.', 5, step)
for h in hooks:
h(step)
# Periodic metric handling.
if step % config.eval_every_steps == 0 or is_last_step:
with report_progress.timed('training_metrics'):
logging.info('Gathering training metrics.')
train_metrics = common_utils.stack_forest(train_metrics)
lr = train_metrics.pop('learning_rate').mean()
metrics_sums = jax.tree_util.tree_map(jnp.sum, train_metrics)
denominator = metrics_sums.pop('denominator')
summary = jax.tree_util.tree_map(
lambda x: x / denominator, metrics_sums
) # pylint: disable=cell-var-from-loop
summary['learning_rate'] = lr
summary['perplexity'] = jnp.clip(
jnp.exp(summary['loss']), a_max=1.0e4
)
summary = {'train_' + k: v for k, v in summary.items()}
writer.write_scalars(step, summary)
train_metrics = []
with report_progress.timed('eval'):
eval_results = evaluate(
jit_eval_step=jit_eval_step,
state=state,
eval_ds=eval_ds,
num_eval_steps=config.num_eval_steps,
)
# (clipped) perplexity after averaging log-perplexities
eval_results['perplexity'] = jnp.clip(
jnp.exp(eval_results['loss']), a_max=1.0e4
)
writer.write_scalars(
step, {'eval_' + k: v for k, v in eval_results.items()}
)
with report_progress.timed('generate_text'):
exemplars = generate_prediction(
jit_pred_step=jit_pred_step,
static=state.graphdef,
params=state.params,
tokenized_prompts=tokenized_prompts,
eos_id=eos_id,
inference_rng=inference_rng,
decode_tokens=decode_tokens,
config=config,
model_config=model_config,
)
writer.write_texts(step, {'samples': exemplars})
# Save a checkpoint on one host after every checkpoint_freq steps.
save_checkpoint = (
step % config.checkpoint_every_steps == 0 or is_last_step
)
if config.save_checkpoints and save_checkpoint:
logging.info('Saving checkpoint step %d.', step)
with report_progress.timed('checkpoint'):
checkpoints.save_checkpoint_multiprocess(workdir, state, step) | Runs a training and evaluation loop. Args: config: Configuration to use. workdir: Working directory for checkpoints and TF summaries. If this contains checkpoint training will be resumed from the latest checkpoint. |
22,776 | import dataclasses
import os
import tempfile
import time
from typing import Any, Dict, Iterable, Tuple
import jax
import tensorflow as tf
import tensorflow_text as tftxt
from absl import logging
from sentencepiece import SentencePieceTrainer
def _train_sentencepiece(
    dataset: tf.data.Dataset,
    *,
    vocab_size: int,
    maxchars: int = int(1e7),
    model_path: str,
    model_type: str = 'unigram',
    character_coverage: float = 1.0,
    data_keys=('inputs', 'targets'),
):
  """Train SentencePiece tokenizer from subset of tf dataset.

  Args:
    dataset: tf.dataset
    vocab_size: int: size of vocab tokens to train.
    maxchars: int: number of characters to use for sentencepiece training.
    model_path: str: path of model file to save vocab model to.
    model_type: str: type of sentencepiece vocab to train.
    character_coverage: amount of characters covered by the model, good defaults
      are 0.9995 for languages with rich character set like Japanese or Chinese
      and 1.0 for other languages with small character set.
    data_keys: Tuple[str]: keys of dataset to use for training.

  Returns:
    path to the trained sentencepiece vocabulary model.
  """
  if model_path.startswith('gs://'):
    # GCS paths must not be run through abspath/expanduser.
    abs_model_path = model_path
  else:
    abs_model_path = os.path.abspath(os.path.expanduser(model_path))
  # Dump up to `maxchars` characters of raw text into a local file that the
  # SentencePiece trainer can consume. NOTE(review): _dump_chars_to_textfile is
  # defined elsewhere in this module — confirm its return value is
  # (filename, char_count).
  fname, _ = _dump_chars_to_textfile(
      dataset, maxchars=maxchars, data_keys=data_keys
  )
  with tempfile.NamedTemporaryFile(
      delete=False, prefix='/tmp/sp_tmp'
  ) as model_fp:
    pass  # we just want a prefix'd tmp-filename
  # SentencePieceTrainer takes a single CLI-style argument string.
  argstr = ' '.join(
      [
          f'--input={fname}',
          f'--vocab_size={vocab_size}',
          f'--character_coverage={character_coverage}',
          f'--model_prefix={model_fp.name}',
          f'--model_type={model_type}',
      ]
  )
  SentencePieceTrainer.Train(argstr)
  if jax.process_index() == 0:
    # Use an intermediate filename that is renamed to the target name to address
    # create and fill delays.
    copy_rename_path = abs_model_path + '.rntmp'
    tf.io.gfile.copy(model_fp.name + '.model', copy_rename_path, overwrite=True)
    tf.io.gfile.rename(copy_rename_path, abs_model_path, overwrite=True)
    logging.info('copied %s to %s', model_fp.name + '.model', abs_model_path)
  else:
    # Non-primary hosts poll until process 0 has published the model file.
    while not tf.io.gfile.exists(abs_model_path):
      time.sleep(1)
    time.sleep(1)
  return abs_model_path
def _load_sentencepiece_tokenizer(
    model_path: str,
    add_bos: bool = False,
    add_eos: bool = True,
    reverse: bool = False,
):
  """Build a tf-text SentencepieceTokenizer from the serialized model at `model_path`."""
  with tf.io.gfile.GFile(model_path, 'rb') as model_file:
    serialized_model = model_file.read()
  return tftxt.SentencepieceTokenizer(
      model=serialized_model,
      add_bos=add_bos,
      add_eos=add_eos,
      reverse=reverse,
  )
The provided code snippet includes necessary dependencies for implementing the `load_or_train_tokenizer` function. Write a Python function `def load_or_train_tokenizer( dataset: tf.data.Dataset, *, vocab_path: str, vocab_size: int, max_corpus_chars: int, data_keys: Tuple[str, str] = ('inputs', 'targets'), )` to solve the following problem:
Loads the tokenizer at `vocab_path` or trains one from `dataset`.
Here is the function:
def load_or_train_tokenizer(
    dataset: tf.data.Dataset,
    *,
    vocab_path: str,
    vocab_size: int,
    max_corpus_chars: int,
    data_keys: Tuple[str, str] = ('inputs', 'targets'),
):
  """Load the SentencePiece tokenizer at `vocab_path`, training one from `dataset` if absent."""
  try:
    # Fast path: a vocab model already exists on disk.
    return _load_sentencepiece_tokenizer(vocab_path)
  except tf.errors.NotFoundError:
    logging.info('SentencePiece vocab not found, building one from data.')
    trained_path = _train_sentencepiece(
        dataset,
        vocab_size=vocab_size,
        maxchars=max_corpus_chars,
        model_path=vocab_path,
        data_keys=data_keys,
    )
    return _load_sentencepiece_tokenizer(trained_path)
22,777 | from __future__ import annotations
import dataclasses
from typing import Any, Optional
import jax
import jax.numpy as jnp
import numpy as np
from jax import lax
from flax.experimental import nnx
from flax.experimental.nnx.examples.lm1b.configs import default
def shift_right(x: jax.Array, axis: int = 1):
  """Shift `x` one position toward higher indices along `axis`, zero-filling the front."""
  # Pad one zero at the leading edge of `axis` only.
  widths = [(1, 0) if dim == axis else (0, 0) for dim in range(x.ndim)]
  padded = jnp.pad(
      x, widths, mode='constant', constant_values=x.dtype.type(0)
  )
  # Drop the trailing element so the output shape matches the input.
  return lax.dynamic_slice_in_dim(padded, 0, padded.shape[axis] - 1, axis)
The provided code snippet includes necessary dependencies for implementing the `shift_inputs` function. Write a Python function `def shift_inputs(x: jax.Array, segment_ids=None, axis: int = 1)` to solve the following problem:
Shift inputs and replace EOS by 0 for packed inputs.
Here is the function:
def shift_inputs(x: jax.Array, segment_ids=None, axis: int = 1):
  """Shift token inputs right; zero positions that cross packed-segment boundaries."""
  shifted = shift_right(x, axis=axis)
  if segment_ids is None:
    return shifted
  # For packed targets, the first shifted token of each new segment must be 0
  # rather than the EOS carried over from the previous segment.
  same_segment = segment_ids == shift_right(segment_ids, axis=axis)
  return shifted * same_segment
22,778 | from __future__ import annotations
import dataclasses
from typing import Any, Optional
import jax
import jax.numpy as jnp
import numpy as np
from jax import lax
from flax.experimental import nnx
from flax.experimental.nnx.examples.lm1b.configs import default
The provided code snippet includes necessary dependencies for implementing the `sinusoidal_init` function. Write a Python function `def sinusoidal_init(max_len=2048, min_scale=1.0, max_scale=10000.0)` to solve the following problem:
1D Sinusoidal Position Embedding Initializer. Args: max_len: maximum possible length for the input. min_scale: float: minimum frequency-scale in sine grating. max_scale: float: maximum frequency-scale in sine grating. Returns: output: init function returning `(1, max_len, d_feature)`
Here is the function:
def sinusoidal_init(max_len=2048, min_scale=1.0, max_scale=10000.0):
  """1D Sinusoidal Position Embedding Initializer.

  Args:
    max_len: maximum possible length for the input.
    min_scale: float: minimum frequency-scale in sine grating.
    max_scale: float: maximum frequency-scale in sine grating.

  Returns:
    output: init function returning `(1, max_len, d_feature)`
  """

  def init(key, shape, dtype=np.float32):
    """Build the sinusoid table; `key` and `dtype` are unused (deterministic, float32)."""
    del key, dtype
    d_feature = shape[-1]
    half = d_feature // 2
    # Geometrically spaced frequencies between min_scale and max_scale.
    decay = -np.log(max_scale / min_scale) / (half - 1)
    frequencies = min_scale * np.exp(np.arange(half) * decay)
    angles = np.arange(max_len)[:, np.newaxis] * frequencies
    table = np.zeros((max_len, d_feature), dtype=np.float32)
    table[:, :half] = np.sin(angles)
    table[:, half : 2 * half] = np.cos(angles)
    # Leading singleton batch axis: [1, max_len, d_feature].
    return jnp.array(table[np.newaxis, :, :])

  return init
22,779 | import jax.numpy as jnp
from jax import lax, random
EOS_ID = 2
The provided code snippet includes necessary dependencies for implementing the `temperature_sample` function. Write a Python function `def temperature_sample( prompt_inputs, init_cache, tokens_to_logits, prng_key, temperature=1.0, topk=20, eos_token=EOS_ID, )` to solve the following problem:
Temperature sampling for language model generation. Args: prompt_inputs: array: [batch_size, max_decode_len] int32 sequence of tokens. init_cache: flax attention cache. tokens_to_logits: fast autoregressive decoder function taking single token slices and cache and returning next-token logits and updated cache. prng_key: JAX PRNGKey. temperature: float: sampling temperature factor. As it approaches zero this becomes equivalent to greedy sampling. topk: integer: if nonzero only use the top-k logits to sample next token, if zero don't use any cutoff and sample from full logits over vocabulary. eos_token: int: end-of-sentence token for target vocabulary. Returns: Array of sampled sequences: [batch_size, max_decode_len]
Here is the function:
def temperature_sample(
    prompt_inputs,
    init_cache,
    tokens_to_logits,
    prng_key,
    temperature=1.0,
    topk=20,
    eos_token=EOS_ID,
):
  """Temperature sampling for language model generation.

  Args:
    prompt_inputs: array: [batch_size, max_decode_len] int32 sequence of tokens.
    init_cache: flax attention cache.
    tokens_to_logits: fast autoregressive decoder function taking single token
      slices and cache and returning next-token logits and updated cache.
    prng_key: JAX PRNGKey.
    temperature: float: sampling temperature factor. As it approaches
      zero this becomes equivalent to greedy sampling.
    topk: integer: if nonzero only use the top-k logits to sample next token,
      if zero don't use any cutoff and sample from full logits over vocabulary.
    eos_token: int: end-of-sentence token for target vocabulary.

  Returns:
    Array of sampled sequences: [batch_size, max_decode_len]
  """
  batch_size = prompt_inputs.shape[0]
  max_decode_len = prompt_inputs.shape[1]
  end_marker = jnp.array(eos_token)
  temperature = jnp.array(temperature)
  # Initialize sampling loop state.
  # initial loop PRNGKey
  rng0 = prng_key
  # loop position counter. Starts at -1 so the first body iteration writes
  # sequence position 0.
  i0 = jnp.array(-1)
  # per batch-item holding current token in loop.
  token0 = jnp.zeros((batch_size, 1), dtype=jnp.int32)
  # per batch-item state bit indicating if sentence has finished.
  ended0 = jnp.zeros((batch_size, 1), dtype=jnp.bool_)
  # (batch, length) array containing prefix prompt tokens for sampling loop
  # as well as the generated output of newly sampled tokens.
  sequences0 = prompt_inputs
  # Sampling loop state is stored in a simple tuple.
  sampling_loop_init_state = (i0, sequences0, init_cache, token0, ended0, rng0)

  def sampling_loop_cond_fn(state):
    """Sampling loop termination condition."""
    (i, _, _, _, ended, _) = state
    # Have we reached max decoding length?
    not_at_end = i < max_decode_len - 1
    # Have all sampled sequences reached an end marker?
    all_sequences_ended = jnp.all(ended)
    return not_at_end & (~all_sequences_ended)

  def sampling_loop_body_fn(state):
    """Sampling loop state update."""
    i, sequences, cache, cur_token, ended, rng = state
    # Split RNG for sampling.
    rng1, rng2 = random.split(rng)
    # Call fast-decoder model on current tokens to get next-position logits.
    logits, new_cache = tokens_to_logits(cur_token, cache)
    # Sample next token from logits.
    # TODO(levskaya): add top-p "nucleus" sampling option.
    if topk:
      # Get top-k logits and their indices, sample within these top-k tokens.
      topk_logits, topk_idxs = lax.top_k(logits, topk)
      topk_token = jnp.expand_dims(
          random.categorical(rng1, topk_logits / temperature).astype(jnp.int32),
          axis=-1,
      )
      # Return the original indices corresponding to the sampled top-k tokens.
      next_token = jnp.squeeze(
          jnp.take_along_axis(topk_idxs, topk_token, axis=-1), axis=-1
      )
    else:
      next_token = random.categorical(rng1, logits / temperature).astype(
          jnp.int32
      )
    # Only use sampled tokens if we're past provided prefix tokens.
    # NOTE(review): this treats a 0 at position i+1 as "past the prompt", so
    # prompts are assumed to be right-padded with zeros — confirm in the caller.
    out_of_prompt = sequences[:, i + 1] == 0
    next_token = (
        next_token * out_of_prompt + sequences[:, i + 1] * ~out_of_prompt
    )
    # If end-marker reached for batch item, only emit padding tokens.
    next_token_or_endpad = next_token[None] * ~ended
    ended |= next_token_or_endpad == end_marker
    # Add current sampled tokens to recorded sequences.
    new_sequences = lax.dynamic_update_slice(
        sequences, next_token_or_endpad, (0, i + 1)
    )
    return (i + 1, new_sequences, new_cache, next_token_or_endpad, ended, rng2)

  # Run sampling loop and collect final state.
  final_state = lax.while_loop(
      sampling_loop_cond_fn, sampling_loop_body_fn, sampling_loop_init_state
  )
  # Pick part of the state corresponding to the sampled sequences.
  final_sequences = final_state[1]
  return final_sequences
22,780 | from __future__ import annotations
import dataclasses
class Config:
  """Hyperparameter configuration for the lm1b example.

  NOTE(review): `replace` delegates to `dataclasses.replace`, which requires
  this class to be a dataclass — presumably a `@dataclasses.dataclass`
  decorator applies at the definition site; confirm there.
  """

  # Path to load or store sentencepiece vocab file.
  vocab_path: str | None = None
  # Vocabulary size if `vocab_path` is not given.
  vocab_size: int = 30_000
  # Maximum number of characters to use for training.
  max_corpus_chars: int = 10**7
  # Name of TFDS translation dataset to use.
  dataset_name: str = 'lm1b'
  # Optional name of TFDS translation dataset to use for evaluation.
  eval_dataset_name: str = 'lm1b'
  # Optional name of TFDS split to use for evaluation.
  eval_split: str = 'test'
  # Per device batch size for training.
  per_device_batch_size: int = 32
  # Per device batch size for training.
  eval_per_device_batch_size: int = 32
  # Sampling temperature for language model inference.
  sampling_temperature: float = 0.6
  # Top k cutoff for logit sampling. If 0 then no top-k cutoff is used.
  sampling_top_k: int = 20
  # Number of steps to take during training.
  num_train_steps: int = 500_000
  # Number of steps to take during evaluation.
  # Large enough to evaluate all samples: 306_688 / (32 * 8) = 1198
  num_eval_steps: int = 2_000
  # Number of steps to generate predictions.
  # -1 will use the whole eval dataset.
  num_predict_steps: int = -1
  # Base learning rate.
  learning_rate: float = 0.0016
  # Linear learning rate warmup.
  warmup_steps: int = 1000
  # Cross entropy loss label smoothing.
  label_smoothing: float = 0.0
  # Decay factor for AdamW style weight decay.
  weight_decay: float = 0.1
  # Maximum length cutoff for training examples.
  max_target_length: int = 128
  # Maximum length cutoff for eval examples.
  max_eval_target_length: int = 512
  # Maximum length cutoff for predicted tokens.
  max_predict_length: int = 50
  # Final logit transform uses embedding matrix transpose.
  logits_via_embedding: bool = False
  # Number of transformer layers.
  num_layers: int = 6
  # Size of query/key/value for attention.
  qkv_dim: int = 512
  # Size of embeddings.
  emb_dim: int = 512
  # Size of the MLP.
  mlp_dim: int = 2048
  # Number of attention heads.
  num_heads: int = 8
  # Dropout rate.
  dropout_rate: float = 0.1
  # Attention dropout rate.
  attention_dropout_rate: float = 0.1
  # Whether to save model checkpoints.
  save_checkpoints: bool = True
  # Whether to restore from existing model checkpoints.
  restore_checkpoints: bool = True
  # Save a checkpoint every these number of steps.
  checkpoint_every_steps: int = 10_000
  # Frequency of eval during training, e.g. every 1_000 steps.
  eval_every_steps: int = 1_000
  # Use bfloat16 mixed precision training instead of float32.
  use_bfloat16: bool = True
  # Integer for PRNG random seed.
  seed: int = 0
  # Prompt for language model sampling,
  # taken from MaxText (https://github.com/google/maxtext/blob/main/MaxText/configs/base.yml).
  prompts: str = 'I love to '
  # Parallelism
  mesh_axes: tuple[str, ...] = ('data', 'fsdp', 'tensor')
  # NOTE(review): `MeshRules` is defined elsewhere in this module; this default
  # is a shared class-level instance — confirm it is treated as immutable.
  axis_rules: MeshRules = MeshRules(
      embed='fsdp',
      mlp='tensor',
      kv='tensor',
      vocab='tensor',
  )
  data_sharding: tuple[str, ...] = ('data',)
  # One axis for each parallelism type may hold a placeholder (-1)
  # value to auto-shard based on available slices and devices.
  # By default, product of the DCN axes should equal number of slices
  # and product of the ICI axes should equal number of devices per slice.
  # ICI (Inter-Chip Interconnection): A high-speed connection between
  # sets of TPU chips, which form the TPU network.
  # DCN (Data Center Network): A connection between the TPU networks;
  # not as fast as ICI.
  # ICI has around 100x the bandwidth of DCN, but it is not a general
  # purpose connection, which is why DCN is necessary for scaling to
  # extremely large ML models.
  dcn_data_parallelism: int = -1
  dcn_fsdp_parallelism: int = 1
  dcn_tensor_parallelism: int = 1
  ici_data_parallelism: int = 1
  ici_fsdp_parallelism: int = -1
  ici_tensor_parallelism: int = 1

  def replace(self, **kwargs):
    """Return a copy of this config with the given fields replaced."""
    return dataclasses.replace(self, **kwargs)
The provided code snippet includes necessary dependencies for implementing the `get_config` function. Write a Python function `def get_config()` to solve the following problem:
Get the default hyperparameter configuration.
Here is the function:
def get_config():
  """Return the default hyperparameter configuration."""
  return Config()
22,781 | import os
from typing import Dict, List, Optional, Union
import tensorflow as tf
import tensorflow_datasets as tfds
import tokenizer
from clu import deterministic_data
from configs import default
AUTOTUNE = tf.data.experimental.AUTOTUNE
def get_raw_dataset(
    dataset_builder: tfds.core.DatasetBuilder, split: str
) -> tf.data.Dataset:
  """Loads a raw text dataset and normalizes feature keys.

  Args:
    dataset_builder: TFDS dataset builder that can build `split`.
    split: Split to use. This must be the full split. We shard the split across
      multiple hosts and currently don't support sharding subsplits.

  Returns:
    Dataset with source and target language features mapped to 'inputs' and
    'targets'.
  """
  num_examples = dataset_builder.info.splits[split].num_examples
  # Each host deterministically reads a disjoint shard of the split.
  per_host_split = deterministic_data.get_read_instruction_for_host(
      split, num_examples, drop_remainder=False
  )
  ds = dataset_builder.as_dataset(split=per_host_split, shuffle_files=False)
  # NOTE(review): NormalizeFeatureNamesOp is defined elsewhere in this module;
  # presumably it renames dataset-specific keys to 'inputs'/'targets'.
  ds = ds.map(
      NormalizeFeatureNamesOp(dataset_builder.info), num_parallel_calls=AUTOTUNE
  )
  return ds
def preprocess_data(
    dataset,
    shuffle: bool,
    num_epochs: Optional[int] = 1,
    pack_examples: bool = True,
    shuffle_buffer_size: int = 1024,
    max_length: int = 512,
    batch_size: int = 256,
    drop_remainder: bool = True,
    prefetch_size: int = AUTOTUNE,
):
  """Shuffle and batch/pack the given dataset.

  Args:
    dataset: dataset with 'inputs'/'targets' token features.
    shuffle: whether to shuffle examples before batching.
    num_epochs: number of passes over the data (None repeats forever).
    pack_examples: if True, pack multiple short examples into each row;
      otherwise pad each example to `max_length`.
    shuffle_buffer_size: shuffle buffer size, used only when `shuffle` is True.
    max_length: examples longer than this are dropped; also the packed/padded
      width. A value <= 0 disables the length filter.
    batch_size: global batch size.
    drop_remainder: whether to drop the final partial batch.
    prefetch_size: number of elements to prefetch (falsy disables prefetch).

  Returns:
    The transformed tf.data.Dataset.
  """

  def length_filter(max_len):
    # Keep only examples whose longer side fits in max_len tokens.
    def filter_fn(x):
      source, target = x['inputs'], x['targets']
      l = tf.maximum(tf.shape(source)[0], tf.shape(target)[0])
      return tf.less(l, max_len + 1)

    return filter_fn

  if max_length > 0:
    dataset = dataset.filter(length_filter(max_length))
  if shuffle:
    dataset = dataset.shuffle(shuffle_buffer_size)
  dataset = dataset.repeat(num_epochs)
  if pack_examples:
    # NOTE(review): pack_dataset is defined elsewhere in this module.
    dataset = pack_dataset(dataset, max_length)
    dataset = dataset.batch(batch_size, drop_remainder=drop_remainder)
  else:  # simple (static-shape) padded batching
    dataset = dataset.padded_batch(
        batch_size,
        padded_shapes={'inputs': max_length, 'targets': max_length},
        padding_values={'inputs': 0, 'targets': 0},
        drop_remainder=drop_remainder,
    )
  if prefetch_size:
    dataset = dataset.prefetch(prefetch_size)
  return dataset
The provided code snippet includes necessary dependencies for implementing the `get_datasets` function. Write a Python function `def get_datasets( config: default.Config, *, n_devices: int, vocab_path: Optional[str] = None, )` to solve the following problem:
Load and return dataset of batched examples for use during training.
Here is the function:
def get_datasets(
    config: default.Config,
    *,
    n_devices: int,
    vocab_path: Optional[str] = None,
):
  """Load and return dataset of batched examples for use during training.

  Args:
    config: experiment configuration (dataset names, batch sizes, lengths).
    n_devices: number of local devices; global batch = per-device batch * this.
    vocab_path: optional path to the sentencepiece model; defaults to
      ~/lm1b_sentencepiece_model.

  Returns:
    (train_ds, eval_ds, predict_ds, sp_tokenizer) tuple.
  """
  if vocab_path is None:
    vocab_path = os.path.expanduser('~/lm1b_sentencepiece_model')
  train_ds_builder = tfds.builder(config.dataset_name)
  train_data = get_raw_dataset(train_ds_builder, 'train')
  # Evaluation can come from a different TFDS dataset; fall back to training
  # dataset builder when none is configured.
  if config.eval_dataset_name:
    eval_ds_builder = tfds.builder(config.eval_dataset_name)
  else:
    eval_ds_builder = train_ds_builder
  eval_data = get_raw_dataset(eval_ds_builder, config.eval_split)
  # Tokenize data.
  sp_tokenizer = tokenizer.load_or_train_tokenizer(
      train_data,
      vocab_path=vocab_path,
      vocab_size=config.vocab_size,
      max_corpus_chars=config.max_corpus_chars,
  )
  train_data = train_data.map(
      tokenizer.TokenizeOp(sp_tokenizer), num_parallel_calls=AUTOTUNE
  )
  eval_data = eval_data.map(
      tokenizer.TokenizeOp(sp_tokenizer), num_parallel_calls=AUTOTUNE
  )
  batch_size = config.per_device_batch_size * n_devices
  if config.eval_per_device_batch_size > 0:
    eval_batch_size = config.eval_per_device_batch_size * n_devices
  else:
    eval_batch_size = batch_size
  # Training: shuffled, packed, repeated indefinitely.
  train_ds = preprocess_data(
      train_data,
      shuffle=True,
      num_epochs=None,
      pack_examples=True,
      batch_size=batch_size,
      max_length=config.max_target_length,
  )
  # Eval: single pass, padded (not packed) batches.
  eval_ds = preprocess_data(
      eval_data,
      shuffle=False,
      pack_examples=False,
      batch_size=eval_batch_size,
      max_length=config.max_eval_target_length,
  )
  # Prediction keeps partial batches so every example is decoded.
  predict_ds = preprocess_data(
      eval_data,
      shuffle=False,
      pack_examples=False,
      batch_size=eval_batch_size,
      max_length=config.max_predict_length,
      drop_remainder=False,
  )
  return train_ds, eval_ds, predict_ds, sp_tokenizer
22,782 | import jax
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpy as np
import optax
from flax.experimental import nnx
X = np.linspace(0, 1, 100)[:, None]
Y = 0.8 * X**2 + 0.1 + np.random.normal(0, 0.1, size=X.shape)
def dataset(batch_size):
  """Yield an endless stream of random (X, Y) mini-batches of `batch_size` rows."""
  while True:
    picks = np.random.choice(len(X), size=batch_size)
    yield X[picks], Y[picks]
22,783 | import jax
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpy as np
import optax
from flax.experimental import nnx
class Count(nnx.Variable[nnx.A]):
  # Marker Variable subclass: lets call-counter state be split/filtered
  # separately from nnx.Param leaves when partitioning module state.
  pass
class MLP(nnx.Module):
  # NOTE(review): `Linear` is referenced but not defined in this snippet —
  # presumably a custom linear layer defined elsewhere in the module; confirm.

  def __init__(self, din, dhidden, dout, *, rngs: nnx.Rngs):
    """Two-layer perceptron din -> dhidden -> dout with a forward-pass counter."""
    self.count = Count(jnp.array(0))  # number of __call__ invocations
    self.linear1 = Linear(din, dhidden, rngs=rngs)
    self.linear2 = Linear(dhidden, dout, rngs=rngs)

  def __call__(self, x):
    """Apply linear -> relu -> linear; increments `count` as a side effect."""
    self.count.value += 1
    x = self.linear1(x)
    x = jax.nn.relu(x)
    x = self.linear2(x)
    return x
params, counts, static = MLP(din=1, dhidden=32, dout=1, rngs=nnx.Rngs(0)).split(
nnx.Param, ...
)
del params, counts
y_pred = model(X)
def train_step(state: nnx.TrainState[MLP], batch):
  """One optimizer update step in functional style; returns the new state."""
  x, y = batch

  def loss_fn(params):
    # Apply the model functionally: params + current 'counts' collection in,
    # predictions plus the mutated state collections out.
    y_pred, (updates, _) = state.apply(params, 'counts')(x)
    counts = updates.extract(Count)
    loss = jnp.mean((y - y_pred) ** 2)  # MSE
    return loss, counts

  grads, counts = jax.grad(loss_fn, has_aux=True)(state.params)
  # sgd update (via the state's optimizer); also writes back updated counts.
  state = state.apply_gradients(grads=grads, counts=counts)
  return state
22,784 | import jax
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpy as np
import optax
from flax.experimental import nnx
class MLP(nnx.Module):
  # NOTE(review): `Linear` and `Count` are referenced but not defined in this
  # snippet — presumably defined elsewhere in the module; confirm.

  def __init__(self, din, dhidden, dout, *, rngs: nnx.Rngs):
    """Two-layer perceptron din -> dhidden -> dout with a forward-pass counter."""
    self.count = Count(jnp.array(0))  # number of __call__ invocations
    self.linear1 = Linear(din, dhidden, rngs=rngs)
    self.linear2 = Linear(dhidden, dout, rngs=rngs)

  def __call__(self, x):
    """Apply linear -> relu -> linear; increments `count` as a side effect."""
    self.count.value += 1
    x = self.linear1(x)
    x = jax.nn.relu(x)
    x = self.linear2(x)
    return x
y_pred = model(X)
def test_step(state: nnx.TrainState[MLP], batch):
  """Compute evaluation metrics (MSE loss) for one batch; no parameter update."""
  x, y = batch
  # Functional apply with the state's 'params' and 'counts' collections.
  y_pred, _ = state.apply('params', 'counts')(x)
  loss = jnp.mean((y - y_pred) ** 2)
  return {'loss': loss}
22,785 | import jax
from flax.experimental import nnx
def load_pretrained():
  """Return a stand-in 'pretrained' 784->128 linear layer (deterministic seed 0)."""
  pretrained_layer = nnx.Linear(784, 128, rngs=nnx.Rngs(0))
  return pretrained_layer
22,786 | import jax
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpy as np
from flax.experimental import nnx
X = np.linspace(0, 1, 100)[:, None]
Y = 0.8 * X**2 + 0.1 + np.random.normal(0, 0.1, size=X.shape)
def dataset(batch_size):
  """Yield an endless stream of random (X, Y) mini-batches of `batch_size` rows."""
  while True:
    picks = np.random.choice(len(X), size=batch_size)
    yield X[picks], Y[picks]
22,787 | import jax
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpy as np
from flax.experimental import nnx
class MLP(nnx.Module):
  # NOTE(review): `Linear` and `Count` are referenced but not defined in this
  # snippet — presumably defined elsewhere in the module; confirm.

  def __init__(self, din, dhidden, dout, *, rngs: nnx.Rngs):
    """Two-layer perceptron din -> dhidden -> dout with a forward-pass counter."""
    self.count = Count(jnp.array(0))  # number of __call__ invocations
    self.linear1 = Linear(din, dhidden, rngs=rngs)
    self.linear2 = Linear(dhidden, dout, rngs=rngs)

  def __call__(self, x):
    """Apply linear -> relu -> linear; increments `count` as a side effect."""
    self.count.value += 1
    x = self.linear1(x)
    x = jax.nn.relu(x)
    x = self.linear2(x)
    return x
y_pred = model(X)
def train_step(model: MLP, batch):
  """One SGD step applied in place to `model` (mutates it; returns nothing)."""
  x, y = batch

  def loss_fn(model: MLP):
    y_pred = model(x)
    return jnp.mean((y - y_pred) ** 2)  # MSE

  # |--default--|
  grad: nnx.State = nnx.grad(loss_fn, wrt=nnx.Param)(model)
  # sgd update: w <- w - 0.1 * g, written back into the module in place
  model.update(
      jax.tree_map(lambda w, g: w - 0.1 * g, model.extract(nnx.Param), grad)
  )
  # no return!!!
22,788 | import jax
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpy as np
from flax.experimental import nnx
class MLP(nnx.Module):
  # NOTE(review): `Linear` and `Count` are referenced but not defined in this
  # snippet — presumably defined elsewhere in the module; confirm.

  def __init__(self, din, dhidden, dout, *, rngs: nnx.Rngs):
    """Two-layer perceptron din -> dhidden -> dout with a forward-pass counter."""
    self.count = Count(jnp.array(0))  # number of __call__ invocations
    self.linear1 = Linear(din, dhidden, rngs=rngs)
    self.linear2 = Linear(dhidden, dout, rngs=rngs)

  def __call__(self, x):
    """Apply linear -> relu -> linear; increments `count` as a side effect."""
    self.count.value += 1
    x = self.linear1(x)
    x = jax.nn.relu(x)
    x = self.linear2(x)
    return x
y_pred = model(X)
def test_step(model: MLP, batch):
  """Compute evaluation metrics (MSE loss) for one batch; no parameter update."""
  inputs, targets = batch
  predictions = model(inputs)
  return {'loss': jnp.mean((targets - predictions) ** 2)}
22,789 | from tempfile import TemporaryDirectory
import jax
import jax.numpy as jnp
import orbax.checkpoint as orbax
from flax.experimental import nnx
def create_model(seed: int):
  """Construct a 10 -> 20 -> 30 MLP whose parameters derive deterministically from `seed`."""
  rngs = nnx.Rngs(seed)
  return MLP(10, 20, 30, rngs=rngs)
def create_and_save(seed: int, path: str):
  """Create a seeded model and checkpoint its parameter state under `path`/state."""
  state = create_model(seed).get_state()
  # Persist the parameters with Orbax.
  orbax.PyTreeCheckpointer().save(f'{path}/state', state)
22,790 | from tempfile import TemporaryDirectory
import jax
import jax.numpy as jnp
import orbax.checkpoint as orbax
from flax.experimental import nnx
class MLP(nnx.Module):
def __init__(self, din: int, dmid: int, dout: int, *, rngs: nnx.Rngs):
def __call__(self, x: jax.Array) -> jax.Array:
def create_model(seed: int):
def load_model(path: str) -> MLP:
  """Restore an MLP from the Orbax checkpoint saved under `path`/state."""
  # create that model with abstract shapes: eval_shape traces create_model
  # without allocating real parameters, yielding a shape/dtype skeleton for
  # the checkpointer to fill in.
  state, static = jax.eval_shape(lambda: create_model(0).split())
  # Load the parameters
  checkpointer = orbax.PyTreeCheckpointer()
  state = checkpointer.restore(f'{path}/state', item=state)
  # Merge the parameters into the model
  model = static.merge(state)
  return model
22,792 | import jax
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpy as np
from flax.experimental import nnx
class Count(nnx.Variable[nnx.A]):
  # Marker Variable subclass: lets call-counter state be split/filtered
  # separately from nnx.Param leaves when partitioning module state.
  pass
y_pred = model(X)
def train_step(params, counts, batch):
  """One manual-SGD step in split/functional style; returns (params, counts).

  NOTE(review): `modeldef` is a module-level GraphDef referenced but not
  defined in this snippet — confirm it matches the split that produced
  `params`/`counts`.
  """
  x, y = batch

  def loss_fn(params):
    # Functional apply: returns predictions plus mutated state collections.
    y_pred, (updates, _) = modeldef.apply(params, counts)(x)
    counts_ = updates.extract(Count)
    loss = jnp.mean((y - y_pred) ** 2)  # MSE
    return loss, counts_

  grad, counts = jax.grad(loss_fn, has_aux=True)(params)
  # |-------- sgd ---------|
  params = jax.tree_map(lambda w, g: w - 0.1 * g, params, grad)
  return params, counts
22,793 | import jax
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpy as np
from flax.experimental import nnx
y_pred = model(X)
def test_step(params: nnx.State, counts: nnx.State, batch):
  """Compute evaluation metrics (MSE loss) for one batch; no parameter update.

  NOTE(review): `modeldef` is a module-level GraphDef defined elsewhere —
  confirm it matches the split that produced `params`/`counts`.
  """
  x, y = batch
  y_pred, _ = modeldef.apply(params, counts)(x)
  loss = jnp.mean((y - y_pred) ** 2)
  return {'loss': loss}
22,794 | import dataclasses
import typing as tp
import jax
import jax.numpy as jnp
import numpy as np
from jax.sharding import PartitionSpec as P
from flax.experimental import nnx
The provided code snippet includes necessary dependencies for implementing the `nd_dense_init` function. Write a Python function `def nd_dense_init(scale, mode, distribution)` to solve the following problem:
Initializer with in_axis, out_axis set at call time.
Here is the function:
def nd_dense_init(scale, mode, distribution):
  """Initializer with in_axis, out_axis set at call time."""

  def init_fn(key, shape, dtype, in_axis, out_axis) -> jax.Array:
    # Bind the axes only now, so one factory serves many layer shapes.
    variance_init = jax.nn.initializers.variance_scaling(
      scale, mode, distribution, in_axis, out_axis
    )
    return variance_init(key, shape, dtype)

  return init_fn
22,795 | import dataclasses
import typing as tp
import jax
import jax.numpy as jnp
import numpy as np
from jax.sharding import PartitionSpec as P
from flax.experimental import nnx
def make_attention_mask(
  query_input: tp.Any,
  key_input: tp.Any,
  pairwise_fn: tp.Callable = jnp.multiply,
  dtype: tp.Any = jnp.float32,
):
  """Build an attention mask of shape `[..., 1, q_len, kv_len]`.

  `pairwise_fn` combines every query position with every key position; the
  default (multiply) keeps (q, k) pairs where both inputs are nonzero.
  """
  # Broadcast queries against keys: [..., q_len, 1] op [..., 1, kv_len].
  pairwise = pairwise_fn(
    jnp.expand_dims(query_input, axis=-1),
    jnp.expand_dims(key_input, axis=-2),
  )
  # Insert a singleton head axis and cast to the requested dtype.
  return jnp.expand_dims(pairwise, axis=-3).astype(dtype)
def make_causal_mask(x, dtype=jnp.float32):
  """Causal mask over `x`'s last axis: position i may attend to j <= i."""
  positions = jnp.broadcast_to(
    jnp.arange(x.shape[-1], dtype=jnp.int32), x.shape
  )
  return make_attention_mask(positions, positions, jnp.greater_equal, dtype=dtype)
22,796 | import dataclasses
import typing as tp
import jax
import jax.numpy as jnp
import numpy as np
from jax.sharding import PartitionSpec as P
from flax.experimental import nnx
def sine_table(features, length, min_timescale=1.0, max_timescale=10000.0):
  """Precompute sin/cos tables of shape `(length, features)` for rotary embeddings."""
  fraction = jnp.arange(0, features, 2, dtype=jnp.float32) / features
  timescale = min_timescale * (max_timescale / min_timescale) ** fraction
  rotational_frequency = 1.0 / timescale
  # Must use high precision einsum here, bfloat16 rounding is catastrophic.
  angles = jnp.einsum(
    'i,j->ij',
    jnp.arange(length),
    rotational_frequency,
    precision=jax.lax.Precision.HIGHEST,
  )
  # Duplicate along the feature axis so the table covers all `features` dims.
  angles = jnp.concatenate([angles, angles], axis=-1)
  return jnp.sin(angles), jnp.cos(angles)
22,797 | import dataclasses
import typing as tp
import jax
import jax.numpy as jnp
import numpy as np
from jax.sharding import PartitionSpec as P
from flax.experimental import nnx
def rotate_half(x):
  """Map the last axis `[a, b] -> [-b, a]` (the RoPE 90-degree rotation)."""
  first, second = jnp.split(x, 2, axis=-1)
  return jnp.concatenate([-second, first], axis=-1)
The provided code snippet includes necessary dependencies for implementing the `apply_rotary_embedding` function. Write a Python function `def apply_rotary_embedding(q, k, cos, sin, index=None)` to solve the following problem:
Helper function to apply Rotary Embeddings.
Here is the function:
def apply_rotary_embedding(q, k, cos, sin, index=None):
  """Helper function to apply Rotary Embeddings."""
  # q is unpacked as (batch, q_len, q_heads, head_dim); k analogously.
  # NOTE(review): assumes cos/sin are (max_len, head_dim) tables such as
  # those produced by `sine_table` — confirm against callers.
  batch, qlen, qheads, d = q.shape
  kbatch, klen, kheads, kd = k.shape
  if index is not None:
    # Single-position (decode) case: broadcast one table row over q.
    qcos = jax.lax.broadcast_in_dim(
      cos[index, :], (batch, qlen, qheads, d), (3,)
    )
    qsin = jax.lax.broadcast_in_dim(
      sin[index, :], (batch, qlen, qheads, d), (3,)
    )
  else:
    # Full-sequence case: broadcast the first qlen rows over batch/heads.
    qcos = jax.lax.broadcast_in_dim(
      cos[:qlen, :], (batch, qlen, qheads, d), (1, 3)
    )
    qsin = jax.lax.broadcast_in_dim(
      sin[:qlen, :], (batch, qlen, qheads, d), (1, 3)
    )
  # Keys always use the full-sequence tables.
  kcos = jax.lax.broadcast_in_dim(
    cos[:klen, :], (batch, klen, kheads, d), (1, 3)
  )
  ksin = jax.lax.broadcast_in_dim(
    sin[:klen, :], (batch, klen, kheads, d), (1, 3)
  )
  # Standard RoPE rotation: x * cos + rotate_half(x) * sin.
  out_q = (q * qcos) + (rotate_half(q) * qsin)
  out_k = (k * kcos) + (rotate_half(k) * ksin)
  return out_q, out_k
22,798 | import dataclasses
import typing as tp
import jax
import jax.numpy as jnp
import numpy as np
from jax.sharding import PartitionSpec as P
from flax.experimental import nnx
def rms_norm(cfg, scale, x):
  """RMS-normalize `x` over its last axis and apply a learned `scale`.

  Statistics are computed in float32 for stability; the result is cast
  back to `cfg.dtype`.
  """
  x32 = jnp.asarray(x, jnp.float32)
  mean_sq = jnp.mean(jax.lax.square(x32), axis=-1, keepdims=True)
  normed = jnp.asarray(x32 * jax.lax.rsqrt(mean_sq + cfg.epsilon), cfg.dtype)
  return normed * jnp.asarray(scale, cfg.dtype)
22,799 | import dataclasses
import typing as tp
import jax
import jax.numpy as jnp
import numpy as np
from jax.sharding import PartitionSpec as P
from flax.experimental import nnx
class Config:
def dropout(cfg: Config, x, broadcast_dims=(-2,), *, rngs: nnx.Rngs):
  """Apply inverted dropout to `x`, sharing one mask value over `broadcast_dims`."""
  if cfg.dropout_rate == 0.0:
    return x
  # Collapse the broadcast dims so a single mask value covers each of them.
  mask_shape = list(x.shape)
  for dim in broadcast_dims:
    mask_shape[dim] = 1
  keep_rate = 1.0 - cfg.dropout_rate
  key = rngs.dropout()
  keep = jax.random.bernoulli(key, p=keep_rate, shape=mask_shape)
  # Scale kept activations by 1/keep_rate so the expectation matches eval.
  return jax.lax.select(
    jnp.broadcast_to(keep, x.shape), x / keep_rate, jnp.zeros_like(x)
  )
22,800 | import typing as tp
from functools import partial
import jax
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpy as np
import optax
from datasets import load_dataset
from flax.experimental import nnx
class Loss(nnx.Variable):
  # Variable-collection tag for loss terms recorded during the forward pass;
  # the train step gathers them via `updates.extract(Loss)`.
  pass
class VAE(nnx.Module):
  """Variational autoencoder: encode to a latent, decode back to an image."""

  def __init__(
    self,
    din: int,
    hidden_size: int,
    latent_size: int,
    output_shape: tp.Sequence[int],
    *,
    rngs: nnx.Rngs,
  ):
    self.output_shape = output_shape
    self.encoder = Encoder(din, hidden_size, latent_size, rngs=rngs)
    self.decoder = Decoder(
      latent_size, hidden_size, int(np.prod(output_shape)), rngs=rngs
    )

  def _decode(self, z) -> jax.Array:
    # Decode latents and restore the image shape (batch dim kept leading).
    flat = self.decoder(z)
    return jnp.reshape(flat, (-1, *self.output_shape))

  def __call__(self, x: jax.Array, *, rngs: nnx.Rngs) -> jax.Array:
    """Return reconstruction logits for inputs `x`."""
    return self._decode(self.encoder(x, rngs=rngs))

  def generate(self, z):
    """Map latents `z` to images in [0, 1] via a sigmoid."""
    return nnx.sigmoid(self._decode(z))
params, static = VAE(
din=int(np.prod(image_shape)),
hidden_size=256,
latent_size=latent_size,
output_shape=image_shape,
rngs=nnx.Rngs(0),
).split(nnx.Param)
def train_step(state: nnx.TrainState[VAE], x: jax.Array, key: jax.Array):
  """Run one VAE optimization step; returns (new_state, loss)."""

  def loss_fn(params: nnx.State):
    # Fold the step counter into the key so noise differs every step.
    rngs = nnx.Rngs(noise=jax.random.fold_in(key, state.step))
    logits, (updates, _) = state.apply(params)(x, rngs=rngs)
    # KL terms were recorded as `Loss` variables during the forward pass.
    kl_loss = sum(jax.tree_util.tree_leaves(updates.extract(Loss)), 0.0)
    reconstruction_loss = jnp.mean(
      optax.sigmoid_binary_cross_entropy(logits, x)
    )
    return reconstruction_loss + 0.1 * kl_loss

  loss, grads = jax.value_and_grad(loss_fn)(state.params)
  return state.apply_gradients(grads=grads), loss
22,801 | import typing as tp
from functools import partial
import jax
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpy as np
import optax
from datasets import load_dataset
from flax.experimental import nnx
class VAE(nnx.Module):
def __init__(
self,
din: int,
hidden_size: int,
latent_size: int,
output_shape: tp.Sequence[int],
*,
rngs: nnx.Rngs,
):
self.output_shape = output_shape
self.encoder = Encoder(din, hidden_size, latent_size, rngs=rngs)
self.decoder = Decoder(
latent_size, hidden_size, int(np.prod(output_shape)), rngs=rngs
)
def __call__(self, x: jax.Array, *, rngs: nnx.Rngs) -> jax.Array:
z = self.encoder(x, rngs=rngs)
logits = self.decoder(z)
logits = jnp.reshape(logits, (-1, *self.output_shape))
return logits
def generate(self, z):
logits = self.decoder(z)
logits = jnp.reshape(logits, (-1, *self.output_shape))
return nnx.sigmoid(logits)
y_pred = forward(state, x_sample, key)
def forward(
  state: nnx.TrainState[VAE], x: jax.Array, key: jax.Array
) -> jax.Array:
  """Reconstruct `x`, returning per-pixel probabilities in [0, 1]."""
  logits = state.apply('params')(x, rngs=nnx.Rngs(noise=key))[0]
  return jax.nn.sigmoid(logits)
22,802 | import typing as tp
from functools import partial
import jax
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpy as np
import optax
from datasets import load_dataset
from flax.experimental import nnx
class VAE(nnx.Module):
def __init__(
self,
din: int,
hidden_size: int,
latent_size: int,
output_shape: tp.Sequence[int],
*,
rngs: nnx.Rngs,
):
self.output_shape = output_shape
self.encoder = Encoder(din, hidden_size, latent_size, rngs=rngs)
self.decoder = Decoder(
latent_size, hidden_size, int(np.prod(output_shape)), rngs=rngs
)
def __call__(self, x: jax.Array, *, rngs: nnx.Rngs) -> jax.Array:
z = self.encoder(x, rngs=rngs)
logits = self.decoder(z)
logits = jnp.reshape(logits, (-1, *self.output_shape))
return logits
def generate(self, z):
logits = self.decoder(z)
logits = jnp.reshape(logits, (-1, *self.output_shape))
return nnx.sigmoid(logits)
def sample(state: nnx.TrainState[VAE], z: jax.Array) -> jax.Array:
  """Decode latent codes `z` into images with the current parameters."""
  result = state.apply('params').generate(z)
  return result[0]
22,803 | import typing as tp
from functools import partial
import jax
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpy as np
import optax
from datasets import load_dataset
from flax.experimental import nnx
def diff_round(x) -> jax.Array:
diff_round.defvjp(diff_round_fwd, diff_round_bwd)
def diff_round_fwd(x):
  """custom_vjp forward rule for diff_round: primal output, no residuals."""
  primal = diff_round(x)
  # No residuals needed — the backward rule is straight-through.
  return primal, None
22,804 | import typing as tp
from functools import partial
import jax
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpy as np
import optax
from datasets import load_dataset
from flax.experimental import nnx
def diff_round_bwd(_, g):
  """Straight-through backward rule: pass the cotangent through unchanged."""
  cotangent = g
  return (cotangent,)
22,805 | import typing as tp
from functools import partial
import jax
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpy as np
import optax
from datasets import load_dataset
from flax.experimental import nnx
@partial(jax.custom_vjp, nondiff_argnums=(1, 2))
def diff_clip(x, low, high) -> jax.Array:
  """Clamp `x` to [low, high] with a custom (straight-through) gradient.

  Declared as a `jax.custom_vjp` because the module immediately calls
  ``diff_clip.defvjp(diff_clip_fwd, diff_clip_bwd)`` — that attribute only
  exists on custom_vjp-wrapped functions, so the decorator is required.
  `nondiff_argnums=(1, 2)` matches the companion backward rule
  ``diff_clip_bwd(low, high, res, g)``, which receives the bounds as
  non-differentiable leading arguments and returns a single tangent.
  """
  return jnp.clip(x, low, high)
diff_clip.defvjp(diff_clip_fwd, diff_clip_bwd)
def diff_clip_fwd(x, low, high):
  """custom_vjp forward rule for diff_clip: primal output, no residuals."""
  clipped = diff_clip(x, low, high)
  return clipped, None
22,806 | import typing as tp
from functools import partial
import jax
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpy as np
import optax
from datasets import load_dataset
from flax.experimental import nnx
def diff_clip_bwd(_, _1, _2, dy):
  """Straight-through: ignore bounds and residuals, pass the cotangent on."""
  cotangent = dy
  return (cotangent,)
22,807 | import typing as tp
from functools import partial
import jax
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpy as np
import optax
from datasets import load_dataset
from flax.experimental import nnx
@jax.custom_vjp
def diff_round(x) -> jax.Array:
  """Round `x` to the nearest integer with a custom gradient.

  Declared as a `jax.custom_vjp` because the module immediately calls
  ``diff_round.defvjp(diff_round_fwd, diff_round_bwd)`` — that attribute
  only exists on custom_vjp-wrapped functions, so without the decorator the
  module fails at import time with AttributeError. The backward rule is
  straight-through (identity gradient).
  """
  return jnp.round(x)
diff_round.defvjp(diff_round_fwd, diff_round_bwd)
@partial(jax.custom_vjp, nondiff_argnums=(1, 2))
def diff_clip(x, low, high) -> jax.Array:
  """Clamp `x` to [low, high] with a custom (straight-through) gradient.

  Must be a `jax.custom_vjp` function: the ``diff_clip.defvjp(...)`` call
  that follows requires it. `nondiff_argnums=(1, 2)` matches the backward
  rule ``diff_clip_bwd(low, high, res, g)``.
  """
  return jnp.clip(x, low, high)
diff_clip.defvjp(diff_clip_fwd, diff_clip_bwd)
def f(x):
  """Simulate uint8 quantization of `x`: scale by 128, shift, round, clamp."""
  shifted = diff_round(x * 128) + 128
  return diff_clip(shifted, 0, 255)
22,808 | import typing as tp
from functools import partial
import jax
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpy as np
import optax
from datasets import load_dataset
from flax.experimental import nnx
class MLP(nnx.Module):
  """Two-layer MLP classifier over flattened inputs with a GELU in between."""

  def __init__(self, din: int, dmid: int, dout: int, *, rngs: nnx.Rngs):
    self.linear1 = nnx.Linear(din, dmid, rngs=rngs)
    self.linear2 = nnx.Linear(dmid, dout, rngs=rngs)

  def __call__(self, x: jax.Array) -> jax.Array:
    """Return class logits for a batch `x` (flattened past the batch dim)."""
    flat = x.reshape((x.shape[0], -1))
    hidden = jax.nn.gelu(self.linear1(flat))
    return self.linear2(hidden)
params, static = MLP(
din=np.prod(image_shape), dmid=256, dout=10, rngs=nnx.Rngs(0)
).split(nnx.Param)
def train_step(
  state: nnx.TrainState[MLP],
  inputs: jax.Array,
  labels: jax.Array,
):
  """One cross-entropy optimization step; returns (new_state, loss)."""

  def loss_fn(params: nnx.State):
    logits, _ = state.apply(params)(inputs)
    return jnp.mean(
      optax.softmax_cross_entropy_with_integer_labels(logits, labels)
    )

  loss, grads = jax.value_and_grad(loss_fn)(state.params)
  return state.apply_gradients(grads=grads), loss
22,809 | import typing as tp
from functools import partial
import jax
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpy as np
import optax
from datasets import load_dataset
from flax.experimental import nnx
class MLP(nnx.Module):
def __init__(self, din: int, dmid: int, dout: int, *, rngs: nnx.Rngs):
self.linear1 = nnx.Linear(din, dmid, rngs=rngs)
self.linear2 = nnx.Linear(dmid, dout, rngs=rngs)
def __call__(self, x: jax.Array) -> jax.Array:
x = x.reshape((x.shape[0], -1))
x = self.linear1(x)
x = jax.nn.gelu(x)
x = self.linear2(x)
return x
def eval_step(state: nnx.TrainState[MLP], inputs: jax.Array, labels: jax.Array):
  """Compute eval metrics: mean cross-entropy loss and top-1 accuracy."""
  logits, _ = state.apply('params')(inputs)
  metrics = {
    'loss': jnp.mean(
      optax.softmax_cross_entropy_with_integer_labels(logits, labels)
    ),
    'accuracy': jnp.mean(jnp.argmax(logits, axis=-1) == labels),
  }
  return metrics
22,810 | import typing as tp
from functools import partial
import jax
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpy as np
import optax
from datasets import load_dataset
from flax.experimental import nnx
class MLP(nnx.Module):
def __init__(self, din: int, dmid: int, dout: int, *, rngs: nnx.Rngs):
self.linear1 = nnx.Linear(din, dmid, rngs=rngs)
self.linear2 = nnx.Linear(dmid, dout, rngs=rngs)
def __call__(self, x: jax.Array) -> jax.Array:
x = x.reshape((x.shape[0], -1))
x = self.linear1(x)
x = jax.nn.gelu(x)
x = self.linear2(x)
return x
y_pred = forward(state, x_sample)
def forward(state: nnx.TrainState[MLP], inputs: jax.Array) -> jax.Array:
  """Predict class indices for `inputs` with the current parameters."""
  logits = state.apply('params')(inputs)[0]
  return jnp.argmax(logits, axis=-1)
22,811 | import typing as tp
from functools import partial
import jax
import jax.numpy as jnp
import matplotlib.pyplot as plt
import numpy as np
import optax
from datasets import load_dataset
from flax.experimental import nnx
x = jnp.linspace(-1.5, 1.5, 100)
print('X_train:', X_train.shape, X_train.dtype)
print('X_test:', X_test.shape, X_test.dtype)
def quantization_int8(x, s, z):
  """Quantize FP32 `x` to int8 using scale `s` and zero point `z`."""
  quantized = quantization(x, s, z, alpha_q=-128, beta_q=127)
  return quantized.astype(jnp.int8)
def dequantization(x_q, s, z):
  """Map int-quantized `x_q` back to FP32 via `s * (x_q - z)`."""
  # Widen to int32 first: x_q - z might go outside the quantization range.
  widened = x_q.astype(jnp.int32)
  return (s * (widened - z)).astype(jnp.float32)
def generate_quantization_int8_constants(alpha, beta):
  """Derive (scale, zero_point) mapping [alpha, beta] onto the int8 range."""
  num_bits = 8
  q_min = -(2 ** (num_bits - 1))      # -128
  q_max = 2 ** (num_bits - 1) - 1     # 127
  return generate_quantization_constants(
    alpha=alpha, beta=beta, alpha_q=q_min, beta_q=q_max
  )
def quantization_matrix_multiplication_int8(
  X_q, W_q, b_q, s_X, z_X, s_W, z_W, s_b, z_b, s_Y, z_Y
):
  """Simulate the int8 computation of ``Y = X @ W + b`` from quantized operands.

  Accumulation happens in int32/FP32; the result is re-quantized onto the
  int8 grid defined by ``(s_Y, z_Y)``.

  Args:
    X_q, W_q, b_q: int8-quantized input, weight, and bias tensors.
    s_X, z_X, s_W, z_W, s_b, z_b, s_Y, z_Y: per-tensor scale / zero point
      for X, W, b, and the output Y.

  Returns:
    The simulated quantized output, rounded and saturated to int8.
  """
  # Inner dimension of the matmul, needed for the zero-point cross term.
  p = W_q.shape[0]
  # Y_q_simulated is FP32 (the scale factors are floats).
  Y_q_simulated = (
    z_Y
    + (s_b / s_Y * (b_q.astype(jnp.int32) - z_b))
    + (
      (s_X * s_W / s_Y)
      * (
        jnp.matmul(X_q.astype(jnp.int32), W_q.astype(jnp.int32))
        - z_W * jnp.sum(X_q.astype(jnp.int32), axis=1, keepdims=True)
        - z_X * jnp.sum(W_q.astype(jnp.int32), axis=0, keepdims=True)
        + p * z_X * z_W
      )
    )
  )
  Y_q_simulated = jnp.round(Y_q_simulated, decimals=0)
  # Positional bounds: the `a_min`/`a_max` keywords were deprecated and then
  # removed from jnp.clip in newer JAX releases; positional args work on all.
  Y_q_simulated = jnp.clip(Y_q_simulated, -128, 127)
  Y_q_simulated = Y_q_simulated.astype(jnp.int8)
  return Y_q_simulated
print(x_optimize.shape)
def optimize2(
  self,
  pretrained: nnx.Linear,
  X: jax.Array,
):
  """Quantize a pretrained linear layer to int8 and verify the int8 matmul.

  Computes per-tensor (scale, zero-point) constants for the input X, the
  layer kernel W, the bias b, and the FP32 output Y; simulates the int8
  matrix multiplication; and asserts it matches the quantize->dequantize
  reference exactly.

  NOTE(review): relies on module-level helpers
  (`generate_quantization_int8_constants`, `quantization_int8`,
  `dequantization`, `quantization_matrix_multiplication_int8`) defined
  elsewhere in this file.
  """
  W = pretrained.kernel
  b = pretrained.bias
  assert b is not None
  # X: quantize the input; keep a dequantized copy for the reference path.
  alpha_X = jnp.min(X)
  beta_X = jnp.max(X)
  s_X, z_X = generate_quantization_int8_constants(alpha=alpha_X, beta=beta_X)
  X_q = quantization_int8(x=X, s=s_X, z=z_X)
  X_q_dq = dequantization(x_q=X_q, s=s_X, z=z_X)
  # W: same treatment for the kernel.
  alpha_W = jnp.min(W)
  beta_W = jnp.max(W)
  s_W, z_W = generate_quantization_int8_constants(alpha=alpha_W, beta=beta_W)
  W_q = quantization_int8(x=W, s=s_W, z=z_W)
  W_q_dq = dequantization(x_q=W_q, s=s_W, z=z_W)
  # b: same treatment for the bias.
  alpha_b = jnp.min(b)
  beta_b = jnp.max(b)
  s_b, z_b = generate_quantization_int8_constants(alpha=alpha_b, beta=beta_b)
  b_q = quantization_int8(x=b, s=s_b, z=z_b)
  b_q_dq = dequantization(x_q=b_q, s=s_b, z=z_b)
  # Y: FP32 reference output; its min/max define the output quantization grid.
  Y = jnp.matmul(X, W) + b
  alpha_Y = jnp.min(Y)
  beta_Y = jnp.max(Y)
  s_Y, z_Y = generate_quantization_int8_constants(alpha=alpha_Y, beta=beta_Y)
  Y_q = quantization_int8(x=Y, s=s_Y, z=z_Y)
  # Reference: FP32 matmul over dequantized operands, then re-quantized.
  Y_prime = jnp.matmul(X_q_dq, W_q_dq) + b_q_dq
  Y_prime_q = quantization_int8(x=Y_prime, s=s_Y, z=z_Y)
  Y_prime_q_dq = dequantization(x_q=Y_prime_q, s=s_Y, z=z_Y)
  print('Expected FP32 Y:')
  print(Y)
  print('Expected FP32 Y Quantized:')
  print(Y_q)
  # Simulated int8 matmul using only quantized operands and the constants.
  Y_q_simulated = quantization_matrix_multiplication_int8(
    X_q=X_q,
    W_q=W_q,
    b_q=b_q,
    s_X=s_X,
    z_X=z_X,
    s_W=s_W,
    z_W=z_W,
    s_b=s_b,
    z_b=z_b,
    s_Y=s_Y,
    z_Y=z_Y,
  )
  Y_simulated = dequantization(x_q=Y_q_simulated, s=s_Y, z=z_Y)
  print('Expected Quantized Y_q from Quantized Matrix Multiplication:')
  print(Y_q_simulated)
  print(
    'Expected Quantized Y_q from Quantized Matrix Multiplication Dequantized:'
  )
  print(Y_simulated)
  # Ensure the algorithm implementation is correct
  assert jnp.array_equal(Y_simulated, Y_prime_q_dq)
  assert jnp.array_equal(Y_q_simulated, Y_prime_q)
22,812 | import abc
import copy
import dataclasses
import warnings
from typing import Any, Callable
import jax
import flax
from flax.core.scope import VariableDict
from flax.typing import PathParts
from . import struct
empty_node = _EmptyNode()
def flatten_dict(xs, keep_empty_nodes=False, is_leaf=None, sep=None):
  """Flatten a nested dictionary.

  The nested keys are flattened to a tuple. See ``unflatten_dict`` on how to
  restore the nested dictionary structure.

  Example::

    >>> from flax.traverse_util import flatten_dict
    >>> xs = {'foo': 1, 'bar': {'a': 2, 'b': {}}}
    >>> flat_xs = flatten_dict(xs)
    >>> flat_xs
    {('foo',): 1, ('bar', 'a'): 2}

  Note that empty dictionaries are ignored and will not be restored by
  ``unflatten_dict``.

  Args:
    xs: a nested dictionary
    keep_empty_nodes: replaces empty dictionaries with
      ``traverse_util.empty_node``.
    is_leaf: an optional function that takes the next nested dictionary and
      nested keys and returns True if the nested dictionary is a leaf (i.e.,
      should not be flattened further).
    sep: if specified, then the keys of the returned dictionary will be
      ``sep``-joined strings (if ``None``, then keys will be tuples).

  Returns:
    The flattened dictionary.
  """
  assert isinstance(
    xs, (flax.core.FrozenDict, dict)
  ), f'expected (frozen)dict; got {type(xs)}'

  def make_key(path):
    # Tuple keys by default; `sep`-joined strings when requested.
    return path if sep is None else sep.join(path)

  def recurse(node, prefix):
    is_dict = isinstance(node, (flax.core.FrozenDict, dict))
    if not is_dict or (is_leaf and is_leaf(prefix, node)):
      return {make_key(prefix): node}
    flat = {}
    has_items = False
    for key, child in node.items():
      has_items = True
      flat.update(recurse(child, prefix + (key,)))
    if keep_empty_nodes and not has_items:
      # A fully empty top-level input flattens to nothing, not a sentinel.
      if prefix == ():
        return {}
      return {make_key(prefix): empty_node}
    return flat

  return recurse(xs, ())
def unflatten_dict(xs, sep=None):
  """Unflatten a dictionary.

  See ``flatten_dict``

  Example::

    >>> flat_xs = {
    ...   ('foo',): 1,
    ...   ('bar', 'a'): 2,
    ... }
    >>> xs = unflatten_dict(flat_xs)
    >>> xs
    {'foo': 1, 'bar': {'a': 2}}

  Args:
    xs: a flattened dictionary
    sep: separator (same as used with ``flatten_dict()``).

  Returns:
    The nested dictionary.
  """
  assert isinstance(xs, dict), f'input is not a dict; it is a {type(xs)}'
  nested = {}
  for path, value in xs.items():
    if sep is not None:
      path = path.split(sep)
    # The sentinel marks an empty dict preserved by flatten_dict.
    if value is empty_node:
      value = {}
    node = nested
    # Walk (creating as needed) down to the leaf's parent dict.
    for part in path[:-1]:
      node = node.setdefault(part, {})
    node[path[-1]] = value
  return nested
PathParts = Tuple[str, ...]
The provided code snippet includes necessary dependencies for implementing the `path_aware_map` function. Write a Python function `def path_aware_map( f: Callable[[PathParts, Any], Any], nested_dict: VariableDict ) -> VariableDict` to solve the following problem:
A map function that operates over nested dictionary structures while taking the path to each leaf into account. Example:: >>> import jax.numpy as jnp >>> from flax import traverse_util >>> params = {'a': {'x': 10, 'y': 3}, 'b': {'x': 20}} >>> f = lambda path, x: x + 5 if 'x' in path else -x >>> traverse_util.path_aware_map(f, params) {'a': {'x': 15, 'y': -3}, 'b': {'x': 25}} Args: f: A callable that takes in ``(path, value)`` arguments and maps them to a new value. Here ``path`` is a tuple of strings. nested_dict: A nested dictionary structure. Returns: A new nested dictionary structure with the mapped values.
Here is the function:
def path_aware_map(
  f: Callable[[PathParts, Any], Any], nested_dict: VariableDict
) -> VariableDict:
  """A map function that operates over nested dictionary structures while
  taking the path to each leaf into account.

  Example::

    >>> import jax.numpy as jnp
    >>> from flax import traverse_util

    >>> params = {'a': {'x': 10, 'y': 3}, 'b': {'x': 20}}
    >>> f = lambda path, x: x + 5 if 'x' in path else -x
    >>> traverse_util.path_aware_map(f, params)
    {'a': {'x': 15, 'y': -3}, 'b': {'x': 25}}

  Args:
    f: A callable that takes in ``(path, value)`` arguments and maps them to
      a new value. Here ``path`` is a tuple of strings.
    nested_dict: A nested dictionary structure.

  Returns:
    A new nested dictionary structure with the mapped values.
  """
  # Keep empty-dict sentinels so the flatten/unflatten round trip is lossless.
  flat = flatten_dict(nested_dict, keep_empty_nodes=True)
  mapped = {
    path: value if value is empty_node else f(path, value)
    for path, value in flat.items()
  }
  return unflatten_dict(mapped)
22,813 | import abc
import copy
import dataclasses
import warnings
from typing import Any, Callable
import jax
import flax
from flax.core.scope import VariableDict
from flax.typing import PathParts
from . import struct
def _is_namedtuple(t):
return issubclass(t, tuple) and hasattr(t, '_fields') | null |
22,814 | import abc
import copy
import dataclasses
import warnings
from typing import Any, Callable
import jax
import flax
from flax.core.scope import VariableDict
from flax.typing import PathParts
from . import struct
def _get_params_dict(inputs):
  """Return `inputs` as a mutable (unfrozen) dict, rejecting other types."""
  if not isinstance(inputs, (dict, flax.core.FrozenDict)):
    raise ValueError(
      'Can only traverse a flax Model instance or a nested dict, not '
      f'{type(inputs)}'
    )
  return flax.core.unfreeze(inputs)
22,815 | import setuptools
with open("README.md", "r", encoding="utf8") as fh:
long_description = fh.read()
def _get_version():
with open('rlcard/__init__.py') as f:
for line in f:
if line.startswith('__version__'):
g = {}
exec(line, g)
return g['__version__']
raise ValueError('`__version__` not defined') | null |
22,816 | import os
import argparse
import torch
import rlcard
from rlcard.agents import RandomAgent
from rlcard.utils import (
get_device,
set_seed,
tournament,
reorganize,
Logger,
plot_curve,
)
def train(args):
    """Train a DQN or NFSP agent against random opponents on an RLCard env.

    Expects `args` to provide: env, seed, algorithm ('dqn' or 'nfsp'),
    load_checkpoint_path, log_dir, save_every, num_episodes,
    evaluate_every, num_eval_games.
    """
    # Check whether gpu is available
    device = get_device()

    # Seed numpy, torch, random
    set_seed(args.seed)

    # Make the environment with seed
    env = rlcard.make(
        args.env,
        config={
            'seed': args.seed,
        }
    )

    # Initialize the agent and use random agents as opponents
    if args.algorithm == 'dqn':
        from rlcard.agents import DQNAgent
        if args.load_checkpoint_path != "":
            # Resume from a saved checkpoint instead of training from scratch.
            agent = DQNAgent.from_checkpoint(checkpoint=torch.load(args.load_checkpoint_path))
        else:
            agent = DQNAgent(
                num_actions=env.num_actions,
                state_shape=env.state_shape[0],
                mlp_layers=[64,64],
                device=device,
                save_path=args.log_dir,
                save_every=args.save_every
            )
    elif args.algorithm == 'nfsp':
        from rlcard.agents import NFSPAgent
        if args.load_checkpoint_path != "":
            agent = NFSPAgent.from_checkpoint(checkpoint=torch.load(args.load_checkpoint_path))
        else:
            agent = NFSPAgent(
                num_actions=env.num_actions,
                state_shape=env.state_shape[0],
                hidden_layers_sizes=[64,64],
                q_mlp_layers=[64,64],
                device=device,
                save_path=args.log_dir,
                save_every=args.save_every
            )
    # The learning agent sits in seat 0; all other seats are random agents.
    agents = [agent]
    for _ in range(1, env.num_players):
        agents.append(RandomAgent(num_actions=env.num_actions))
    env.set_agents(agents)

    # Start training
    with Logger(args.log_dir) as logger:
        for episode in range(args.num_episodes):

            if args.algorithm == 'nfsp':
                agents[0].sample_episode_policy()

            # Generate data from the environment
            trajectories, payoffs = env.run(is_training=True)

            # Reorganize the data to be state, action, reward, next_state, done
            trajectories = reorganize(trajectories, payoffs)

            # Feed transitions into agent memory, and train the agent
            # Here, we assume that DQN always plays the first position
            # and the other players play randomly (if any)
            for ts in trajectories[0]:
                agent.feed(ts)

            # Evaluate the performance. Play with random agents.
            if episode % args.evaluate_every == 0:
                logger.log_performance(
                    episode,
                    tournament(
                        env,
                        args.num_eval_games,
                    )[0]
                )

        # Get the paths
        csv_path, fig_path = logger.csv_path, logger.fig_path

    # Plot the learning curve
    plot_curve(csv_path, fig_path, args.algorithm)

    # Save model
    save_path = os.path.join(args.log_dir, 'model.pth')
    torch.save(agent, save_path)
    print('Model saved in', save_path)
22,817 | import os
import argparse
import torch
from pettingzoo.classic import (
leduc_holdem_v4,
texas_holdem_v4,
texas_holdem_no_limit_v6,
gin_rummy_v4,
)
from rlcard.agents.pettingzoo_agents import RandomAgentPettingZoo
from rlcard.utils import (
get_device,
set_seed,
Logger,
plot_curve,
run_game_pettingzoo,
reorganize_pettingzoo,
tournament_pettingzoo,
)
env_name_to_env_func = {
"leduc-holdem": leduc_holdem_v4,
"limit-holdem": texas_holdem_v4,
"no-limit-holdem": texas_holdem_no_limit_v6,
"gin-rummy": gin_rummy_v4,
}
class NFSPAgentPettingZoo(NFSPAgent):
def step(self, state):
def eval_step(self, state):
def feed(self, ts):
class DQNAgentPettingZoo(DQNAgent):
def step(self, state):
def eval_step(self, state):
def feed(self, ts):
class RandomAgentPettingZoo(RandomAgent):
def step(self, state):
def eval_step(self, state):
def train(args):
    """Train a DQN or NFSP agent on a PettingZoo classic env vs. random agents.

    Expects `args` to provide: env, seed, algorithm ('dqn' or 'nfsp'),
    log_dir, num_episodes, evaluate_every, num_eval_games.
    """
    # Check whether gpu is available
    device = get_device()

    # Seed numpy, torch, random
    set_seed(args.seed)

    # Make the environment with seed
    env_func = env_name_to_env_func[args.env]
    env = env_func.env()
    env.reset(seed=args.seed)

    # Initialize the agent and use random agents as opponents
    # The learning agent always occupies the first seat.
    learning_agent_name = env.agents[0]
    if args.algorithm == 'dqn':
        from rlcard.agents.pettingzoo_agents import DQNAgentPettingZoo
        agent = DQNAgentPettingZoo(
            num_actions=env.action_space(learning_agent_name).n,
            state_shape=env.observation_space(learning_agent_name)["observation"].shape,
            mlp_layers=[64,64],
            device=device
        )
    elif args.algorithm == 'nfsp':
        from rlcard.agents.pettingzoo_agents import NFSPAgentPettingZoo
        agent = NFSPAgentPettingZoo(
            num_actions=env.action_space(learning_agent_name).n,
            state_shape=env.observation_space(learning_agent_name)["observation"].shape,
            hidden_layers_sizes=[64,64],
            q_mlp_layers=[64,64],
            device=device
        )
    agents = {learning_agent_name: agent}
    for i in range(1, env.num_agents):
        agents[env.agents[i]] = RandomAgentPettingZoo(num_actions=env.action_space(env.agents[i]).n)

    # Start training
    num_timesteps = 0
    with Logger(args.log_dir) as logger:
        for episode in range(args.num_episodes):

            if args.algorithm == 'nfsp':
                agent.sample_episode_policy()

            # Generate data from the environment
            trajectories = run_game_pettingzoo(env, agents, is_training=True)
            trajectories = reorganize_pettingzoo(trajectories)
            num_timesteps += sum([len(t) for t in trajectories.values()])
            # Only the learning agent's transitions are used for training.
            for ts in trajectories[learning_agent_name]:
                agent.feed(ts)

            # Evaluate the performance. Play with random agents.
            if episode % args.evaluate_every == 0:
                average_rewards = tournament_pettingzoo(env, agents, args.num_eval_games)
                logger.log_performance(episode, average_rewards[learning_agent_name])

        # Get the paths
        csv_path, fig_path = logger.csv_path, logger.fig_path

    # Plot the learning curve
    plot_curve(csv_path, fig_path, args.algorithm)

    # Save model
    save_path = os.path.join(args.log_dir, 'model.pth')
    torch.save(agent, save_path)
    print('Model saved in', save_path)
22,818 | import os
import argparse
from pettingzoo.classic import (
leduc_holdem_v4,
texas_holdem_v4,
dou_dizhu_v4,
mahjong_v4,
texas_holdem_no_limit_v6,
uno_v4,
gin_rummy_v4,
)
from rlcard.agents.dmc_agent import DMCTrainer
env_name_to_env_func = {
"leduc-holdem": leduc_holdem_v4,
"limit-holdem": texas_holdem_v4,
"doudizhu": dou_dizhu_v4,
"mahjong": mahjong_v4,
"no-limit-holdem": texas_holdem_no_limit_v6,
"uno": uno_v4,
"gin-rummy": gin_rummy_v4,
}
def train(args):
    """Train Deep Monte-Carlo (DMC) agents on a PettingZoo environment.

    Expects `args` to provide: env, load_model, xpid, savedir, save_interval,
    num_actor_devices, num_actors, training_device, total_frames.
    """
    # Make the environment
    env_func = env_name_to_env_func[args.env]
    env = env_func.env()
    env.reset()

    # Initialize the DMC trainer
    trainer = DMCTrainer(
        env,
        is_pettingzoo_env=True,
        load_model=args.load_model,
        xpid=args.xpid,
        savedir=args.savedir,
        save_interval=args.save_interval,
        num_actor_devices=args.num_actor_devices,
        num_actors=args.num_actors,
        training_device=args.training_device,
        total_frames=args.total_frames,
    )

    # Train DMC Agents (blocks until training completes).
    trainer.start()
22,819 | import argparse
import pprint
import rlcard
from rlcard.agents import RandomAgent
from rlcard.utils import set_seed
def run(args):
    """Play one game with random agents and print the raw trajectories.

    `args.env` names the RLCard environment to instantiate.
    """
    # Make environment
    env = rlcard.make(
        args.env,
        config={
            'seed': 42,
        }
    )

    # Seed numpy, torch, random
    set_seed(42)

    # Set agents: the same random agent plays every seat.
    agent = RandomAgent(num_actions=env.num_actions)
    env.set_agents([agent for _ in range(env.num_players)])

    # Generate data from the environment
    trajectories, player_wins = env.run(is_training=False)

    # Print out the trajectories
    print('\nTrajectories:')
    print(trajectories)
    print('\nSample raw observation:')
    pprint.pprint(trajectories[0][0]['raw_obs'])
    print('\nSample raw legal_actions:')
    pprint.pprint(trajectories[0][0]['raw_legal_actions'])
22,820 | from typing import TYPE_CHECKING
import rlcard
from rlcard.agents import RandomAgent
from rlcard.models.gin_rummy_rule_models import GinRummyNoviceRuleAgent
from rlcard.agents.human_agents.gin_rummy_human_agent.gin_rummy_human_agent import HumanAgent
from rlcard.agents.human_agents.gin_rummy_human_agent.gui_gin_rummy.game_app import GameApp
from rlcard.games.gin_rummy.utils import scorers
class GinRummyNoviceRuleAgent(object):
    '''
    Agent always discards highest deadwood value card
    '''

    def __init__(self):
        self.use_raw = False  # FIXME: should this be True ?

    @staticmethod
    def step(state):
        ''' Predict the action given the current state.
            Novice strategy:
                Case where can gin:
                    Choose one of the gin actions.
                Case where can knock:
                    Choose one of the knock actions.
                Case where can discard:
                    Gin if can. Knock if can.
                    Otherwise, put aside cards in some best meld cluster.
                    Choose one of the remaining cards with highest deadwood value.
                    Discard that card.
                Case otherwise:
                    Choose a random action.

        Args:
            state (numpy.array): an numpy array that represents the current state

        Returns:
            action (int): the action predicted
        '''
        # `step` reads no instance state and `eval_step` invokes it as
        # `self.step(state)`; it must be a @staticmethod — otherwise that
        # call passes `self` as an extra argument and raises TypeError.
        legal_actions = state['legal_actions']
        actions = legal_actions.copy()
        legal_action_events = [ActionEvent.decode_action(x) for x in legal_actions]
        gin_action_events = [x for x in legal_action_events if isinstance(x, GinAction)]
        knock_action_events = [x for x in legal_action_events if isinstance(x, KnockAction)]
        discard_action_events = [x for x in legal_action_events if isinstance(x, DiscardAction)]
        if gin_action_events:
            actions = [x.action_id for x in gin_action_events]
        elif knock_action_events:
            actions = [x.action_id for x in knock_action_events]
        elif discard_action_events:
            best_discards = GinRummyNoviceRuleAgent._get_best_discards(
                discard_action_events=discard_action_events,
                state=state)
            if best_discards:
                actions = [DiscardAction(card=card).action_id for card in best_discards]
        # legal_actions may arrive as an OrderedDict keyed by action id;
        # np.random.choice needs a plain sequence of ids. Use isinstance
        # rather than `type(...) ==` so subclasses are handled as well.
        if isinstance(actions, OrderedDict):
            actions = list(actions.keys())
        return np.random.choice(actions)

    def eval_step(self, state):
        ''' Predict the action given the current state for evaluation.
            Since the agents is not trained, this function is equivalent to step function.

        Args:
            state (numpy.array): an numpy array that represents the current state

        Returns:
            action (int): the action predicted by the agent
            probabilities (list): The list of action probabilities
        '''
        probabilities = []
        return self.step(state), probabilities

    @staticmethod
    def _get_best_discards(discard_action_events, state) -> List[Card]:
        # Try each legal discard, compute the minimum achievable deadwood
        # count of the remaining hand, and return all discards achieving it.
        best_discards = []  # type: List[Card]
        final_deadwood_count = 999  # sentinel: larger than any real count
        env_hand = state['obs'][0]
        hand = utils.decode_cards(env_cards=env_hand)
        for discard_action_event in discard_action_events:
            discard_card = discard_action_event.card
            next_hand = [card for card in hand if card != discard_card]
            meld_clusters = melding.get_meld_clusters(hand=next_hand)
            deadwood_counts = []
            for meld_cluster in meld_clusters:
                deadwood_count = utils.get_deadwood_count(hand=next_hand, meld_cluster=meld_cluster)
                deadwood_counts.append(deadwood_count)
            best_deadwood_count = min(deadwood_counts,
                                      default=utils.get_deadwood_count(hand=next_hand, meld_cluster=[]))
            if best_deadwood_count < final_deadwood_count:
                final_deadwood_count = best_deadwood_count
                best_discards = [discard_card]
            elif best_deadwood_count == final_deadwood_count:
                best_discards.append(discard_card)
        return best_discards
class HumanAgent(object):
    ''' A human agent for Gin Rummy. It can be used to play against trained models.

    A UI thread is expected to read ``self.state`` while
    ``self.is_choosing_action_id`` is True and then store the human's
    choice into ``self.chosen_action_id``; ``step`` busy-waits on that field.
    '''

    def __init__(self, num_actions):
        ''' Initialize the human agent

        Args:
            num_actions (int): the size of the output action space
        '''
        self.use_raw = True
        self.num_actions = num_actions
        self.is_choosing_action_id = False
        self.chosen_action_id = None  # type: int or None
        self.state = None

    def step(self, state):
        ''' Human agent will display the state and make decisions through interfaces

        Args:
            state (dict): A dictionary that represents the current state

        Returns:
            action (ActionEvent): the action event decoded from the id the human chose

        Raises:
            GinRummyProgramError: if a previous choice is still pending
        '''
        if self.is_choosing_action_id:
            raise GinRummyProgramError("self.is_choosing_action_id must be False.")
        if self.state is not None:
            raise GinRummyProgramError("self.state must be None.")
        if self.chosen_action_id is not None:
            raise GinRummyProgramError("self.chosen_action_id={} must be None.".format(self.chosen_action_id))
        self.state = state
        self.is_choosing_action_id = True
        # Bug fix: action id 0 is falsy, so `while not self.chosen_action_id`
        # would spin forever if the human picked action 0. Compare against
        # None explicitly; the loop guarantees a non-None id on exit.
        while self.chosen_action_id is None:
            time.sleep(0.001)
        chosen_action_event = ActionEvent.decode_action(action_id=self.chosen_action_id)
        # Reset the handshake fields so the next turn starts clean.
        self.state = None
        self.is_choosing_action_id = False
        self.chosen_action_id = None
        return chosen_action_event

    def eval_step(self, state):
        ''' Predict the action given the current state for evaluation. The same to step here.

        Args:
            state (numpy.array): an numpy array that represents the current state

        Returns:
            action (int): the action predicted (randomly chosen) by the random agent
        '''
        return self.step(state), {}
def make_gin_rummy_env() -> 'GinRummyEnv':
    ''' Build a Gin Rummy environment with a novice rule bot in the north
    seat and a human player in the south seat.

    A RandomAgent could be substituted for the north seat if desired.

    Returns:
        GinRummyEnv: the configured environment
    '''
    env = rlcard.make('gin-rummy')
    north = GinRummyNoviceRuleAgent()
    south = HumanAgent(env.num_actions)
    env.set_agents([north, south])
    # Score hands with the v0 payoff function.
    env.game.judge.scorer = scorers.GinRummyScorer(get_payoff=scorers.get_payoff_gin_rummy_v0)
    return env
import os
import argparse
import torch
import rlcard
from rlcard.agents.dmc_agent import DMCTrainer
def train(args):
    ''' Train Deep Monte-Carlo (DMC) agents on the requested environment.

    Args:
        args (argparse.Namespace): parsed command-line options naming the
            environment, device placement, checkpointing, and actor counts
    '''
    env = rlcard.make(args.env)

    # Forward all trainer options straight from the command line.
    trainer_options = dict(
        cuda=args.cuda,
        load_model=args.load_model,
        xpid=args.xpid,
        savedir=args.savedir,
        save_interval=args.save_interval,
        num_actor_devices=args.num_actor_devices,
        num_actors=args.num_actors,
        training_device=args.training_device,
    )
    trainer = DMCTrainer(env, **trainer_options)

    # Run the training loop until it terminates.
    trainer.start()
import os
import argparse
import rlcard
from rlcard.agents import (
DQNAgent,
RandomAgent,
)
from rlcard.utils import (
get_device,
set_seed,
tournament,
)
def load_model(model_path, env=None, position=None, device=None):
    ''' Load an agent from a checkpoint file, a CFR directory, the string
    'random', or a model-zoo identifier.

    Args:
        model_path (str): file path (Torch agent), directory (CFR policy),
            'random', or a model-zoo name
        env: the environment, needed by CFR and random agents
        position (int): seat index used when loading from the model zoo
        device: torch device the loaded agent should run on

    Returns:
        the loaded agent
    '''
    if os.path.isfile(model_path):
        # A single file is assumed to be a serialized Torch agent.
        import torch
        agent = torch.load(model_path, map_location=device)
        agent.set_device(device)
        return agent
    if os.path.isdir(model_path):
        # A directory holds a tabular CFR policy.
        from rlcard.agents import CFRAgent
        agent = CFRAgent(env, model_path)
        agent.load()
        return agent
    if model_path == 'random':
        from rlcard.agents import RandomAgent
        return RandomAgent(num_actions=env.num_actions)
    # Anything else is treated as a model-zoo identifier.
    from rlcard import models
    return models.load(model_path).agents[position]
def evaluate(args):
    ''' Run a tournament between the given models and print average payoffs.

    Args:
        args (argparse.Namespace): parsed options with the environment name,
            seed, model paths, and number of games
    '''
    device = get_device()  # pick GPU when available, else CPU
    set_seed(args.seed)    # seed numpy, torch, random

    env = rlcard.make(args.env, config={'seed': args.seed})

    # One agent per seat, in the order the models were given.
    env.set_agents([
        load_model(path, env, seat, device)
        for seat, path in enumerate(args.models)
    ])

    rewards = tournament(env, args.num_games)
    for seat, reward in enumerate(rewards):
        print(seat, args.models[seat], reward)
import os
import argparse
import rlcard
from rlcard.agents import (
CFRAgent,
RandomAgent,
)
from rlcard.utils import (
set_seed,
tournament,
Logger,
plot_curve,
)
def train(args):
    ''' Train a CFR agent on Leduc Hold'em, periodically evaluating it
    against a random agent and plotting the learning curve.

    Args:
        args (argparse.Namespace): parsed options with the seed, log dir,
            episode counts, and evaluation cadence
    '''
    # CFR requires step_back support; only Leduc Hold'em is supported here.
    env = rlcard.make(
        'leduc-holdem',
        config={'seed': 0, 'allow_step_back': True},
    )
    eval_env = rlcard.make(
        'leduc-holdem',
        config={'seed': 0},
    )

    # Seed numpy, torch, random
    set_seed(args.seed)

    agent = CFRAgent(env, os.path.join(args.log_dir, 'cfr_model'))
    agent.load()  # resume from a previously saved model when one exists

    # Evaluation pits the CFR agent against a uniform-random opponent.
    eval_env.set_agents([
        agent,
        RandomAgent(num_actions=env.num_actions),
    ])

    with Logger(args.log_dir) as logger:
        for episode in range(args.num_episodes):
            agent.train()
            print('\rIteration {}'.format(episode), end='')
            if episode % args.evaluate_every == 0:
                agent.save()  # checkpoint before evaluating
                payoff = tournament(eval_env, args.num_eval_games)[0]
                logger.log_performance(episode, payoff)
        # Grab the output paths while the logger is still open.
        csv_path, fig_path = logger.csv_path, logger.fig_path

    # Plot the learning curve
    plot_curve(csv_path, fig_path, 'cfr')
from rlcard.utils.utils import print_card
def print_card(cards):
    ''' Nicely print a card or list of cards as aligned ASCII-art boxes.

    Args:
        cards (string, Card, list, or None): the card(s) to be printed;
            None (or a None entry) is rendered as a face-down card.
    '''
    if cards is None:
        cards = [None]
    if isinstance(cards, str):
        cards = [cards]

    # Every row must be exactly 11 columns to line up with the borders.
    # (The interior padding had been lost, producing ragged boxes.)
    TOP = '┌─────────┐'
    BOTTOM = '└─────────┘'
    SHADED = '│░░░░░░░░░│'
    EMPTY = '│         │'

    lines = [[] for _ in range(9)]
    for card in cards:
        if card is None:
            # Face-down card: fully shaded interior.
            rows = [TOP] + [SHADED] * 7 + [BOTTOM]
        else:
            if isinstance(card, Card):
                elegent_card = elegent_form(card.suit + card.rank)
            else:
                elegent_card = elegent_form(card)
            suit = elegent_card[0]
            rank = elegent_card[1]
            # Two-character ranks (e.g. '10') carry no extra pad character.
            if len(elegent_card) == 3:
                space = elegent_card[2]
            else:
                space = ' '
            rows = [
                TOP,
                '│{}{}       │'.format(rank, space),
                EMPTY,
                EMPTY,
                '│    {}    │'.format(suit),
                EMPTY,
                EMPTY,
                '│       {}{}│'.format(space, rank),
                BOTTOM,
            ]
        for i, row in enumerate(rows):
            lines[i].append(row)

    for line in lines:
        print(' '.join(line))
The provided code snippet includes necessary dependencies for implementing the `_print_state` function. Write a Python function `def _print_state(state, action_record)` to solve the following problem:
Print out the state Args: state (dict): A dictionary of the raw state action_record (list): A list of the historical actions
Here is the function:
def _print_state(state, action_record):
    ''' Print out the state

    Args:
        state (dict): A dictionary of the raw state
        action_record (list): A list of the historical actions
    '''
    # Collect (oldest first) the actions taken since this player's last turn.
    recent_actions = []
    for i in range(1, len(action_record) + 1):
        if action_record[-i][0] == state['current_player']:
            break
        recent_actions.insert(0, action_record[-i])
    for pair in recent_actions:
        print('>> Player', pair[0], 'chooses', pair[1])

    print('\n=============== Community Card ===============')
    print_card(state['public_card'])
    print('=============== Your Hand ===============')
    print_card(state['hand'])

    print('=============== Chips ===============')
    print('Yours: ', end='')
    print('+' * state['my_chips'])
    # All opponents' chip stacks are drawn on one line.
    for i, chips in enumerate(state['all_chips']):
        if i != state['current_player']:
            print('Agent {}: '.format(i), end='')
            print('+' * chips, end='')

    print('\n=========== Actions You Can Choose ===========')
    print(', '.join([str(index) + ': ' + action for index, action in enumerate(state['legal_actions'])]))
    print('')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.