medmekk HF Staff committed on
Commit
5d24fa7
·
verified ·
1 Parent(s): 4a273f4

Build uploaded using `kernels`.

Browse files
Files changed (19) hide show
  1. .gitattributes +2 -0
  2. build/torch28-metal-aarch64-darwin/mlx_rmsnorm/__init__.py +3 -3
  3. build/torch28-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/__init__.cpython-313.pyc +0 -0
  4. build/torch28-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/_ops.cpython-313.pyc +0 -0
  5. build/torch28-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/functions.cpython-313.pyc +0 -0
  6. build/torch28-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/layers.cpython-313.pyc +0 -0
  7. build/torch28-metal-aarch64-darwin/mlx_rmsnorm/{_mlx_rmsnorm_57d865f.abi3.so → _mlx_rmsnorm_06e17fa.abi3.so} +1 -1
  8. build/torch28-metal-aarch64-darwin/mlx_rmsnorm/_ops.py +3 -3
  9. build/torch28-metal-aarch64-darwin/mlx_rmsnorm/functions.py +23 -0
  10. build/torch28-metal-aarch64-darwin/mlx_rmsnorm/layers.py +1 -22
  11. build/torch29-metal-aarch64-darwin/mlx_rmsnorm/__init__.py +3 -3
  12. build/torch29-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/__init__.cpython-313.pyc +0 -0
  13. build/torch29-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/_ops.cpython-313.pyc +0 -0
  14. build/torch29-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/functions.cpython-313.pyc +0 -0
  15. build/torch29-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/layers.cpython-313.pyc +0 -0
  16. build/torch29-metal-aarch64-darwin/mlx_rmsnorm/{_mlx_rmsnorm_57d865f.abi3.so → _mlx_rmsnorm_06e17fa.abi3.so} +1 -1
  17. build/torch29-metal-aarch64-darwin/mlx_rmsnorm/_ops.py +3 -3
  18. build/torch29-metal-aarch64-darwin/mlx_rmsnorm/functions.py +23 -0
  19. build/torch29-metal-aarch64-darwin/mlx_rmsnorm/layers.py +1 -22
.gitattributes CHANGED
@@ -37,3 +37,5 @@ build/torch28-metal-aarch64-darwin/mlx_rmsnorm/_mlx_rmsnorm_97571a8_dirty.abi3.s
37
  build/torch29-metal-aarch64-darwin/mlx_rmsnorm/_mlx_rmsnorm_97571a8_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
38
  build/torch28-metal-aarch64-darwin/mlx_rmsnorm/_mlx_rmsnorm_57d865f.abi3.so filter=lfs diff=lfs merge=lfs -text
39
  build/torch29-metal-aarch64-darwin/mlx_rmsnorm/_mlx_rmsnorm_57d865f.abi3.so filter=lfs diff=lfs merge=lfs -text
 
 
 
37
  build/torch29-metal-aarch64-darwin/mlx_rmsnorm/_mlx_rmsnorm_97571a8_dirty.abi3.so filter=lfs diff=lfs merge=lfs -text
38
  build/torch28-metal-aarch64-darwin/mlx_rmsnorm/_mlx_rmsnorm_57d865f.abi3.so filter=lfs diff=lfs merge=lfs -text
39
  build/torch29-metal-aarch64-darwin/mlx_rmsnorm/_mlx_rmsnorm_57d865f.abi3.so filter=lfs diff=lfs merge=lfs -text
40
+ build/torch28-metal-aarch64-darwin/mlx_rmsnorm/_mlx_rmsnorm_06e17fa.abi3.so filter=lfs diff=lfs merge=lfs -text
41
+ build/torch29-metal-aarch64-darwin/mlx_rmsnorm/_mlx_rmsnorm_06e17fa.abi3.so filter=lfs diff=lfs merge=lfs -text
build/torch28-metal-aarch64-darwin/mlx_rmsnorm/__init__.py CHANGED
@@ -1,3 +1,3 @@
1
- from .layers import RMSNorm, rmsnorm_forward, rmsnorm_backward
2
-
3
- __all__ = ["RMSNorm", "rmsnorm_forward", "rmsnorm_backward"]
 
1
+ from .functions import rmsnorm_forward, rmsnorm_backward
2
+ from . import layers
3
+ __all__ = ["layers", "rmsnorm_forward", "rmsnorm_backward"]
build/torch28-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/__init__.cpython-313.pyc CHANGED
Binary files a/build/torch28-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/__init__.cpython-313.pyc and b/build/torch28-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/__init__.cpython-313.pyc differ
 
build/torch28-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/_ops.cpython-313.pyc CHANGED
Binary files a/build/torch28-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/_ops.cpython-313.pyc and b/build/torch28-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/_ops.cpython-313.pyc differ
 
build/torch28-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/functions.cpython-313.pyc ADDED
Binary file (1.91 kB). View file
 
build/torch28-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/layers.cpython-313.pyc CHANGED
Binary files a/build/torch28-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/layers.cpython-313.pyc and b/build/torch28-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/layers.cpython-313.pyc differ
 
build/torch28-metal-aarch64-darwin/mlx_rmsnorm/{_mlx_rmsnorm_57d865f.abi3.so → _mlx_rmsnorm_06e17fa.abi3.so} RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:22dcc9b26a28ff2c1cc34697bd25bf55eeb9cd9d4f1eb32a352ed2343fecbe6d
3
  size 219168
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6064021fd19860d705283dfb31cd15e6e4e1776dc4c1af0e29c8ee421deafa64
3
  size 219168
build/torch28-metal-aarch64-darwin/mlx_rmsnorm/_ops.py CHANGED
@@ -1,9 +1,9 @@
1
  import torch
2
- from . import _mlx_rmsnorm_57d865f
3
- ops = torch.ops._mlx_rmsnorm_57d865f
4
 
5
  def add_op_namespace_prefix(op_name: str):
6
  """
7
  Prefix op by namespace.
8
  """
9
- return f"_mlx_rmsnorm_57d865f::{op_name}"
 
1
  import torch
2
+ from . import _mlx_rmsnorm_06e17fa
3
+ ops = torch.ops._mlx_rmsnorm_06e17fa
4
 
5
def add_op_namespace_prefix(op_name: str):
    """
    Return *op_name* qualified with this build's torch op namespace.
    """
    return "_mlx_rmsnorm_06e17fa::" + op_name
build/torch28-metal-aarch64-darwin/mlx_rmsnorm/functions.py ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import torch

from ._ops import ops


def rmsnorm_forward(x: torch.Tensor, weight: torch.Tensor, epsilon: float = 1e-5) -> torch.Tensor:
    """Apply RMS normalization to ``x`` scaled by per-feature ``weight``.

    ``x`` may carry arbitrary leading batch dimensions; it is viewed as
    ``(rows, features)`` for the Metal kernel and restored afterwards.

    Args:
        x: Input tensor; last dimension is the feature dimension.
        weight: Per-feature scale, ``features`` elements.
        epsilon: Small constant added for numerical stability.

    Returns:
        Normalized tensor with the same shape as ``x``.
    """
    original_shape = x.shape
    x = x.view(-1, x.shape[-1])
    weight = weight.view(-1)
    output = torch.zeros_like(x)
    ops.launch_forward_kernel(x, weight, output, epsilon)
    return output.view(original_shape)


def rmsnorm_backward(
    x: torch.Tensor,
    weight: torch.Tensor,
    grad_output: torch.Tensor,
    epsilon: float = 1e-5,
) -> tuple[torch.Tensor, torch.Tensor]:
    """Compute RMSNorm gradients w.r.t. the input and the weight.

    Args:
        x: Forward-pass input; last dimension is the feature dimension.
        weight: Per-feature scale, ``features`` elements.
        grad_output: Upstream gradient, same shape as the forward output.
        epsilon: Small constant added for numerical stability.

    Returns:
        ``(grad_input, grad_weight)`` — ``grad_input`` in ``x``'s original
        shape, ``grad_weight`` in ``weight``'s shape.
    """
    original_shape = x.shape
    x = x.view(-1, x.shape[-1])
    weight = weight.view(-1)
    # View grad_output as (rows, features) to mirror x's kernel layout;
    # flattening to 1-D (as the old code did) loses the row structure even
    # though the underlying memory is identical.
    grad_output = grad_output.view(-1, original_shape[-1])
    grad_input = torch.zeros_like(x)
    grad_weight = torch.zeros_like(weight)
    ops.launch_backward_kernel(x, weight, grad_output, grad_input, grad_weight, epsilon)
    grad_input = grad_input.view(original_shape)
    # BUG FIX: grad_weight is per-feature (numel == features).  The previous
    # code did grad_weight.view(original_shape), which raises for any batched
    # input because x.numel() != weight.numel().  Keep weight's shape.
    return grad_input, grad_weight
build/torch28-metal-aarch64-darwin/mlx_rmsnorm/layers.py CHANGED
@@ -1,5 +1,4 @@
1
- from ._ops import ops
2
-
3
  import torch
4
 
5
  class RMSNorm(torch.nn.Module):
@@ -10,23 +9,3 @@ class RMSNorm(torch.nn.Module):
10
  return rmsnorm_forward(x, self.weight, self.eps)
11
 
12
 
13
- def rmsnorm_forward(x: torch.Tensor, weight: torch.Tensor, epsilon: float = 1e-5) -> torch.Tensor:
14
- original_shape = x.shape
15
- x = x.view(-1, x.shape[-1])
16
- weight = weight.view(-1)
17
- output = torch.zeros_like(x)
18
- ops.launch_forward_kernel(x, weight, output, epsilon)
19
- output = output.view(original_shape)
20
- return output
21
-
22
- def rmsnorm_backward(x: torch.Tensor, weight: torch.Tensor, grad_output: torch.Tensor, epsilon: float = 1e-5) -> torch.Tensor:
23
- original_shape = x.shape
24
- x = x.view(-1, x.shape[-1])
25
- weight = weight.view(-1)
26
- grad_output = grad_output.view(-1)
27
- grad_input = torch.zeros_like(x)
28
- grad_weight = torch.zeros_like(weight)
29
- ops.launch_backward_kernel(x, weight, grad_output, grad_input, grad_weight, epsilon)
30
- grad_input = grad_input.view(original_shape)
31
- grad_weight = grad_weight.view(original_shape)
32
- return grad_input, grad_weight
 
1
+ from .functions import rmsnorm_forward
 
2
  import torch
3
 
4
  class RMSNorm(torch.nn.Module):
 
9
  return rmsnorm_forward(x, self.weight, self.eps)
10
 
11
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
build/torch29-metal-aarch64-darwin/mlx_rmsnorm/__init__.py CHANGED
@@ -1,3 +1,3 @@
1
- from .layers import RMSNorm, rmsnorm_forward, rmsnorm_backward
2
-
3
- __all__ = ["RMSNorm", "rmsnorm_forward", "rmsnorm_backward"]
 
1
+ from .functions import rmsnorm_forward, rmsnorm_backward
2
+ from . import layers
3
+ __all__ = ["layers", "rmsnorm_forward", "rmsnorm_backward"]
build/torch29-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/__init__.cpython-313.pyc CHANGED
Binary files a/build/torch29-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/__init__.cpython-313.pyc and b/build/torch29-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/__init__.cpython-313.pyc differ
 
build/torch29-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/_ops.cpython-313.pyc CHANGED
Binary files a/build/torch29-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/_ops.cpython-313.pyc and b/build/torch29-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/_ops.cpython-313.pyc differ
 
build/torch29-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/functions.cpython-313.pyc ADDED
Binary file (1.91 kB). View file
 
build/torch29-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/layers.cpython-313.pyc CHANGED
Binary files a/build/torch29-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/layers.cpython-313.pyc and b/build/torch29-metal-aarch64-darwin/mlx_rmsnorm/__pycache__/layers.cpython-313.pyc differ
 
build/torch29-metal-aarch64-darwin/mlx_rmsnorm/{_mlx_rmsnorm_57d865f.abi3.so → _mlx_rmsnorm_06e17fa.abi3.so} RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:a507684a2ed7b127800e22962811212ac657ac126665f645e4a4bfe78e4bb3d8
3
  size 220128
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3eb0d24dc5c019fa77403103e936cff377e6003f2537fdfd0892f0a0f73ae922
3
  size 220128
build/torch29-metal-aarch64-darwin/mlx_rmsnorm/_ops.py CHANGED
@@ -1,9 +1,9 @@
1
  import torch
2
- from . import _mlx_rmsnorm_57d865f
3
- ops = torch.ops._mlx_rmsnorm_57d865f
4
 
5
  def add_op_namespace_prefix(op_name: str):
6
  """
7
  Prefix op by namespace.
8
  """
9
- return f"_mlx_rmsnorm_57d865f::{op_name}"
 
1
  import torch
2
+ from . import _mlx_rmsnorm_06e17fa
3
+ ops = torch.ops._mlx_rmsnorm_06e17fa
4
 
5
def add_op_namespace_prefix(op_name: str):
    """
    Return *op_name* qualified with this build's torch op namespace.
    """
    return "_mlx_rmsnorm_06e17fa::" + op_name
build/torch29-metal-aarch64-darwin/mlx_rmsnorm/functions.py ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import torch

from ._ops import ops


def rmsnorm_forward(x: torch.Tensor, weight: torch.Tensor, epsilon: float = 1e-5) -> torch.Tensor:
    """Apply RMS normalization to ``x`` scaled by per-feature ``weight``.

    ``x`` may carry arbitrary leading batch dimensions; it is viewed as
    ``(rows, features)`` for the Metal kernel and restored afterwards.

    Args:
        x: Input tensor; last dimension is the feature dimension.
        weight: Per-feature scale, ``features`` elements.
        epsilon: Small constant added for numerical stability.

    Returns:
        Normalized tensor with the same shape as ``x``.
    """
    original_shape = x.shape
    x = x.view(-1, x.shape[-1])
    weight = weight.view(-1)
    output = torch.zeros_like(x)
    ops.launch_forward_kernel(x, weight, output, epsilon)
    return output.view(original_shape)


def rmsnorm_backward(
    x: torch.Tensor,
    weight: torch.Tensor,
    grad_output: torch.Tensor,
    epsilon: float = 1e-5,
) -> tuple[torch.Tensor, torch.Tensor]:
    """Compute RMSNorm gradients w.r.t. the input and the weight.

    Args:
        x: Forward-pass input; last dimension is the feature dimension.
        weight: Per-feature scale, ``features`` elements.
        grad_output: Upstream gradient, same shape as the forward output.
        epsilon: Small constant added for numerical stability.

    Returns:
        ``(grad_input, grad_weight)`` — ``grad_input`` in ``x``'s original
        shape, ``grad_weight`` in ``weight``'s shape.
    """
    original_shape = x.shape
    x = x.view(-1, x.shape[-1])
    weight = weight.view(-1)
    # View grad_output as (rows, features) to mirror x's kernel layout;
    # flattening to 1-D (as the old code did) loses the row structure even
    # though the underlying memory is identical.
    grad_output = grad_output.view(-1, original_shape[-1])
    grad_input = torch.zeros_like(x)
    grad_weight = torch.zeros_like(weight)
    ops.launch_backward_kernel(x, weight, grad_output, grad_input, grad_weight, epsilon)
    grad_input = grad_input.view(original_shape)
    # BUG FIX: grad_weight is per-feature (numel == features).  The previous
    # code did grad_weight.view(original_shape), which raises for any batched
    # input because x.numel() != weight.numel().  Keep weight's shape.
    return grad_input, grad_weight
build/torch29-metal-aarch64-darwin/mlx_rmsnorm/layers.py CHANGED
@@ -1,5 +1,4 @@
1
- from ._ops import ops
2
-
3
  import torch
4
 
5
  class RMSNorm(torch.nn.Module):
@@ -10,23 +9,3 @@ class RMSNorm(torch.nn.Module):
10
  return rmsnorm_forward(x, self.weight, self.eps)
11
 
12
 
13
- def rmsnorm_forward(x: torch.Tensor, weight: torch.Tensor, epsilon: float = 1e-5) -> torch.Tensor:
14
- original_shape = x.shape
15
- x = x.view(-1, x.shape[-1])
16
- weight = weight.view(-1)
17
- output = torch.zeros_like(x)
18
- ops.launch_forward_kernel(x, weight, output, epsilon)
19
- output = output.view(original_shape)
20
- return output
21
-
22
- def rmsnorm_backward(x: torch.Tensor, weight: torch.Tensor, grad_output: torch.Tensor, epsilon: float = 1e-5) -> torch.Tensor:
23
- original_shape = x.shape
24
- x = x.view(-1, x.shape[-1])
25
- weight = weight.view(-1)
26
- grad_output = grad_output.view(-1)
27
- grad_input = torch.zeros_like(x)
28
- grad_weight = torch.zeros_like(weight)
29
- ops.launch_backward_kernel(x, weight, grad_output, grad_input, grad_weight, epsilon)
30
- grad_input = grad_input.view(original_shape)
31
- grad_weight = grad_weight.view(original_shape)
32
- return grad_input, grad_weight
 
1
+ from .functions import rmsnorm_forward
 
2
  import torch
3
 
4
  class RMSNorm(torch.nn.Module):
 
9
  return rmsnorm_forward(x, self.weight, self.eps)
10
 
11