Build
build/torch-universal/op_without_fake_test/__init__.py
ADDED
@@ -0,0 +1,13 @@
+import torch
+import torch.nn.functional as F
+
+from . import layers
+
+
+@torch.library.custom_op("op_without_fake_test::silu_and_mul", mutates_args=())
+def _silu_and_mul(x: torch.Tensor) -> torch.Tensor:
+    d = x.shape[-1] // 2
+    return F.silu(x[..., :d]) * x[..., d:]
+
+
+__all__ = ["layers"]
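The op above is registered with torch.library.custom_op but, as the package name suggests, no fake (meta) implementation is provided, so the op cannot be traced under torch.compile or FakeTensor/meta propagation. For contrast, a hypothetical sketch of the registration this test deliberately omits (the function name _silu_and_mul_fake is an assumption, not part of this commit):

# Hypothetical, not in this commit: a fake/meta kernel that would make the op traceable.
@torch.library.register_fake("op_without_fake_test::silu_and_mul")
def _silu_and_mul_fake(x: torch.Tensor) -> torch.Tensor:
    # Output keeps all leading dims; the last dim is halved.
    d = x.shape[-1] // 2
    return x.new_empty(x.shape[:-1] + (d,))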
build/torch-universal/op_without_fake_test/layers.py
ADDED
@@ -0,0 +1,10 @@
+import torch
+import torch.nn as nn
+
+
+class SiluAndMul(nn.Module):
+    has_backward = False
+    can_torch_compile = False
+
+    def forward(self, x: torch.Tensor) -> torch.Tensor:
+        return torch.ops.op_without_fake_test.silu_and_mul(x)
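A minimal usage sketch, assuming the built op_without_fake_test package is importable on the Python path (the input shape is an arbitrary example, not from the commit). In eager mode the layer works normally; only paths that need a fake kernel, such as torch.compile, are expected to fail, which is why can_torch_compile is set to False.

import torch
from op_without_fake_test.layers import SiluAndMul

x = torch.randn(2, 8)   # last dim is split in half: SiLU(x[..., :4]) * x[..., 4:]
layer = SiluAndMul()
out = layer(x)          # eager call into torch.ops.op_without_fake_test.silu_and_mul
print(out.shape)        # torch.Size([2, 4])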