Skip to content

Commit cb617c0

Browse files
gramalingam and Copilot
committed
Remove unused variables flagged in PR review
- Remove unused local variable 'input_data' in _layer_norm_extended_test.py
- Remove unused global '_EPS_F' in skip_normalization_unit_test.py

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
1 parent 16e760f commit cb617c0

File tree

2 files changed

+0
-6
lines changed

2 files changed

+0
-6
lines changed

onnxscript/rewriter/ort_fusions/skip_normalization_unit_test.py

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -30,7 +30,6 @@
3030
)
3131

3232
_B, _S, _D = 2, 8, 16
33-
_EPS_F = ir.tensor(np.array([1e-6], dtype=np.float32))
3433

3534

3635
# ========== Skip RMS Norm patterns ==========

onnxscript/rewriter/rules/fusion/_layer_norm_extended_test.py

Lines changed: 0 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -11,7 +11,6 @@
1111

1212
import unittest
1313

14-
import numpy as np
1514
import onnx_ir as ir
1615

1716
import onnxscript.optimizer
@@ -159,10 +158,6 @@ def test_div_with_bias(self):
159158
def test_double_precision(self):
160159
"""Double-precision inputs → fuses (double is a valid compute type)."""
161160
model_proto = _ln_double.to_model_proto()
162-
input_data = {
163-
"x": np.random.randn(2, 4, 8).astype(np.float64),
164-
"scale": np.random.randn(8).astype(np.float64),
165-
}
166161
model = ir.serde.deserialize_model(model_proto)
167162
count = fuse_layer_normalization(model)
168163
self.assertGreater(count, 0)

0 commit comments

Comments (0)