Skip to content

Commit 95b4c05

Browse files
gramalingam and Copilot
committed
fix: address PR review feedback
- Remove duplicate `if __name__ == '__main__'` block before RootInitializerTest
- Replace assert with ValueError in lift_initializers_to_constants
- Fix initializer() docstring to note input(const_value=) exception
- Use self.initializer() for non-cached path to preserve scope naming
- Remove unused captured_values in test

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
Signed-off-by: G Ramalingam <grama@microsoft.com>
1 parent dc52443 commit 95b4c05

File tree

2 files changed

+11
-17
lines changed

2 files changed

+11
-17
lines changed

onnxscript/_internal/builder.py

Lines changed: 9 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -102,7 +102,8 @@ def lift_initializers_to_constants(graph: ir.Graph) -> None:
102102
new_nodes: list[ir.Node] = []
103103
for value in to_lift:
104104
tensor = value.const_value
105-
assert tensor is not None, f"Initializer {value.name!r} has no const_value"
105+
if tensor is None:
106+
raise ValueError(f"Initializer {value.name!r} has no const_value")
106107
# Build a Constant node whose output is the *same* ir.Value so
107108
# that every existing reference keeps working.
108109
node = ir.Node(
@@ -320,9 +321,12 @@ def initializer(
320321
) -> ir.Value:
321322
"""Register a tensor as a graph initializer in the **root** graph.
322323
323-
All initializers are stored in the root graph so that inner scopes
324-
(subgraphs) can reference them via ONNX's outer-scope visibility
325-
rules. For function bodies (which cannot have initializers), apply
324+
Initializers created through this method are stored in the root graph
325+
so that inner scopes (subgraphs) can reference them via ONNX's
326+
outer-scope visibility rules. This does not apply to the ONNX
327+
default-input pattern created via :meth:`input` with ``const_value``,
328+
which registers an initializer on the owning graph. For function
329+
bodies (which cannot have initializers), apply
326330
:func:`lift_initializers_to_constants` before wrapping in
327331
:class:`ir.Function`.
328332
"""
@@ -430,7 +434,7 @@ def _get_or_create_constant(
430434
# For other types (TensorProtocol, numpy arrays, torch tensors, etc.),
431435
# ir.tensor() handles the conversion.
432436
# TODO(rama): Consider caching for other tensor values.
433-
return root.initializer(ir.tensor(value, dtype=dtype))
437+
return self.initializer(ir.tensor(value, dtype=dtype))
434438

435439
def _input_to_ir_value(
436440
self, value: VALUE_LIKE, like_type: ir.Value | None = None

onnxscript/_internal/builder_test.py

Lines changed: 2 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1441,10 +1441,6 @@ def _add_extra_input(op, x, y, z):
14411441
)
14421442

14431443

1444-
if __name__ == "__main__":
1445-
unittest.main()
1446-
1447-
14481444
class RootInitializerTest(unittest.TestCase):
14491445
"""Tests for root-graph initializer storage and lift_initializers_to_constants."""
14501446

@@ -1485,17 +1481,11 @@ def test_sibling_subgraphs_share_root_cache(self):
14851481
)
14861482
root_builder = builder.GraphBuilder(root_graph)
14871483

1488-
captured_values = []
1489-
14901484
def body1(op, x):
1491-
result = op.Add(x, 2.0)
1492-
captured_values.append(result)
1493-
return result
1485+
return op.Add(x, 2.0)
14941486

14951487
def body2(op, x):
1496-
result = op.Mul(x, 2.0)
1497-
captured_values.append(result)
1498-
return result
1488+
return op.Mul(x, 2.0)
14991489

15001490
root_builder.subgraph(body1, inputs=[FLOAT[3]], outputs=[FLOAT[3]])
15011491
root_builder.subgraph(body2, inputs=[FLOAT[3]], outputs=[FLOAT[3]])

0 commit comments

Comments
 (0)