Implements GraphBuilder for the graph_api #57

Merged
merged 12 commits on Dec 25, 2023
Changes from 1 commit
improve code coverage
xadupre committed Dec 22, 2023
commit 2c739f94ab181720eaa13444d6e330d287708477
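
Besides small adjustments to graph_builder.py, this commit adds tests exercising the GraphBuilder API, in particular constant folding enabled through the new OptimizationOptions class. The following is a minimal sketch of that pattern, condensed from test_constant_folding2 in the diff below; the calls are assumed to behave exactly as that test uses them and are not verified beyond the diff:

    import numpy as np
    from onnx_array_api.graph_api.graph_builder import GraphBuilder, OptimizationOptions
    from onnx_array_api.reference import ExtendedReferenceEvaluator

    # Graph: Reshape(MatMul(X, Transpose(W)), [-1, 1]); with constant_folding=True,
    # optimize() folds the Transpose over the initializer into a precomputed constant.
    g = GraphBuilder(optimization_options=OptimizationOptions(constant_folding=True))
    w = np.random.randn(10, 4).astype(np.float32)
    x = g.make_tensor_input("X", np.float32, (10, 4))
    weight = g.make_initializer(w)
    new_shape = g.make_initializer(np.array([-1, 1], dtype=np.int64))
    transposed = g.make_node("Transpose", [weight], perm=[1, 0])
    res = g.op.MatMul(x, transposed)
    g.op.Reshape(res, new_shape, outputs="y")
    g.make_tensor_output("y", np.float32, (10, 1))
    g.optimize()
    onx = g.to_onnx()

    # Check the folded model numerically with the extended reference evaluator.
    x_val = np.random.randn(10, 4).astype(np.float32)
    got = ExtendedReferenceEvaluator(onx).run(None, {"X": x_val})[0]
    assert np.allclose(got, (x_val @ w.T).reshape((-1, 1)), atol=1e-5)
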
142 changes: 140 additions & 2 deletions _unittests/ut_graph_api/test_graph_builder.py
@@ -3,9 +3,12 @@
import unittest
import numpy as np
import onnx
from onnx.reference import ReferenceEvaluator
from onnx_array_api.ext_test_case import ExtTestCase
from onnx_array_api.graph_api.graph_builder import GraphBuilder
from onnx_array_api.graph_api.graph_builder import GraphBuilder, OptimizationOptions
from onnx_array_api.reference import (
from_array_extended,
ExtendedReferenceEvaluator as ReferenceEvaluator,
)


class TestGraphBuilder(ExtTestCase):
@@ -130,6 +133,35 @@ def test_constant_folding(self):
got = ref.run(None, feeds)
self.assertEqualArray(expected, got[0])

def test_constant_folding2(self):
g = GraphBuilder(
optimization_options=OptimizationOptions(constant_folding=True)
)

shape = (10, 4)
w = np.random.randn(*shape).astype(np.float32)
x = g.make_tensor_input("X", np.float32, shape)
weight = g.make_initializer(w)
cst = g.get_constant(weight)
self.assertEqualArray(w, cst)
one = g.make_initializer(np.array([-1, 1], dtype=np.int64))
transposed = g.make_node("Transpose", [weight], perm=[1, 0])
res = g.op.MatMul(x, transposed)
g.op.Reshape(res, one, outputs="y")
g.make_tensor_output("y", np.float32, (10, 1))

g.optimize()

onx = g.to_onnx()
node_types = [n.op_type for n in onx.graph.node]
self.assertNotIn("Transpose", node_types)
ref = ReferenceEvaluator(onx)
x = np.random.randn(*shape).astype(np.float32)
expected = (x @ w.T).reshape((-1, 1))
feeds = {"X": x}
got = ref.run(None, feeds)
self.assertEqualArray(expected, got[0])

def test_remove_identity(self):
with contextlib.redirect_stdout(io.StringIO()):
g = GraphBuilder(verbose=10)
@@ -238,6 +270,112 @@ def test_remove_unused_nodes_simple(self):
got = ref.run(None, feeds)
self.assertEqualArray(expected, got[0])

def test_constant_array(self):
with contextlib.redirect_stdout(io.StringIO()):
g = GraphBuilder(verbose=10)

shape = (10, 4)
w = np.random.randn(*shape).astype(np.float32)

x = g.make_tensor_input("X", np.float32, shape)
one = g.make_initializer(np.array([-1, 1], dtype=np.int64))
res = g.op.MatMul(x, w.T)
g.op.Reshape(res, one, outputs="y")
g.make_tensor_output("y", np.float32, (10, 1))
onx = g.to_onnx()
ref = ReferenceEvaluator(onx)
x = np.random.randn(*shape).astype(np.float32)
expected = (x @ w.T).reshape((-1, 1))
feeds = {"X": x}
got = ref.run(None, feeds)
self.assertEqualArray(expected, got[0])

def test_constant_array_2(self):
with contextlib.redirect_stdout(io.StringIO()):
g = GraphBuilder(verbose=10)

shape = (10, 4)
w = np.random.randn(*shape).astype(np.float32)

x = g.make_tensor_input("X", np.float32, shape)
one = g.make_initializer(np.array([-1, 1], dtype=np.int64))
opc = g.op.Constant(value=from_array_extended(w.T))
res = g.op.MatMul(x, opc)
g.op.Reshape(res, one, outputs="y")
g.make_tensor_output("y", np.float32, (10, 1))
self.assertTrue(g.has_shape("X"))
self.assertTrue(g.has_type("X"))
self.assertEqual(g.get_type("X"), 1)
self.assertEqual(g.get_shape("X"), (10, 4))
self.assertEqual(g.rank("X"), 2)
onx = g.to_onnx()
ref = ReferenceEvaluator(onx)
x = np.random.randn(*shape).astype(np.float32)
expected = (x @ w.T).reshape((-1, 1))
feeds = {"X": x}
got = ref.run(None, feeds)
self.assertEqualArray(expected, got[0])

def test_get_type(self):
g = GraphBuilder()
self.assertEqual(g._get_type(np.float32), onnx.TensorProto.FLOAT)
self.assertEqual(g._get_type(np.int64), onnx.TensorProto.INT64)
self.assertEqual(g._get_type(None), onnx.TensorProto.UNDEFINED)

def test_make_nodes_prefix(self):
g1 = GraphBuilder()
g1.make_tensor_input("X", np.float32, shape=None)
g1.op.Add("X", np.array([1], dtype=np.float32), outputs=["y"])
g1.make_tensor_output("y", np.float32, shape=None)

g = GraphBuilder()

shape = (10, 4)
w = np.random.randn(*shape).astype(np.float32)

x = g.make_tensor_input("X", np.float32, shape)
weight = g.make_initializer(w)
one = g.make_initializer(np.array([-1, 1], dtype=np.int64))
transposed = g.make_node("Transpose", [weight], perm=[1, 0])
res = g.op.MatMul(x, transposed)
res2 = g.make_nodes(g1, [res], ["k"], prefix="J")
g.op.Reshape(res2, one, outputs="y")
g.make_tensor_output("y", np.float32, (10, 1))
onx = g.to_onnx()
ref = ReferenceEvaluator(onx)
x = np.random.randn(*shape).astype(np.float32)
expected = (x @ w.T).reshape((-1, 1)) + 1
feeds = {"X": x}
got = ref.run(None, feeds)
self.assertEqualArray(expected, got[0])

def test_make_nodes_noprefix(self):
g1 = GraphBuilder()
g1.make_tensor_input("X", np.float32, shape=None)
g1.op.Add("X", np.array([1], dtype=np.float32), outputs=["y"])
g1.make_tensor_output("y", np.float32, shape=None)

g = GraphBuilder()

shape = (10, 4)
w = np.random.randn(*shape).astype(np.float32)

x = g.make_tensor_input("X", np.float32, shape)
weight = g.make_initializer(w)
one = g.make_initializer(np.array([-1, 1], dtype=np.int64))
transposed = g.make_node("Transpose", [weight], perm=[1, 0])
res = g.op.MatMul(x, transposed)
res2 = g.make_nodes(g1, [res], ["k"])
g.op.Reshape(res2, one, outputs="y")
g.make_tensor_output("y", np.float32, (10, 1))
onx = g.to_onnx()
ref = ReferenceEvaluator(onx)
x = np.random.randn(*shape).astype(np.float32)
expected = (x @ w.T).reshape((-1, 1)) + 1
feeds = {"X": x}
got = ref.run(None, feeds)
self.assertEqualArray(expected, got[0])


if __name__ == "__main__":
unittest.main(verbosity=2)
61 changes: 35 additions & 26 deletions onnx_array_api/graph_api/graph_builder.py
@@ -18,6 +18,18 @@
T = "TENSOR"


class OptimizationOptions:
def __init__(
self,
remove_unused: bool = True,
constant_folding: bool = False,
constant_size: int = 1024,
):
self.remove_unused = remove_unused
self.constant_folding = constant_folding
self.constant_size = constant_size


class Opset:
# defined for opset >= 18
# name: number of expected outputs
@@ -76,7 +88,7 @@ def make_node(
for i in inputs:
if not isinstance(i, str):
name = self.builder.unique_name("cst")
self.builder.make_initializer(i, name=name)
self.builder.make_initializer(i, name=name, exists=True)
new_inputs.append(name)
else:
new_inputs.append(i)
@@ -86,18 +98,6 @@
)


class OptimizationOptions:
def __init__(
self,
remove_unused: bool = True,
constant_folding: bool = False,
constant_size: int = 1024,
):
self.remove_unused = remove_unused
self.constant_folding = constant_folding
self.constant_size = constant_size


class GraphBuilder:
def __init__(
self,
@@ -304,12 +304,18 @@ def _get_type(self, elem_type: Any, exc: bool = True) -> int:
return elem_type

def make_initializer(
self, value: Any, name: str = "", external: bool = False
self, value: Any, name: str = "", external: bool = False, exists: bool = False
) -> str:
if external:
raise NotImplementedError("External initializers are not implemented yet.")
if name == "":
if exists:
raise ValueError("Undefined name cannot exist.")
name = self.unique_name("cst")
elif not exists:
if name in self._unique_names:
raise ValueError(f"{name!r} is already assigned.")
self._unique_names.add(name)
self.set_shape(name, value.shape)
self.set_type(name, self._get_type(value.dtype))
self.initializers_dict[name] = value
@@ -330,6 +336,9 @@ def make_tensor_input(
else:
self.input_names.append(name)
input_name = name
if name in self._unique_names:
raise ValueError(f"{name!r} is already assigned.")
self._unique_names.add(name)
self.current_input += 1
elem_type = self._get_type(elem_type)
self.inputs.append(oh.make_tensor_value_info(input_name, elem_type, shape))
@@ -397,15 +406,11 @@ def make_node(
try:
node = oh.make_node(op_type, inputs, output_names, domain=domain, **kwargs)
except TypeError as e:
iti = [type(i) for i in inputs]
ito = (
[type(o) for o in outputs]
if isinstance(outputs, (tuple, list))
else outputs
)
raise TypeError(
f"A node {op_type!r} cannot be created with "
f"inputs={inputs} (types={iti}), outputs={outputs} (types={ito}), "
f"inputs={inputs} (types={[type(i) for i in inputs]}), "
f"outputs={outputs} "
f"(types={[type(o) for o in outputs] if isinstance(outputs, (tuple, list)) else outputs}), "
f"domain={domain!r}, kwargs={kwargs}."
) from e
if attributes:
@@ -474,14 +479,18 @@ def make_nodes(
self.set_shape(name, builder._known_shapes[init])
self.set_type(name, builder._known_types[init])

assert len(input_names) == len(
builder.inputs
), f"Inconsistency between input_names={input_names} and inputs={builder.inputs}."
assert len(input_names) == len(builder.inputs), (
f"Inconsistency between input_names={input_names} "
f"and the other builder inputs={builder.inputs}."
)

for name, inp in zip(input_names, builder.inputs):
new_name = self.unique_name(f"{prefix}{inp.name}")
self.set_shape(new_name, builder.get_shape(inp.name))
self.set_type(new_name, builder.get_type(inp.name))
renaming[inp.name] = new_name
if builder.has_shape(inp.name):
self.set_shape(new_name, builder.get_shape(inp.name))
if builder.has_type(inp.name):
self.set_type(new_name, builder.get_type(inp.name))
self.make_node("Identity", [name], [new_name])

for node in builder.nodes: