From c74899f25c8e9d584bcabdc6c7fe0886816a4d67 Mon Sep 17 00:00:00 2001
From: Xavier Dupre
Date: Tue, 26 Dec 2023 19:38:39 +0100
Subject: [PATCH] Adds graph API to the tutorial

---
 _doc/tutorial/graph_api.rst               | 59 +++++++++++++++++++
 _doc/tutorial/index.rst                   |  1 +
 _doc/tutorial/onnx_api.rst                | 70 ++++++++++++++++-------
 onnx_array_api/graph_api/graph_builder.py |  2 +
 onnx_array_api/plotting/text_plot.py      | 24 +++-----
 5 files changed, 119 insertions(+), 37 deletions(-)
 create mode 100644 _doc/tutorial/graph_api.rst

diff --git a/_doc/tutorial/graph_api.rst b/_doc/tutorial/graph_api.rst
new file mode 100644
index 0000000..b373cc3
--- /dev/null
+++ b/_doc/tutorial/graph_api.rst
@@ -0,0 +1,59 @@
+.. _l-graph-api:
+
+=================================
+GraphBuilder: common API for ONNX
+=================================
+
+This is a very common way to build ONNX graphs. Two steps are tedious
+when building an ONNX graph by hand. The first one is to
+give a unique name to every intermediate result in the graph. The second
+one is the conversion from numpy arrays to onnx tensors. A *graph builder*,
+here implemented by class
+:class:`GraphBuilder <onnx_array_api.graph_api.GraphBuilder>`,
+makes these two frequent tasks easier.
+
+.. runpython::
+    :showcode:
+
+    import numpy as np
+    from onnx_array_api.graph_api import GraphBuilder
+    from onnx_array_api.plotting.text_plot import onnx_simple_text_plot
+
+    g = GraphBuilder()
+    g.make_tensor_input("X", np.float32, (None, None))
+    g.make_tensor_input("Y", np.float32, (None, None))
+    r1 = g.make_node("Sub", ["X", "Y"])  # the output name is chosen by the class,
+    # which ensures the name is unique
+    init = g.make_initializer(np.array([2], dtype=np.int64))  # the class automatically
+    # converts the array to a tensor
+    r2 = g.make_node("Pow", [r1, init])
+    g.make_node("ReduceSum", [r2], outputs=["Z"])  # the output name is given because
+    # the user wants to choose the name
+    g.make_tensor_output("Z", np.float32, (None, None))
+
+    onx = g.to_onnx()  # final conversion to onnx
+
+    print(onnx_simple_text_plot(onx))
+
+A simpler version of the same code produces the same graph.
+
+.. runpython::
+    :showcode:
+
+    import numpy as np
+    from onnx_array_api.graph_api import GraphBuilder
+    from onnx_array_api.plotting.text_plot import onnx_simple_text_plot
+
+    g = GraphBuilder()
+    g.make_tensor_input("X", np.float32, (None, None))
+    g.make_tensor_input("Y", np.float32, (None, None))
+    r1 = g.op.Sub("X", "Y")  # the method name indicates which operator to use;
+    # this form can be used when there is no ambiguity about the
+    # number of outputs
+    r2 = g.op.Pow(r1, np.array([2], dtype=np.int64))
+    g.op.ReduceSum(r2, outputs=["Z"])  # the user still has to specify the output name
+    g.make_tensor_output("Z", np.float32, (None, None))
+
+    onx = g.to_onnx()
+
+    print(onnx_simple_text_plot(onx))
diff --git a/_doc/tutorial/index.rst b/_doc/tutorial/index.rst
index e3ca8d7..f4cce00 100644
--- a/_doc/tutorial/index.rst
+++ b/_doc/tutorial/index.rst
@@ -7,6 +7,7 @@ Tutorial
     :maxdepth: 1

     onnx_api
+    graph_api
     light_api
     numpy_api
     benchmarks
diff --git a/_doc/tutorial/onnx_api.rst b/_doc/tutorial/onnx_api.rst
index f27eb05..a4f80be 100644
--- a/_doc/tutorial/onnx_api.rst
+++ b/_doc/tutorial/onnx_api.rst
@@ -584,37 +584,31 @@ The second part modifies it.

     onnx.save(gs.export_onnx(graph), "modified.onnx")

-numpy API for onnx
-++++++++++++++++++
+Graph Builder API
++++++++++++++++++

-See :ref:`l-numpy-api-onnx`. This API was introduced to create graphs
-by using numpy API. If a function is defined only with numpy,
-it should be possible to use the exact same code to create the
-corresponding onnx graph. That's what this API tries to achieve.
-It works with the exception of control flow. In that case, the function
-produces different onnx graphs depending on the execution path.
+See :ref:`l-graph-api`. This API is very similar to what *skl2onnx* implements.
+It is still about adding nodes to a graph, but some tedious tasks are automated,
+such as naming the intermediate results or converting numpy constants into onnx tensors.

 .. runpython::
     :showcode:

     import numpy as np
-    from onnx_array_api.npx import jit_onnx
+    from onnx_array_api.graph_api import GraphBuilder
     from onnx_array_api.plotting.text_plot import onnx_simple_text_plot

-    def l2_loss(x, y):
-        return ((x - y) ** 2).sum(keepdims=1)
-
-    jitted_myloss = jit_onnx(l2_loss)
-    dummy = np.array([0], dtype=np.float32)
-
-    # The function is executed. Only then a onnx graph is created.
-    # One is created depending on the input type.
-    jitted_myloss(dummy, dummy)
+    g = GraphBuilder()
+    g.make_tensor_input("X", np.float32, (None, None))
+    g.make_tensor_input("Y", np.float32, (None, None))
+    r1 = g.op.Sub("X", "Y")
+    r2 = g.op.Pow(r1, np.array([2], dtype=np.int64))
+    g.op.ReduceSum(r2, outputs=["Z"])
+    g.make_tensor_output("Z", np.float32, (None, None))
+
+    onx = g.to_onnx()
+
-
-    # get_onnx only works if it was executed once or at least with
-    # the same input type
-    model = jitted_myloss.get_onnx()
-    print(onnx_simple_text_plot(model))
+    print(onnx_simple_text_plot(onx))

 Light API
 +++++++++
@@ -647,3 +641,35 @@ There is no eager mode.
     )

     print(onnx_simple_text_plot(model))
+
+numpy API for onnx
+++++++++++++++++++
+
+See :ref:`l-numpy-api-onnx`. This API was introduced to create graphs
+by using the numpy API. If a function is defined only with numpy,
+it should be possible to use the exact same code to create the
+corresponding onnx graph. That's what this API tries to achieve.
+It works with the exception of control flow. In that case, the function
+produces different onnx graphs depending on the execution path.
+
+.. runpython::
+    :showcode:
+
+    import numpy as np
+    from onnx_array_api.npx import jit_onnx
+    from onnx_array_api.plotting.text_plot import onnx_simple_text_plot
+
+    def l2_loss(x, y):
+        return ((x - y) ** 2).sum(keepdims=1)
+
+    jitted_myloss = jit_onnx(l2_loss)
+    dummy = np.array([0], dtype=np.float32)
+
+    # The function is executed. Only then an onnx graph is created.
+    # One graph is created per input type.
+    jitted_myloss(dummy, dummy)
+
+    # get_onnx only works if it was executed once or at least with
+    # the same input type
+    model = jitted_myloss.get_onnx()
+    print(onnx_simple_text_plot(model))
diff --git a/onnx_array_api/graph_api/graph_builder.py b/onnx_array_api/graph_api/graph_builder.py
index b92d96b..0080cff 100644
--- a/onnx_array_api/graph_api/graph_builder.py
+++ b/onnx_array_api/graph_api/graph_builder.py
@@ -50,7 +50,9 @@ class Opset:
         "Mul": 1,
         "Log": 1,
         "Or": 1,
+        "Pow": 1,
         "Relu": 1,
+        "ReduceSum": 1,
         "Reshape": 1,
         "Shape": 1,
         "Slice": 1,
diff --git a/onnx_array_api/plotting/text_plot.py b/onnx_array_api/plotting/text_plot.py
index 36f9feb..9449acb 100644
--- a/onnx_array_api/plotting/text_plot.py
+++ b/onnx_array_api/plotting/text_plot.py
@@ -184,9 +184,7 @@ def iterate(nodes, node, depth=0, true_false=""):
         rows.extend(r)
     return "\n".join(rows)

-    raise NotImplementedError(  # pragma: no cover
-        f"Type {node.op_type!r} cannot be displayed."
-    )
+    raise NotImplementedError(f"Type {node.op_type!r} cannot be displayed.")


 def _append_succ_pred(
@@ -403,7 +401,7 @@ def _find_sequence(node_name, known, done):
         )

     if not sequences:
-        raise RuntimeError(  # pragma: no cover
+        raise RuntimeError(
             "Unexpected empty sequence (len(possibles)=%d, "
             "len(done)=%d, len(nodes)=%d). This is usually due to "
             "a name used both as result name and node node. "
@@ -434,7 +432,7 @@ def _find_sequence(node_name, known, done):
                 best = k

     if best is None:
-        raise RuntimeError(  # pragma: no cover
+        raise RuntimeError(
             f"Wrong implementation (len(sequence)={len(sequences)})."
         )
     if verbose:
@@ -453,7 +451,7 @@ def _find_sequence(node_name, known, done):
             known |= set(v.output)

     if len(new_nodes) != len(nodes):
-        raise RuntimeError(  # pragma: no cover
+        raise RuntimeError(
             "The returned new nodes are different. "
             "len(nodes=%d) != %d=len(new_nodes). done=\n%r"
             "\n%s\n----------\n%s"
@@ -486,7 +484,7 @@ def _find_sequence(node_name, known, done):
     n0s = set(n.name for n in nodes)
     n1s = set(n.name for n in new_nodes)
     if n0s != n1s:
-        raise RuntimeError(  # pragma: no cover
+        raise RuntimeError(
             "The returned new nodes are different.\n"
             "%r !=\n%r\ndone=\n%r"
             "\n----------\n%s\n----------\n%s"
@@ -758,7 +756,7 @@ def str_node(indent, node):
                 try:
                     val = str(to_array(att.t).tolist())
                 except TypeError as e:
-                    raise TypeError(  # pragma: no cover
+                    raise TypeError(
                         "Unable to display tensor type %r.\n%s"
                         % (att.type, str(att))
                     ) from e
@@ -853,9 +851,7 @@ def str_node(indent, node):
             if isinstance(att, str):
                 rows.append(f"attribute: {att!r}")
             else:
-                raise NotImplementedError(  # pragma: no cover
-                    "Not yet introduced in onnx."
-                )
+                raise NotImplementedError("Not yet introduced in onnx.")

     # initializer
     if hasattr(model, "initializer"):
@@ -894,7 +890,7 @@ def str_node(indent, node):

     try:
         nodes = reorder_nodes_for_display(model.node, verbose=verbose)
-    except RuntimeError as e:  # pragma: no cover
+    except RuntimeError as e:
         if raise_exc:
             raise e
         else:
@@ -924,9 +920,7 @@ def str_node(indent, node):
                 indent = mi
             if previous_indent is not None and indent < previous_indent:
                 if verbose:
-                    print(  # pragma: no cover
-                        f"[onnx_simple_text_plot] break2 {node.op_type}"
-                    )
+                    print(f"[onnx_simple_text_plot] break2 {node.op_type}")
                 add_break = True
             if not add_break and previous_out is not None:
                 if not (set(node.input) & previous_out):
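
Once the patch is applied, the tutorial snippets can be checked end to end. The sketch below rebuilds the same squared-error graph with the new GraphBuilder API and executes it with onnx's ReferenceEvaluator; the verification part (ReferenceEvaluator, the sample inputs and the numpy comparison) is an assumption added for illustration, not part of the patch.

    import numpy as np
    from onnx.reference import ReferenceEvaluator  # reference runtime shipped with onnx
    from onnx_array_api.graph_api import GraphBuilder

    # Build the same graph as in the tutorial: Z = ReduceSum((X - Y) ** 2).
    g = GraphBuilder()
    g.make_tensor_input("X", np.float32, (None, None))
    g.make_tensor_input("Y", np.float32, (None, None))
    r1 = g.op.Sub("X", "Y")
    r2 = g.op.Pow(r1, np.array([2], dtype=np.int64))
    g.op.ReduceSum(r2, outputs=["Z"])
    g.make_tensor_output("Z", np.float32, (None, None))
    onx = g.to_onnx()

    # Run the model and compare it with the equivalent numpy computation.
    x = np.arange(6, dtype=np.float32).reshape((2, 3))
    y = np.ones((2, 3), dtype=np.float32)
    (got,) = ReferenceEvaluator(onx).run(None, {"X": x, "Y": y})
    expected = ((x - y) ** 2).sum(keepdims=True)  # ReduceSum keeps reduced dims by default
    np.testing.assert_allclose(expected, got, rtol=1e-6)

With the text_plot changes from the patch applied as well, onnx_simple_text_plot(onx) should print the same Sub/Pow/ReduceSum sequence shown in the tutorial.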