Skip to content

Extends the ONNX export-to-code feature to support the inner API #47

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 12 commits into from
Nov 12, 2023
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
update code
  • Loading branch information
xadupre committed Nov 10, 2023
commit b6db1d326d668188b29a62fff4d252a5e132d9b9
2 changes: 1 addition & 1 deletion _doc/api/light_api.rst
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ Classes for the Translater
Emitter
+++++++

.. autoclass:: onnx_array_api.light_api.translate.Emitter
.. autoclass:: onnx_array_api.light_api.emitter.Emitter
:members:

EventType
Expand Down
6 changes: 6 additions & 0 deletions _unittests/ut_light_api/test_translate_classic.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,6 +136,12 @@ def test_transpose(self):
initializers = []
sparse_initializers = []
functions = []
initializers.append(
from_array(
np.array([-1, 1]).astype(np.int64),
name='r'
)
)
inputs.append(make_tensor_value_info('X', TensorProto.FLOAT, shape=[]))
nodes.append(
make_node(
Expand Down
134 changes: 134 additions & 0 deletions onnx_array_api/light_api/emitter.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,134 @@
from typing import Any, Dict, List, Tuple
from enum import IntEnum
import numpy as np
from .annotations import ELEMENT_TYPE_NAME


class EventType(IntEnum):
    """
    Kinds of events produced while walking an ONNX proto
    (model, graph or function); an ``Emitter`` converts every
    event into code instructions.
    """

    START = 0
    INPUT = 1
    OUTPUT = 2
    NODE = 3
    TO_ONNX = 4
    BEGIN_GRAPH = 5
    END_GRAPH = 6
    BEGIN_FUNCTION = 7
    END_FUNCTION = 8
    INITIALIZER = 9


class Emitter:
    """
    Converts events into proper code.

    Every call maps an :class:`EventType` plus keyword parameters to the
    list of instructions (strings) reproducing that event with the light
    API.  The base class raises :class:`ValueError` for events it does
    not handle (e.g. ``EventType.INITIALIZER``); subclasses may extend
    ``__call__`` to support more events.
    """

    def join(self, rows: List[str], single_line: bool = False) -> str:
        """
        Joins the produced instructions into a single piece of code.

        :param rows: instructions to join
        :param single_line: if True, chain them with ``.`` on one line,
            otherwise produce a parenthesized multi-line fluent chain
        :return: the joined code
        """
        if single_line:
            return ".".join(rows)
        return "".join(["(\n    ", "\n    .".join(rows), "\n)"])

    def __call__(self, event: EventType, **kwargs: Dict[str, Any]) -> List[str]:
        """
        Converts an event into an instruction.

        :param event: event kind
        :param kwargs: event parameters
        :return: list of instructions
        :raises ValueError: if *event* is not supported by this emitter
        """
        if event == EventType.START:
            # Work on a copy: the previous implementation removed the
            # default domain with ``del opsets[""]`` and therefore
            # mutated the dictionary provided by the caller.
            opsets = dict(kwargs.get("opsets", {}))
            opset = opsets.pop("", None)
            args = []
            if opset:
                args.append(f"opset={opset}")
            if opsets:
                args.append(f"opsets={opsets}")
            return [f"start({', '.join(args)})"]

        if event == EventType.TO_ONNX:
            return ["to_onnx()"]

        # Graph boundaries produce no instruction with the light API.
        if event == EventType.BEGIN_GRAPH:
            return []

        if event == EventType.END_GRAPH:
            return []

        if event == EventType.INPUT:
            name = kwargs["name"]
            elem_type = kwargs.get("elem_type", None)
            shape = kwargs.get("shape", None)
            # Emit the most specific ``vin`` call the parameters allow.
            if elem_type and shape:
                return [
                    f"vin({name!r}, elem_type=TensorProto.{ELEMENT_TYPE_NAME[elem_type]}, shape={shape!r})"
                ]
            if elem_type:
                return [
                    f"vin({name!r}, elem_type=TensorProto.{ELEMENT_TYPE_NAME[elem_type]})"
                ]
            return [f"vin({name!r})"]

        if event == EventType.OUTPUT:
            inst = []
            if "name" in kwargs:
                # Bring the named result on top of the stack first.
                name = kwargs["name"]
                inst.append(f"bring({name!r})")
            elem_type = kwargs.get("elem_type", None)
            shape = kwargs.get("shape", None)
            if elem_type and shape:
                inst.append(
                    f"vout(elem_type=TensorProto.{ELEMENT_TYPE_NAME[elem_type]}, shape={shape!r})"
                )
            elif elem_type:
                inst.append(
                    f"vout(elem_type=TensorProto.{ELEMENT_TYPE_NAME[elem_type]})"
                )
            else:
                inst.append("vout()")
            return inst

        if event == EventType.NODE:
            op_type = kwargs["op_type"]
            inputs = kwargs["inputs"]
            outputs = kwargs["outputs"]
            if kwargs.get("domain", "") != "":
                domain = kwargs["domain"]
                raise NotImplementedError(f"domain={domain!r} not supported yet.")
            atts = kwargs.get("atts", {})
            args = []
            for k, v in atts.items():
                before, vatt = self.render_attribute_value(v)
                if before:
                    # Attributes needing preliminary rows (graphs) are
                    # not handled by the base emitter.
                    raise NotImplementedError("Graph attribute not supported yet.")
                args.append(f"{k}={vatt}")

            str_inputs = ", ".join([f"{i!r}" for i in inputs])
            inst = [f"bring({str_inputs})", f"{op_type}({', '.join(args)})"]
            if len(outputs) == 1:
                inst.append(f"rename({outputs[0]!r})")
            else:
                str_outputs = ", ".join([f"{o!r}" for o in outputs])
                inst.append(f"rename({str_outputs})")
            return inst

        raise ValueError(f"Unexpected EventType {event}.")

    def render_attribute_value(self, value: Any) -> Tuple[List[str], str]:
        """
        Renders an attribute value into a string.

        :param value: pair whose last element is the extracted attribute
            value (int, float, list or numpy array)
        :return: rows to append before, actual value
        :raises ValueError: if the value cannot be rendered (e.g. a
            numpy array with more than one dimension)
        """
        v = value[-1]
        if isinstance(v, (int, float, list)):
            return [], str(v)
        if isinstance(v, np.ndarray):
            if len(v.shape) == 0:
                # Scalar array: ``str`` gives the plain number.
                return [], str(v)
            if len(v.shape) == 1:
                return [], str(v.tolist())
        raise ValueError(f"Unable to render an attribute {value}.")
18 changes: 17 additions & 1 deletion onnx_array_api/light_api/inner_emitter.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from typing import Any, Dict, List, Tuple
from onnx import AttributeProto
from onnx import AttributeProto, TensorProto
from .annotations import ELEMENT_TYPE_NAME
from .translate import Emitter, EventType, Translater

Expand Down Expand Up @@ -63,6 +63,22 @@ def __call__(self, event: EventType, **kwargs: Dict[str, Any]) -> List[str]:
]
return lines

if event == EventType.INITIALIZER:
name = kwargs["name"]
value = kwargs["value"]
dtype = {
TensorProto.FLOAT: "float32",
TensorProto.INT64: "int64",
}[kwargs["init"].data_type]
return [
"initializers.append(",
" from_array(",
f" np.{repr(value).strip()}.astype(np.{dtype}),",
f" name={name!r}",
" )",
")",
]

if event in (EventType.INPUT, EventType.OUTPUT):
container = "inputs" if event == EventType.INPUT else "outputs"
name = kwargs["name"]
Expand Down
151 changes: 20 additions & 131 deletions onnx_array_api/light_api/translate.py
Original file line number Diff line number Diff line change
@@ -1,138 +1,8 @@
from typing import Any, Dict, List, Optional, Tuple, Union
from enum import IntEnum
import numpy as np
from onnx import AttributeProto, FunctionProto, GraphProto, ModelProto, NodeProto
from onnx.numpy_helper import to_array
from .annotations import ELEMENT_TYPE_NAME


class EventType(IntEnum):
    """
    Kinds of events produced while walking an ONNX proto
    (model, graph or function); an ``Emitter`` converts every
    event into code instructions.
    """

    START = 0
    INPUT = 1
    OUTPUT = 2
    NODE = 3
    TO_ONNX = 4
    BEGIN_GRAPH = 5
    END_GRAPH = 6
    BEGIN_FUNCTION = 7
    END_FUNCTION = 8


class Emitter:
    """
    Converts event into proper code.

    Every call maps an :class:`EventType` plus keyword parameters to the
    list of instructions (strings) reproducing that event with the light
    API; ``join`` assembles those instructions into one piece of code.
    """

    def join(self, rows: List[str], single_line: bool = False) -> str:
        """Joins the rows, dot-chained on one line or as a
        parenthesized multi-line chain."""
        if single_line:
            return ".".join(rows)
        return "".join(["(\n    ", "\n    .".join(rows), "\n)"])

    def __call__(self, event: EventType, **kwargs: Dict[str, Any]) -> List[str]:
        """
        Converts an event into an instruction.

        :param event: event kind
        :param kwargs: event parameters
        :return: list of instructions
        :raises ValueError: if the event is not supported
        """
        if event == EventType.START:
            opsets = kwargs.get("opsets", {})
            opset = opsets.get("", None)
            if opset is not None:
                # NOTE(review): this deletes the key in the caller's
                # dictionary as well; consider working on a copy.
                del opsets[""]
            args = []
            if opset:
                args.append(f"opset={opset}")
            if opsets:
                args.append(f"opsets={opsets}")
            return [f"start({', '.join(args)})"]

        if event == EventType.TO_ONNX:
            return ["to_onnx()"]

        # Graph boundaries produce no instruction.
        if event == EventType.BEGIN_GRAPH:
            return []

        if event == EventType.END_GRAPH:
            return []

        if event == EventType.INPUT:
            name = kwargs["name"]
            elem_type = kwargs.get("elem_type", None)
            shape = kwargs.get("shape", None)
            # Emit the most specific ``vin`` call the parameters allow.
            if elem_type and shape:
                return [
                    f"vin({name!r}, elem_type=TensorProto.{ELEMENT_TYPE_NAME[elem_type]}, shape={shape!r})"
                ]
            if elem_type:
                return [
                    f"vin({name!r}, elem_type=TensorProto.{ELEMENT_TYPE_NAME[elem_type]})"
                ]
            return [f"vin({name!r})"]

        if event == EventType.OUTPUT:
            inst = []
            if "name" in kwargs:
                # Bring the named result on top of the stack first.
                name = kwargs["name"]
                inst.append(f"bring({name!r})")
            elem_type = kwargs.get("elem_type", None)
            shape = kwargs.get("shape", None)
            if elem_type and shape:
                inst.append(
                    f"vout(elem_type=TensorProto.{ELEMENT_TYPE_NAME[elem_type]}, shape={shape!r})"
                )
            elif elem_type:
                inst.append(
                    f"vout(elem_type=TensorProto.{ELEMENT_TYPE_NAME[elem_type]})"
                )
            else:
                inst.append("vout()")
            return inst

        if event == EventType.NODE:
            op_type = kwargs["op_type"]
            inputs = kwargs["inputs"]
            outputs = kwargs["outputs"]
            if kwargs.get("domain", "") != "":
                domain = kwargs["domain"]
                raise NotImplementedError(f"domain={domain!r} not supported yet.")
            atts = kwargs.get("atts", {})
            args = []
            for k, v in atts.items():
                before, vatt = self.render_attribute_value(v)
                if before:
                    # Attributes requiring preliminary rows (graphs) are
                    # not handled here.
                    raise NotImplementedError("Graph attribute not supported yet.")
                args.append(f"{k}={vatt}")

            str_inputs = ", ".join([f"{i!r}" for i in inputs])
            inst = [f"bring({str_inputs})", f"{op_type}({', '.join(args)})"]
            if len(outputs) == 1:
                inst.append(f"rename({outputs[0]!r})")
            else:
                str_outputs = ", ".join([f"{o!r}" for o in outputs])
                inst.append(f"rename({str_outputs})")
            return inst

        raise ValueError(f"Unexpected EventType {event}.")

    def render_attribute_value(self, value: Any) -> Tuple[List[str], str]:
        """
        Renders an attribute value into a string.

        :param value: pair whose last element is the extracted attribute
            value (int, float, list or numpy array)
        :return: rows to append before, actual value
        :raises ValueError: if the value cannot be rendered (e.g. a
            numpy array with more than one dimension)
        """
        v = value[-1]
        if isinstance(v, (int, float, list)):
            return [], str(v)
        if isinstance(v, np.ndarray):
            if len(v.shape) == 0:
                # Scalar array: ``str`` gives the plain number.
                return [], str(v)
            if len(v.shape) == 1:
                return [], str(v.tolist())
        raise ValueError(f"Unable to render an attribute {value}.")
from .emitter import EventType, Emitter


class Translater:
Expand Down Expand Up @@ -166,20 +36,39 @@ def export(self, as_str, single_line: bool = False) -> Union[str, List[str]]:
inputs = self.proto_.graph.input
outputs = self.proto_.graph.output
nodes = self.proto_.graph.node
initializers = self.proto_.graph.initializer
sparse_initializers = self.proto_.graph.sparse_initializer
elif isinstance(self.proto_, (FunctionProto, GraphProto)):
inputs = self.proto_.input
outputs = self.proto_.output
nodes = self.proto_.node
if isinstance(self.proto_, GraphProto):
initializers = self.proto_.initializer
sparse_initializers = self.proto_.sparse_initializer
else:
initializers = []
sparse_initializers = []
else:
raise ValueError(f"Unexpected type {type(self.proto_)} for proto.")

if len(sparse_initializers) != 0:
raise NotImplementedError("Sparse initializer not supported yet.")

rows.extend(
self.emitter(
EventType.BEGIN_FUNCTION
if isinstance(self.proto_, FunctionProto)
else EventType.BEGIN_GRAPH
)
)

for i in initializers:
rows.extend(
self.emitter(
EventType.INITIALIZER, name=i.name, init=i, value=to_array(i)
)
)

for i in inputs:
if isinstance(i, str):
rows.extend(self.emitter(EventType.INPUT, name=i))
Expand Down
pFad - Phonifier reborn

Pfad - The Proxy pFad of © 2024 Garber Painting. All rights reserved.

Note: This service is not intended for secure transactions such as banking, social media, email, or purchasing. Use at your own risk. We assume no liability whatsoever for broken pages.


Alternative Proxies:

Alternative Proxy

pFad Proxy

pFad v3 Proxy

pFad v4 Proxy