Columns:
identifier: string (length 1-155)
parameters: string (length 2-6.09k)
docstring: string (length 11-63.4k)
docstring_summary: string (length 0-63.4k)
function: string (length 29-99.8k)
function_tokens: sequence
start_point: sequence
end_point: sequence
language: string (1 distinct value)
docstring_language: string (length 2-7)
docstring_language_predictions: string (length 18-23)
is_langid_reliable: string (2 distinct values)
parse_helper
(attrs, attrs_name, alt_value=None)
Helper function to parse operator attributes in required format.
Helper function to parse operator attributes in required format.
def parse_helper(attrs, attrs_name, alt_value=None): """Helper function to parse operator attributes in required format.""" tuple_re = re.compile('\([0-9L|,| ]+\)') if not attrs: return alt_value attrs_str = None if attrs.get(attrs_name) is None else str(attrs.get(attrs_name)) if attrs_str is None: return alt_value attrs_match = tuple_re.search(attrs_str) if attrs_match is not None: if attrs_match.span() == (0, len(attrs_str)): dims = eval(attrs_str) return dims else: raise AttributeError("Malformed %s dimensions: %s" % (attrs_name, str(attrs_str))) return alt_value
[ "def", "parse_helper", "(", "attrs", ",", "attrs_name", ",", "alt_value", "=", "None", ")", ":", "tuple_re", "=", "re", ".", "compile", "(", "'\\([0-9L|,| ]+\\)'", ")", "if", "not", "attrs", ":", "return", "alt_value", "attrs_str", "=", "None", "if", "attrs", ".", "get", "(", "attrs_name", ")", "is", "None", "else", "str", "(", "attrs", ".", "get", "(", "attrs_name", ")", ")", "if", "attrs_str", "is", "None", ":", "return", "alt_value", "attrs_match", "=", "tuple_re", ".", "search", "(", "attrs_str", ")", "if", "attrs_match", "is", "not", "None", ":", "if", "attrs_match", ".", "span", "(", ")", "==", "(", "0", ",", "len", "(", "attrs_str", ")", ")", ":", "dims", "=", "eval", "(", "attrs_str", ")", "return", "dims", "else", ":", "raise", "AttributeError", "(", "\"Malformed %s dimensions: %s\"", "%", "(", "attrs_name", ",", "str", "(", "attrs_str", ")", ")", ")", "return", "alt_value" ]
[ 64, 0 ]
[ 79, 20 ]
python
en
['en', 'en', 'en']
True
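A minimal usage sketch of parse_helper as shown above; the attrs dict below is hypothetical and assumes the function (with its module-level import re) is in scope.
attrs = {"kernel": "(3, 3)", "no_bias": "True"}      # hypothetical MXNet-style attribute strings
parse_helper(attrs, "kernel")                        # -> (3, 3): the tuple-shaped string is matched and evaluated
parse_helper(attrs, "stride", alt_value=[1, 1])      # -> [1, 1]: a missing attribute falls back to alt_value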
transform_padding
(pad_width)
Helper function to convert padding format for pad operator.
Helper function to convert padding format for pad operator.
def transform_padding(pad_width): """Helper function to convert padding format for pad operator. """ num_pad_values = len(pad_width) onnx_pad_width = [0]*num_pad_values start_index = 0 # num_pad_values will always be multiple of 2 end_index = int(num_pad_values/2) for idx in range(0, num_pad_values): if idx % 2 == 0: onnx_pad_width[start_index] = pad_width[idx] start_index += 1 else: onnx_pad_width[end_index] = pad_width[idx] end_index += 1 return onnx_pad_width
[ "def", "transform_padding", "(", "pad_width", ")", ":", "num_pad_values", "=", "len", "(", "pad_width", ")", "onnx_pad_width", "=", "[", "0", "]", "*", "num_pad_values", "start_index", "=", "0", "# num_pad_values will always be multiple of 2", "end_index", "=", "int", "(", "num_pad_values", "/", "2", ")", "for", "idx", "in", "range", "(", "0", ",", "num_pad_values", ")", ":", "if", "idx", "%", "2", "==", "0", ":", "onnx_pad_width", "[", "start_index", "]", "=", "pad_width", "[", "idx", "]", "start_index", "+=", "1", "else", ":", "onnx_pad_width", "[", "end_index", "]", "=", "pad_width", "[", "idx", "]", "end_index", "+=", "1", "return", "onnx_pad_width" ]
[ 81, 0 ]
[ 98, 25 ]
python
en
['en', 'en', 'en']
True
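A small example of the layout change transform_padding performs; the pad_width values are made up. MXNet interleaves (begin, end) per axis, while ONNX expects all begins followed by all ends.
mx_pad_width = [0, 0, 0, 0, 1, 2, 3, 4]   # hypothetical NCHW padding: H gets (1, 2), W gets (3, 4)
transform_padding(mx_pad_width)           # -> [0, 0, 1, 3, 0, 0, 2, 4]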
convert_string_to_list
(string_val)
Helper function to convert string to list. Used to convert shape attribute string to list format.
Helper function to convert string to list. Used to convert shape attribute string to list format.
def convert_string_to_list(string_val): """Helper function to convert string to list. Used to convert shape attribute string to list format. """ result_list = [] list_string = string_val.split(',') for val in list_string: val = str(val.strip()) val = val.replace("(", "") val = val.replace(")", "") val = val.replace("L", "") val = val.replace("[", "") val = val.replace("]", "") if val not in ("", "None"): result_list.append(int(val)) return result_list
[ "def", "convert_string_to_list", "(", "string_val", ")", ":", "result_list", "=", "[", "]", "list_string", "=", "string_val", ".", "split", "(", "','", ")", "for", "val", "in", "list_string", ":", "val", "=", "str", "(", "val", ".", "strip", "(", ")", ")", "val", "=", "val", ".", "replace", "(", "\"(\"", ",", "\"\"", ")", "val", "=", "val", ".", "replace", "(", "\")\"", ",", "\"\"", ")", "val", "=", "val", ".", "replace", "(", "\"L\"", ",", "\"\"", ")", "val", "=", "val", ".", "replace", "(", "\"[\"", ",", "\"\"", ")", "val", "=", "val", ".", "replace", "(", "\"]\"", ",", "\"\"", ")", "if", "val", "not", "in", "(", "\"\"", ",", "\"None\"", ")", ":", "result_list", ".", "append", "(", "int", "(", "val", ")", ")", "return", "result_list" ]
[ 101, 0 ]
[ 118, 22 ]
python
en
['en', 'en', 'en']
True
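A usage sketch for convert_string_to_list, assuming the function above is in scope; the shape strings are hypothetical.
convert_string_to_list("(2L, 3L)")        # -> [2, 3]: parentheses and the legacy 'L' suffix are stripped
convert_string_to_list("(1, None)")       # -> [1]: empty and "None" entries are skipped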
get_boolean_attribute_value
(attrs, attr_name)
Helper function to convert a string version of Boolean attributes to integer for ONNX. Takes attribute dictionary and attr_name as parameters.
Helper function to convert a string version of Boolean attributes to integer for ONNX. Takes attribute dictionary and attr_name as parameters.
def get_boolean_attribute_value(attrs, attr_name): """ Helper function to convert a string version of Boolean attributes to integer for ONNX. Takes attribute dictionary and attr_name as parameters. """ return 1 if attrs.get(attr_name, 0) in ["True", "1"] else 0
[ "def", "get_boolean_attribute_value", "(", "attrs", ",", "attr_name", ")", ":", "return", "1", "if", "attrs", ".", "get", "(", "attr_name", ",", "0", ")", "in", "[", "\"True\"", ",", "\"1\"", "]", "else", "0" ]
[ 121, 0 ]
[ 127, 63 ]
python
en
['en', 'en', 'en']
True
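A short sketch of get_boolean_attribute_value on hypothetical attribute dictionaries.
get_boolean_attribute_value({"no_bias": "True"}, "no_bias")    # -> 1
get_boolean_attribute_value({"no_bias": "False"}, "no_bias")   # -> 0
get_boolean_attribute_value({}, "no_bias")                     # -> 0 (missing attribute defaults to 0)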
get_inputs
(node, kwargs, with_shapes=False)
Helper function to get inputs
Helper function to get inputs
def get_inputs(node, kwargs, with_shapes=False): """Helper function to get inputs""" name = node["name"] proc_nodes = kwargs["proc_nodes"] index_lookup = kwargs["index_lookup"] graph_shapes = kwargs["graph_shapes"] inputs = node["inputs"] attrs = node.get("attrs", {}) input_nodes = [] input_shapes = [] for ip in inputs: input_node_id = index_lookup[ip[0]] try: # ip[1] defines which output index to use input_nodes.append(proc_nodes[input_node_id].output[ip[1]]) except AttributeError: # fallback to the name attribute as output if the output attribute does not exist (e.g. for data nodes) input_nodes.append(proc_nodes[input_node_id].name) input_shapes.append(graph_shapes.get(input_nodes[-1])) if with_shapes: return name, input_nodes, input_shapes, attrs return name, input_nodes, attrs
[ "def", "get_inputs", "(", "node", ",", "kwargs", ",", "with_shapes", "=", "False", ")", ":", "name", "=", "node", "[", "\"name\"", "]", "proc_nodes", "=", "kwargs", "[", "\"proc_nodes\"", "]", "index_lookup", "=", "kwargs", "[", "\"index_lookup\"", "]", "graph_shapes", "=", "kwargs", "[", "\"graph_shapes\"", "]", "inputs", "=", "node", "[", "\"inputs\"", "]", "attrs", "=", "node", ".", "get", "(", "\"attrs\"", ",", "{", "}", ")", "input_nodes", "=", "[", "]", "input_shapes", "=", "[", "]", "for", "ip", "in", "inputs", ":", "input_node_id", "=", "index_lookup", "[", "ip", "[", "0", "]", "]", "try", ":", "# ip[1] defines which output index to use", "input_nodes", ".", "append", "(", "proc_nodes", "[", "input_node_id", "]", ".", "output", "[", "ip", "[", "1", "]", "]", ")", "except", "AttributeError", ":", "# fallback to the name attribute as output if the output attribute does not exist (e.g. for data nodes)", "input_nodes", ".", "append", "(", "proc_nodes", "[", "input_node_id", "]", ".", "name", ")", "input_shapes", ".", "append", "(", "graph_shapes", ".", "get", "(", "input_nodes", "[", "-", "1", "]", ")", ")", "if", "with_shapes", ":", "return", "name", ",", "input_nodes", ",", "input_shapes", ",", "attrs", "return", "name", ",", "input_nodes", ",", "attrs" ]
[ 130, 0 ]
[ 155, 35 ]
python
en
['en', 'nl', 'en']
True
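A sketch of the kwargs layout get_inputs expects; the concrete names, shapes, and lookup contents are hypothetical, and only the node dict keys ("name", "inputs", "attrs") follow the MXNet JSON format used above.
import onnx

data_node = onnx.helper.make_tensor_value_info("data", onnx.TensorProto.FLOAT, (1, 3, 224, 224))
kwargs = {
    "proc_nodes": [data_node],                    # ONNX protos already emitted for earlier MXNet nodes
    "index_lookup": [0],                          # MXNet node index -> index into proc_nodes
    "graph_shapes": {"data": (1, 3, 224, 224)},   # output name -> inferred shape
}
node = {"name": "relu0", "inputs": [[0, 0]], "attrs": {}}
get_inputs(node, kwargs)                          # -> ("relu0", ["data"], {}) via the .name fallback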
create_basic_op_node
(op_name, node, kwargs)
Helper function to create a basic operator node that doesn't contain op specific attrs
Helper function to create a basic operator node that doesn't contain op specific attrs
def create_basic_op_node(op_name, node, kwargs): """Helper function to create a basic operator node that doesn't contain op specific attrs""" name, input_nodes, _ = get_inputs(node, kwargs) node = onnx.helper.make_node( op_name, input_nodes, [name], name=name ) return [node]
[ "def", "create_basic_op_node", "(", "op_name", ",", "node", ",", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "_", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "op_name", ",", "input_nodes", ",", "[", "name", "]", ",", "name", "=", "name", ")", "return", "[", "node", "]" ]
[ 158, 0 ]
[ 169, 17 ]
python
en
['en', 'en', 'en']
True
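Continuing the hypothetical graph state from the get_inputs sketch above, this is what the attribute-free converters (Tanh, Cos, Sin, and similar below) ultimately produce.
node = {"name": "tanh0", "inputs": [[0, 0]], "attrs": {}}
create_basic_op_node("Tanh", node, kwargs)
# -> [one NodeProto: op_type "Tanh", inputs ["data"], outputs ["tanh0"], name "tanh0"]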
convert_weights_and_inputs
(node, **kwargs)
Helper function to convert weights and inputs.
Helper function to convert weights and inputs.
def convert_weights_and_inputs(node, **kwargs): """Helper function to convert weights and inputs. """ name, _, _ = get_inputs(node, kwargs) if kwargs["is_input"] is False: weights = kwargs["weights"] initializer = kwargs["initializer"] np_arr = weights[name] data_type = onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[np_arr.dtype] dims = np.shape(np_arr) tensor_node = onnx.helper.make_tensor_value_info(name, data_type, dims) initializer.append( onnx.helper.make_tensor( name=name, data_type=data_type, dims=dims, vals=np_arr.flatten().tolist(), raw=False, ) ) return [tensor_node] else: tval_node = onnx.helper.make_tensor_value_info(name, kwargs["in_type"], kwargs["in_shape"]) return [tval_node]
[ "def", "convert_weights_and_inputs", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "_", ",", "_", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "if", "kwargs", "[", "\"is_input\"", "]", "is", "False", ":", "weights", "=", "kwargs", "[", "\"weights\"", "]", "initializer", "=", "kwargs", "[", "\"initializer\"", "]", "np_arr", "=", "weights", "[", "name", "]", "data_type", "=", "onnx", ".", "mapping", ".", "NP_TYPE_TO_TENSOR_TYPE", "[", "np_arr", ".", "dtype", "]", "dims", "=", "np", ".", "shape", "(", "np_arr", ")", "tensor_node", "=", "onnx", ".", "helper", ".", "make_tensor_value_info", "(", "name", ",", "data_type", ",", "dims", ")", "initializer", ".", "append", "(", "onnx", ".", "helper", ".", "make_tensor", "(", "name", "=", "name", ",", "data_type", "=", "data_type", ",", "dims", "=", "dims", ",", "vals", "=", "np_arr", ".", "flatten", "(", ")", ".", "tolist", "(", ")", ",", "raw", "=", "False", ",", ")", ")", "return", "[", "tensor_node", "]", "else", ":", "tval_node", "=", "onnx", ".", "helper", ".", "make_tensor_value_info", "(", "name", ",", "kwargs", "[", "\"in_type\"", "]", ",", "kwargs", "[", "\"in_shape\"", "]", ")", "return", "[", "tval_node", "]" ]
[ 173, 0 ]
[ 200, 26 ]
python
en
['en', 'en', 'en']
True
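A hedged sketch of the graph-input branch (is_input=True) of convert_weights_and_inputs; all names and shapes below are hypothetical.
import onnx

kwargs_input = {
    "proc_nodes": [], "index_lookup": [], "graph_shapes": {},
    "is_input": True,
    "in_type": onnx.TensorProto.FLOAT,
    "in_shape": (1, 3, 224, 224),
}
node = {"name": "data", "inputs": [], "attrs": {}}
convert_weights_and_inputs(node, **kwargs_input)   # -> [ValueInfoProto for "data"], no initializer appended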
convert_convolution
(node, **kwargs)
Map MXNet's convolution operator attributes to onnx's Conv operator and return the created node.
Map MXNet's convolution operator attributes to onnx's Conv operator and return the created node.
def convert_convolution(node, **kwargs): """Map MXNet's convolution operator attributes to onnx's Conv operator and return the created node. """ name, input_nodes, attrs = get_inputs(node, kwargs) kernel_dims = list(parse_helper(attrs, "kernel")) stride_dims = list(parse_helper(attrs, "stride", [1, 1])) pad_dims = list(parse_helper(attrs, "pad", [0, 0])) num_group = int(attrs.get("num_group", 1)) dilations = list(parse_helper(attrs, "dilate", [1, 1])) pad_dims = pad_dims + pad_dims conv_node = onnx.helper.make_node( "Conv", inputs=input_nodes, outputs=[name], kernel_shape=kernel_dims, strides=stride_dims, dilations=dilations, pads=pad_dims, group=num_group, name=name ) return [conv_node]
[ "def", "convert_convolution", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "kernel_dims", "=", "list", "(", "parse_helper", "(", "attrs", ",", "\"kernel\"", ")", ")", "stride_dims", "=", "list", "(", "parse_helper", "(", "attrs", ",", "\"stride\"", ",", "[", "1", ",", "1", "]", ")", ")", "pad_dims", "=", "list", "(", "parse_helper", "(", "attrs", ",", "\"pad\"", ",", "[", "0", ",", "0", "]", ")", ")", "num_group", "=", "int", "(", "attrs", ".", "get", "(", "\"num_group\"", ",", "1", ")", ")", "dilations", "=", "list", "(", "parse_helper", "(", "attrs", ",", "\"dilate\"", ",", "[", "1", ",", "1", "]", ")", ")", "pad_dims", "=", "pad_dims", "+", "pad_dims", "conv_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Conv\"", ",", "inputs", "=", "input_nodes", ",", "outputs", "=", "[", "name", "]", ",", "kernel_shape", "=", "kernel_dims", ",", "strides", "=", "stride_dims", ",", "dilations", "=", "dilations", ",", "pads", "=", "pad_dims", ",", "group", "=", "num_group", ",", "name", "=", "name", ")", "return", "[", "conv_node", "]" ]
[ 204, 0 ]
[ 230, 22 ]
python
en
['en', 'en', 'en']
True
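A sketch of just the attribute handling inside convert_convolution; the attrs values are hypothetical. Note how the symmetric MXNet pad is doubled into the begin/end form ONNX's Conv expects.
attrs = {"kernel": "(3, 3)", "stride": "(2, 2)", "pad": "(1, 1)", "num_group": "1"}
kernel_dims = list(parse_helper(attrs, "kernel"))           # [3, 3]
stride_dims = list(parse_helper(attrs, "stride", [1, 1]))   # [2, 2]
pad_dims = list(parse_helper(attrs, "pad", [0, 0]))         # [1, 1]
pad_dims + pad_dims                                         # [1, 1, 1, 1] -> the ONNX "pads" attribute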
convert_deconvolution
(node, **kwargs)
Map MXNet's deconvolution operator attributes to onnx's ConvTranspose operator and return the created node.
Map MXNet's deconvolution operator attributes to onnx's ConvTranspose operator and return the created node.
def convert_deconvolution(node, **kwargs): """Map MXNet's deconvolution operator attributes to onnx's ConvTranspose operator and return the created node. """ name, inputs, attrs = get_inputs(node, kwargs) kernel_dims = list(parse_helper(attrs, "kernel")) stride_dims = list(parse_helper(attrs, "stride", [1, 1])) pad_dims = list(parse_helper(attrs, "pad", [0, 0])) num_group = int(attrs.get("num_group", 1)) dilations = list(parse_helper(attrs, "dilate", [1, 1])) adj_dims = list(parse_helper(attrs, "adj", [0, 0])) pad_dims = pad_dims + pad_dims deconv_node = onnx.helper.make_node( "ConvTranspose", inputs=inputs, outputs=[name], kernel_shape=kernel_dims, strides=stride_dims, dilations=dilations, output_padding=adj_dims, pads=pad_dims, group=num_group, name=name ) return [deconv_node]
[ "def", "convert_deconvolution", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "inputs", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "kernel_dims", "=", "list", "(", "parse_helper", "(", "attrs", ",", "\"kernel\"", ")", ")", "stride_dims", "=", "list", "(", "parse_helper", "(", "attrs", ",", "\"stride\"", ",", "[", "1", ",", "1", "]", ")", ")", "pad_dims", "=", "list", "(", "parse_helper", "(", "attrs", ",", "\"pad\"", ",", "[", "0", ",", "0", "]", ")", ")", "num_group", "=", "int", "(", "attrs", ".", "get", "(", "\"num_group\"", ",", "1", ")", ")", "dilations", "=", "list", "(", "parse_helper", "(", "attrs", ",", "\"dilate\"", ",", "[", "1", ",", "1", "]", ")", ")", "adj_dims", "=", "list", "(", "parse_helper", "(", "attrs", ",", "\"adj\"", ",", "[", "0", ",", "0", "]", ")", ")", "pad_dims", "=", "pad_dims", "+", "pad_dims", "deconv_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"ConvTranspose\"", ",", "inputs", "=", "inputs", ",", "outputs", "=", "[", "name", "]", ",", "kernel_shape", "=", "kernel_dims", ",", "strides", "=", "stride_dims", ",", "dilations", "=", "dilations", ",", "output_padding", "=", "adj_dims", ",", "pads", "=", "pad_dims", ",", "group", "=", "num_group", ",", "name", "=", "name", ")", "return", "[", "deconv_node", "]" ]
[ 234, 0 ]
[ 262, 24 ]
python
en
['en', 'en', 'en']
True
convert_crop
(node, **kwargs)
Map MXNet's crop operator attributes to onnx's Crop operator and return the created node.
Map MXNet's crop operator attributes to onnx's Crop operator and return the created node.
def convert_crop(node, **kwargs): """Map MXNet's crop operator attributes to onnx's Crop operator and return the created node. """ name, inputs, attrs = get_inputs(node, kwargs) start = np.array([0, 0, 0, 0], dtype=np.int64) # start indices are integers export_nodes = [] start_node = create_helper_tensor_node(start, name + '__starts', kwargs) export_nodes.extend(start_node) start_node = start_node[-1].name shape_node = create_helper_shape_node(inputs[1], inputs[1] + '__shape') export_nodes.extend(shape_node) shape_node = shape_node[-1].name crop_node = onnx.helper.make_node( "Slice", inputs=[inputs[0], name + '__starts', inputs[1] + '__shape'], # data, starts, ends outputs=[name], name=name ) logging.warning( "Using an experimental ONNX operator: Crop. " \ "Its definition can change.") export_nodes.extend([crop_node]) return export_nodes
[ "def", "convert_crop", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "inputs", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "start", "=", "np", ".", "array", "(", "[", "0", ",", "0", ",", "0", ",", "0", "]", ",", "dtype", "=", "np", ".", "int", ")", "# index是int类型", "export_nodes", "=", "[", "]", "start_node", "=", "create_helper_tensor_node", "(", "start", ",", "name", "+", "'__starts'", ",", "kwargs", ")", "export_nodes", ".", "extend", "(", "start_node", ")", "start_node", "=", "start_node", "[", "-", "1", "]", ".", "name", "shape_node", "=", "create_helper_shape_node", "(", "inputs", "[", "1", "]", ",", "inputs", "[", "1", "]", "+", "'__shape'", ")", "export_nodes", ".", "extend", "(", "shape_node", ")", "shape_node", "=", "shape_node", "[", "-", "1", "]", ".", "name", "crop_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Slice\"", ",", "inputs", "=", "[", "inputs", "[", "0", "]", ",", "name", "+", "'__starts'", ",", "inputs", "[", "1", "]", "+", "'__shape'", "]", ",", "# data、start、end", "outputs", "=", "[", "name", "]", ",", "name", "=", "name", ")", "logging", ".", "warning", "(", "\"Using an experimental ONNX operator: Crop. \"", "\"Its definition can change.\"", ")", "export_nodes", ".", "extend", "(", "[", "crop_node", "]", ")", "return", "export_nodes" ]
[ 298, 0 ]
[ 328, 23 ]
python
en
['en', 'en', 'en']
True
convert_upsample
(node, **kwargs)
Map MXNet's UpSampling operator attributes to onnx's Upsample operator and return the created node.
Map MXNet's UpSampling operator attributes to onnx's Upsample operator and return the created node.
def convert_upsample(node, **kwargs): """Map MXNet's UpSampling operator attributes to onnx's Upsample operator and return the created node. """ name, input_nodes, attrs = get_inputs(node, kwargs) sample_type = attrs.get('sample_type', 'nearest') sample_type = 'linear' if sample_type == 'bilinear' else sample_type scale = convert_string_to_list(attrs.get('scale')) scaleh = scalew = float(scale[0]) if len(scale) > 1: scaleh = float(scale[0]) scalew = float(scale[1]) scale = np.array([1.0, 1.0, scaleh, scalew], dtype=np.float32) roi = np.array([], dtype=np.float32) export_nodes = [] node_roi = create_helper_tensor_node(roi, name + 'roi', kwargs) export_nodes.extend(node_roi) node_roi = node_roi[-1].name node_sca = create_helper_tensor_node(scale, name + 'scale', kwargs) export_nodes.extend(node_sca) node_sca = node_sca[-1].name node = onnx.helper.make_node( 'Resize', inputs=[input_nodes[0], node_roi, node_sca], outputs=[name], coordinate_transformation_mode='asymmetric', mode=sample_type, nearest_mode='floor', name=name ) export_nodes.extend([node]) return export_nodes
[ "def", "convert_upsample", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "sample_type", "=", "attrs", ".", "get", "(", "'sample_type'", ",", "'nearest'", ")", "sample_type", "=", "'linear'", "if", "sample_type", "==", "'bilinear'", "else", "sample_type", "scale", "=", "convert_string_to_list", "(", "attrs", ".", "get", "(", "'scale'", ")", ")", "scaleh", "=", "scalew", "=", "float", "(", "scale", "[", "0", "]", ")", "if", "len", "(", "scale", ")", ">", "1", ":", "scaleh", "=", "float", "(", "scale", "[", "0", "]", ")", "scalew", "=", "float", "(", "scale", "[", "1", "]", ")", "scale", "=", "np", ".", "array", "(", "[", "1.0", ",", "1.0", ",", "scaleh", ",", "scalew", "]", ",", "dtype", "=", "np", ".", "float32", ")", "roi", "=", "np", ".", "array", "(", "[", "]", ",", "dtype", "=", "np", ".", "float32", ")", "export_nodes", "=", "[", "]", "node_roi", "=", "create_helper_tensor_node", "(", "roi", ",", "name", "+", "'roi'", ",", "kwargs", ")", "export_nodes", ".", "extend", "(", "node_roi", ")", "node_roi", "=", "node_roi", "[", "-", "1", "]", ".", "name", "node_sca", "=", "create_helper_tensor_node", "(", "scale", ",", "name", "+", "'scale'", ",", "kwargs", ")", "export_nodes", ".", "extend", "(", "node_sca", ")", "node_sca", "=", "node_sca", "[", "-", "1", "]", ".", "name", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'Resize'", ",", "inputs", "=", "[", "input_nodes", "[", "0", "]", ",", "node_roi", ",", "node_sca", "]", ",", "outputs", "=", "[", "name", "]", ",", "coordinate_transformation_mode", "=", "'asymmetric'", ",", "mode", "=", "sample_type", ",", "nearest_mode", "=", "'floor'", ",", "name", "=", "name", ")", "export_nodes", ".", "extend", "(", "[", "node", "]", ")", "return", "export_nodes" ]
[ 332, 0 ]
[ 370, 23 ]
python
en
['en', 'en', 'en']
True
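How the UpSampling scale attribute becomes the Resize scales tensor; the attribute value is hypothetical. A single scale is applied to both H and W, while the N and C axes stay at 1.0.
scale = convert_string_to_list("2")        # [2]
scaleh = scalew = float(scale[0])          # 2.0
# -> scales tensor passed to Resize: [1.0, 1.0, 2.0, 2.0] (NCHW)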
convert_fully_connected
(node, **kwargs)
Map MXNet's FullyConnected operator attributes to onnx's Gemm operator and return the created node.
Map MXNet's FullyConnected operator attributes to onnx's Gemm operator and return the created node.
def convert_fully_connected(node, **kwargs): """Map MXNet's FullyConnected operator attributes to onnx's Gemm operator and return the created node. """ name, input_nodes, attrs = get_inputs(node, kwargs) initializer = kwargs["initializer"] no_bias = get_boolean_attribute_value(attrs, "no_bias") fcnode = [] op_name = "flatten_" + str(kwargs["idx"]) flatten_node = onnx.helper.make_node( 'Flatten', inputs=[input_nodes[0]], outputs=[op_name], name=op_name ) input_nodes[0] = op_name fcnode.append(flatten_node) if no_bias: data_type = onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[np.dtype('int64')] bias_name = "bias" + str(kwargs["idx"]) tensor_node = onnx.helper.make_tensor_value_info(bias_name, data_type, (1,)) initializer.append( onnx.helper.make_tensor( name=bias_name, data_type=data_type, dims=(1,), vals=[0], raw=False, ) ) input_nodes.append(bias_name) fcnode.append(tensor_node) node = onnx.helper.make_node( "Gemm", input_nodes, # input (A, B, C) - C can be in place [name], # output alpha=1.0, beta=1.0, transA=False, transB=True, name=name ) fcnode.append(node) return fcnode
[ "def", "convert_fully_connected", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "initializer", "=", "kwargs", "[", "\"initializer\"", "]", "no_bias", "=", "get_boolean_attribute_value", "(", "attrs", ",", "\"no_bias\"", ")", "fcnode", "=", "[", "]", "op_name", "=", "\"flatten_\"", "+", "str", "(", "kwargs", "[", "\"idx\"", "]", ")", "flatten_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'Flatten'", ",", "inputs", "=", "[", "input_nodes", "[", "0", "]", "]", ",", "outputs", "=", "[", "op_name", "]", ",", "name", "=", "op_name", ")", "input_nodes", "[", "0", "]", "=", "op_name", "fcnode", ".", "append", "(", "flatten_node", ")", "if", "no_bias", ":", "data_type", "=", "onnx", ".", "mapping", ".", "NP_TYPE_TO_TENSOR_TYPE", "[", "np", ".", "dtype", "(", "'int64'", ")", "]", "bias_name", "=", "\"bias\"", "+", "str", "(", "kwargs", "[", "\"idx\"", "]", ")", "tensor_node", "=", "onnx", ".", "helper", ".", "make_tensor_value_info", "(", "bias_name", ",", "data_type", ",", "(", "1", ",", ")", ")", "initializer", ".", "append", "(", "onnx", ".", "helper", ".", "make_tensor", "(", "name", "=", "bias_name", ",", "data_type", "=", "data_type", ",", "dims", "=", "(", "1", ",", ")", ",", "vals", "=", "[", "0", "]", ",", "raw", "=", "False", ",", ")", ")", "input_nodes", ".", "append", "(", "bias_name", ")", "fcnode", ".", "append", "(", "tensor_node", ")", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Gemm\"", ",", "input_nodes", ",", "# input (A, B, C) - C can be in place", "[", "name", "]", ",", "# output", "alpha", "=", "1.0", ",", "beta", "=", "1.0", ",", "transA", "=", "False", ",", "transB", "=", "True", ",", "name", "=", "name", ")", "fcnode", ".", "append", "(", "node", ")", "return", "fcnode" ]
[ 435, 0 ]
[ 487, 17 ]
python
en
['en', 'en', 'en']
True
convert_batchnorm
(node, **kwargs)
Map MXNet's BatchNorm operator attributes to onnx's BatchNormalization operator and return the created node.
Map MXNet's BatchNorm operator attributes to onnx's BatchNormalization operator and return the created node.
def convert_batchnorm(node, **kwargs): """Map MXNet's BatchNorm operator attributes to onnx's BatchNormalization operator and return the created node. """ name, input_nodes, attrs = get_inputs(node, kwargs) momentum = float(attrs.get("momentum", 0.9)) eps = float(attrs.get("eps", 0.001)) bn_node = onnx.helper.make_node( "BatchNormalization", input_nodes, [name], name=name, epsilon=eps, momentum=momentum, # MXNet computes mean and variance per channel for batchnorm. # Default for onnx is across all spatial features. Relying on default # ONNX behavior of spatial=1 for ONNX opset 8 and below. As the spatial # attribute is deprecated in opset 9 and above, not explicitly encoding it. ) return [bn_node]
[ "def", "convert_batchnorm", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "momentum", "=", "float", "(", "attrs", ".", "get", "(", "\"momentum\"", ",", "0.9", ")", ")", "eps", "=", "float", "(", "attrs", ".", "get", "(", "\"eps\"", ",", "0.001", ")", ")", "bn_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"BatchNormalization\"", ",", "input_nodes", ",", "[", "name", "]", ",", "name", "=", "name", ",", "epsilon", "=", "eps", ",", "momentum", "=", "momentum", ",", "# MXNet computes mean and variance per channel for batchnorm.", "# Default for onnx is across all spatial features. Relying on default", "# ONNX behavior of spatial=1 for ONNX opset 8 and below. As the spatial", "# attribute is deprecated in opset 9 and above, not explicitly encoding it.", ")", "return", "[", "bn_node", "]" ]
[ 491, 0 ]
[ 512, 20 ]
python
en
['en', 'cs', 'en']
True
convert_tanh
(node, **kwargs)
Map MXNet's tanh operator attributes to onnx's Tanh operator and return the created node.
Map MXNet's tanh operator attributes to onnx's Tanh operator and return the created node.
def convert_tanh(node, **kwargs): """Map MXNet's tanh operator attributes to onnx's Tanh operator and return the created node. """ return create_basic_op_node('Tanh', node, kwargs)
[ "def", "convert_tanh", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Tanh'", ",", "node", ",", "kwargs", ")" ]
[ 516, 0 ]
[ 520, 53 ]
python
en
['en', 'en', 'en']
True
convert_cos
(node, **kwargs)
Map MXNet's cos operator attributes to onnx's Cos operator and return the created node.
Map MXNet's cos operator attributes to onnx's Cos operator and return the created node.
def convert_cos(node, **kwargs): """Map MXNet's cos operator attributes to onnx's Cos operator and return the created node. """ return create_basic_op_node('Cos', node, kwargs)
[ "def", "convert_cos", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Cos'", ",", "node", ",", "kwargs", ")" ]
[ 523, 0 ]
[ 527, 52 ]
python
en
['en', 'en', 'en']
True
convert_sin
(node, **kwargs)
Map MXNet's sin operator attributes to onnx's Sin operator and return the created node.
Map MXNet's sin operator attributes to onnx's Sin operator and return the created node.
def convert_sin(node, **kwargs): """Map MXNet's sin operator attributes to onnx's Sin operator and return the created node. """ return create_basic_op_node('Sin', node, kwargs)
[ "def", "convert_sin", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Sin'", ",", "node", ",", "kwargs", ")" ]
[ 530, 0 ]
[ 534, 52 ]
python
en
['en', 'en', 'en']
True
convert_tan
(node, **kwargs)
Map MXNet's tan operator attributes to onnx's tan operator and return the created node.
Map MXNet's tan operator attributes to onnx's tan operator and return the created node.
def convert_tan(node, **kwargs): """Map MXNet's tan operator attributes to onnx's tan operator and return the created node. """ return create_basic_op_node('Tan', node, kwargs)
[ "def", "convert_tan", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Tan'", ",", "node", ",", "kwargs", ")" ]
[ 537, 0 ]
[ 541, 52 ]
python
en
['id', 'en', 'en']
True
convert_acos
(node, **kwargs)
Map MXNet's acos operator attributes to onnx's acos operator and return the created node.
Map MXNet's acos operator attributes to onnx's acos operator and return the created node.
def convert_acos(node, **kwargs): """Map MXNet's acos operator attributes to onnx's acos operator and return the created node. """ return create_basic_op_node('Acos', node, kwargs)
[ "def", "convert_acos", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Acos'", ",", "node", ",", "kwargs", ")" ]
[ 544, 0 ]
[ 548, 53 ]
python
en
['en', 'en', 'en']
True
convert_asin
(node, **kwargs)
Map MXNet's asin operator attributes to onnx's asin operator and return the created node.
Map MXNet's asin operator attributes to onnx's asin operator and return the created node.
def convert_asin(node, **kwargs): """Map MXNet's asin operator attributes to onnx's asin operator and return the created node. """ return create_basic_op_node('Asin', node, kwargs)
[ "def", "convert_asin", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Asin'", ",", "node", ",", "kwargs", ")" ]
[ 551, 0 ]
[ 555, 53 ]
python
en
['en', 'en', 'en']
True
convert_atan
(node, **kwargs)
Map MXNet's atan operator attributes to onnx's atan operator and return the created node.
Map MXNet's atan operator attributes to onnx's atan operator and return the created node.
def convert_atan(node, **kwargs): """Map MXNet's atan operator attributes to onnx's atan operator and return the created node. """ return create_basic_op_node('Atan', node, kwargs)
[ "def", "convert_atan", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Atan'", ",", "node", ",", "kwargs", ")" ]
[ 558, 0 ]
[ 562, 53 ]
python
en
['en', 'su', 'en']
True
convert_sigmoid
(node, **kwargs)
Map MXNet's sigmoid operator attributes to onnx's Sigmoid operator and return the created node.
Map MXNet's sigmoid operator attributes to onnx's Sigmoid operator and return the created node.
def convert_sigmoid(node, **kwargs): """Map MXNet's sigmoid operator attributes to onnx's Sigmoid operator and return the created node. """ return create_basic_op_node('Sigmoid', node, kwargs)
[ "def", "convert_sigmoid", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Sigmoid'", ",", "node", ",", "kwargs", ")" ]
[ 566, 0 ]
[ 570, 56 ]
python
en
['en', 'en', 'en']
True
convert_relu
(node, **kwargs)
Map MXNet's relu operator attributes to onnx's Relu operator and return the created node.
Map MXNet's relu operator attributes to onnx's Relu operator and return the created node.
def convert_relu(node, **kwargs): """Map MXNet's relu operator attributes to onnx's Relu operator and return the created node. """ return create_basic_op_node('Relu', node, kwargs)
[ "def", "convert_relu", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Relu'", ",", "node", ",", "kwargs", ")" ]
[ 573, 0 ]
[ 577, 53 ]
python
en
['en', 'en', 'en']
True
convert_activation
(node, **kwargs)
Map MXNet's Activation operator attributes to onnx's Tanh/Relu operator and return the created node.
Map MXNet's Activation operator attributes to onnx's Tanh/Relu operator and return the created node.
def convert_activation(node, **kwargs): """Map MXNet's Activation operator attributes to onnx's Tanh/Relu operator and return the created node. """ name, input_nodes, attrs = get_inputs(node, kwargs) act_type = attrs["act_type"] # Map MXNet act_type strings to ONNX operator names; most are simply mxnet_name.title(), with softrelu -> Softplus as the exception. act_types = { "tanh": "Tanh", "relu": "Relu", "sigmoid": "Sigmoid", "softrelu": "Softplus", "softsign": "Softsign" } act_name = act_types.get(act_type) if act_name: node = onnx.helper.make_node( act_name, input_nodes, [name], name=name ) else: raise AttributeError( "Activation %s not implemented or recognized in the converter" % act_type ) return [node]
[ "def", "convert_activation", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "act_type", "=", "attrs", "[", "\"act_type\"", "]", "# Creating a dictionary here, but if this titlecase pattern", "# mxnet_name.title()", "act_types", "=", "{", "\"tanh\"", ":", "\"Tanh\"", ",", "\"relu\"", ":", "\"Relu\"", ",", "\"sigmoid\"", ":", "\"Sigmoid\"", ",", "\"softrelu\"", ":", "\"Softplus\"", ",", "\"softsign\"", ":", "\"Softsign\"", "}", "act_name", "=", "act_types", ".", "get", "(", "act_type", ")", "if", "act_name", ":", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "act_name", ",", "input_nodes", ",", "[", "name", "]", ",", "name", "=", "name", ")", "else", ":", "raise", "AttributeError", "(", "\"Activation %s not implemented or recognized in the converter\"", "%", "act_type", ")", "return", "[", "node", "]" ]
[ 580, 0 ]
[ 611, 17 ]
python
en
['en', 'en', 'en']
True
convert_pad
(node, **kwargs)
Map MXNet's pad operator attributes to onnx's Pad operator and return the created node.
Map MXNet's pad operator attributes to onnx's Pad operator and return the created node.
def convert_pad(node, **kwargs): """Map MXNet's pad operator attributes to onnx's Pad operator and return the created node. """ name, input_nodes, attrs = get_inputs(node, kwargs) mxnet_pad_width = convert_string_to_list(attrs.get("pad_width")) onnx_pad_width = transform_padding(mxnet_pad_width) pad_mode = attrs.get("mode") if pad_mode == "constant": pad_value = float(attrs.get("constant_value")) \ if "constant_value" in attrs else 0.0 node = onnx.helper.make_node( 'Pad', inputs=input_nodes, outputs=[name], mode='constant', value=pad_value, pads=onnx_pad_width, name=name ) else: node = onnx.helper.make_node( 'Pad', inputs=input_nodes, outputs=[name], mode=pad_mode, pads=onnx_pad_width, name=name ) return [node]
[ "def", "convert_pad", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "mxnet_pad_width", "=", "convert_string_to_list", "(", "attrs", ".", "get", "(", "\"pad_width\"", ")", ")", "onnx_pad_width", "=", "transform_padding", "(", "mxnet_pad_width", ")", "pad_mode", "=", "attrs", ".", "get", "(", "\"mode\"", ")", "if", "pad_mode", "==", "\"constant\"", ":", "pad_value", "=", "float", "(", "attrs", ".", "get", "(", "\"constant_value\"", ")", ")", "if", "\"constant_value\"", "in", "attrs", "else", "0.0", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'Pad'", ",", "inputs", "=", "input_nodes", ",", "outputs", "=", "[", "name", "]", ",", "mode", "=", "'constant'", ",", "value", "=", "pad_value", ",", "pads", "=", "onnx_pad_width", ",", "name", "=", "name", ")", "else", ":", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'Pad'", ",", "inputs", "=", "input_nodes", ",", "outputs", "=", "[", "name", "]", ",", "mode", "=", "pad_mode", ",", "pads", "=", "onnx_pad_width", ",", "name", "=", "name", ")", "return", "[", "node", "]" ]
[ 615, 0 ]
[ 648, 17 ]
python
en
['en', 'en', 'en']
True
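End-to-end pad_width handling as used by convert_pad above; the attribute values are hypothetical.
attrs = {"mode": "constant", "constant_value": "0.5", "pad_width": "(0, 0, 0, 0, 1, 1, 2, 2)"}
mx_pad = convert_string_to_list(attrs["pad_width"])   # [0, 0, 0, 0, 1, 1, 2, 2]
transform_padding(mx_pad)                             # [0, 0, 1, 2, 0, 0, 1, 2] -> ONNX "pads"
float(attrs.get("constant_value"))                    # 0.5 -> ONNX "value"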
create_helper_tensor_node
(input_vals, output_name, kwargs)
create extra tensor node from numpy values
create extra tensor node from numpy values
def create_helper_tensor_node(input_vals, output_name, kwargs): """create extra tensor node from numpy values""" data_type = onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[input_vals.dtype] tensor_node = onnx.helper.make_tensor_value_info( name=output_name, elem_type=data_type, shape=input_vals.shape ) kwargs["initializer"].append( onnx.helper.make_tensor( name=output_name, data_type=data_type, dims=input_vals.shape, vals=input_vals.flatten(), raw=False, ) ) return [tensor_node]
[ "def", "create_helper_tensor_node", "(", "input_vals", ",", "output_name", ",", "kwargs", ")", ":", "data_type", "=", "onnx", ".", "mapping", ".", "NP_TYPE_TO_TENSOR_TYPE", "[", "input_vals", ".", "dtype", "]", "tensor_node", "=", "onnx", ".", "helper", ".", "make_tensor_value_info", "(", "name", "=", "output_name", ",", "elem_type", "=", "data_type", ",", "shape", "=", "input_vals", ".", "shape", ")", "kwargs", "[", "\"initializer\"", "]", ".", "append", "(", "onnx", ".", "helper", ".", "make_tensor", "(", "name", "=", "output_name", ",", "data_type", "=", "data_type", ",", "dims", "=", "input_vals", ".", "shape", ",", "vals", "=", "input_vals", ".", "flatten", "(", ")", ",", "raw", "=", "False", ",", ")", ")", "return", "[", "tensor_node", "]" ]
[ 650, 0 ]
[ 669, 24 ]
python
en
['en', 'en', 'en']
True
create_helper_reshape_node
(input_name, output_name, shape, kwargs)
create extra reshape node with static shape
create extra reshape node with static shape
def create_helper_reshape_node(input_name, output_name, shape, kwargs): """create extra reshape node with static shape""" shape_tensor_node, = create_helper_tensor_node( np.asarray(shape, dtype=np.int64), output_name + "__shape", kwargs ) reshape_node = onnx.helper.make_node( "Reshape", inputs=[input_name, shape_tensor_node.name], outputs=[output_name], name=output_name ) return [shape_tensor_node, reshape_node]
[ "def", "create_helper_reshape_node", "(", "input_name", ",", "output_name", ",", "shape", ",", "kwargs", ")", ":", "shape_tensor_node", ",", "=", "create_helper_tensor_node", "(", "np", ".", "asarray", "(", "shape", ",", "dtype", "=", "np", ".", "int64", ")", ",", "output_name", "+", "\"__shape\"", ",", "kwargs", ")", "reshape_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Reshape\"", ",", "inputs", "=", "[", "input_name", ",", "shape_tensor_node", ".", "name", "]", ",", "outputs", "=", "[", "output_name", "]", ",", "name", "=", "output_name", ")", "return", "[", "shape_tensor_node", ",", "reshape_node", "]" ]
[ 671, 0 ]
[ 683, 44 ]
python
en
['en', 'en', 'en']
True
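A hypothetical call to create_helper_reshape_node; it returns both the constant shape tensor and the Reshape node, and appends the int64 shape initializer to kwargs["initializer"].
kwargs = {"initializer": []}                  # minimal stand-in for the exporter's kwargs
nodes = create_helper_reshape_node("fc_out", "fc_out_2d", [1, -1], kwargs)
[n.name for n in nodes]                       # ['fc_out_2d__shape', 'fc_out_2d']
len(kwargs["initializer"])                    # 1: the constant shape tensor (1, -1)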
create_helper_trans_node
(input_name, output_name, perm=None)
create extra transpose node
create extra transpose node
def create_helper_trans_node(input_name, output_name, perm=None): """create extra transpose node""" attrs = {} if perm is not None: attrs['perm'] = perm trans_node = onnx.helper.make_node( 'Transpose', inputs=[input_name], outputs=[output_name], name=output_name, **attrs ) return [trans_node]
[ "def", "create_helper_trans_node", "(", "input_name", ",", "output_name", ",", "perm", "=", "None", ")", ":", "attrs", "=", "{", "}", "if", "perm", "is", "not", "None", ":", "attrs", "[", "'perm'", "]", "=", "perm", "trans_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'Transpose'", ",", "inputs", "=", "[", "input_name", "]", ",", "outputs", "=", "[", "output_name", "]", ",", "name", "=", "output_name", ",", "*", "*", "attrs", ")", "return", "[", "trans_node", "]" ]
[ 685, 0 ]
[ 697, 23 ]
python
en
['es', 'pt', 'en']
False
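A small sketch of create_helper_trans_node with an explicit permutation; the tensor names are hypothetical.
trans_node, = create_helper_trans_node("weight", "weight_T", perm=[1, 0])
# -> Transpose node: inputs ["weight"], outputs ["weight_T"], perm attribute [1, 0]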
create_helper_concat_node
(inputs, output_name, axis=0)
create extra concat node
create extra concat node
def create_helper_concat_node(inputs, output_name, axis=0): """create extra concat node""" concat_node = onnx.helper.make_node( "Concat", inputs=inputs, outputs=[output_name], name=output_name, axis=axis, ) return [concat_node]
[ "def", "create_helper_concat_node", "(", "inputs", ",", "output_name", ",", "axis", "=", "0", ")", ":", "concat_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Concat\"", ",", "inputs", "=", "inputs", ",", "outputs", "=", "[", "output_name", "]", ",", "name", "=", "output_name", ",", "axis", "=", "axis", ",", ")", "return", "[", "concat_node", "]" ]
[ 699, 0 ]
[ 708, 24 ]
python
es
['es', 'la', 'it']
False
create_helper_expand_node
(input_name, output_name, expand_shape)
create extra expand node
create extra expand node
def create_helper_expand_node(input_name, output_name, expand_shape): """create extra expand node""" expand_node = onnx.helper.make_node( "Expand", inputs=[input_name, expand_shape], outputs=[output_name], name=output_name, ) return [expand_node]
[ "def", "create_helper_expand_node", "(", "input_name", ",", "output_name", ",", "expand_shape", ")", ":", "expand_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Expand\"", ",", "inputs", "=", "[", "input_name", ",", "expand_shape", "]", ",", "outputs", "=", "[", "output_name", "]", ",", "name", "=", "output_name", ",", ")", "return", "[", "expand_node", "]" ]
[ 710, 0 ]
[ 718, 24 ]
python
en
['es', 'pt', 'en']
False
create_helper_gather_node
( input_name, output_name, indices, kwargs, axis=None )
create extra gather node with static indices
create extra gather node with static indices
def create_helper_gather_node( input_name, output_name, indices, kwargs, axis=None ): """create extra gather node with static indices""" attrs = {} if axis is not None: attrs['axis'] = axis gather_tensor_node, = create_helper_tensor_node( np.asarray(indices, np.int64), output_name + "__indices", kwargs ) gather_node = onnx.helper.make_node( "Gather", inputs=[input_name, gather_tensor_node.name], outputs=[output_name], name=output_name, **attrs ) return [gather_tensor_node, gather_node]
[ "def", "create_helper_gather_node", "(", "input_name", ",", "output_name", ",", "indices", ",", "kwargs", ",", "axis", "=", "None", ")", ":", "attrs", "=", "{", "}", "if", "axis", "is", "not", "None", ":", "attrs", "[", "'axis'", "]", "=", "axis", "gather_tensor_node", ",", "=", "create_helper_tensor_node", "(", "np", ".", "asarray", "(", "indices", ",", "np", ".", "int64", ")", ",", "output_name", "+", "\"__indices\"", ",", "kwargs", ")", "gather_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Gather\"", ",", "inputs", "=", "[", "input_name", ",", "gather_tensor_node", ".", "name", "]", ",", "outputs", "=", "[", "output_name", "]", ",", "name", "=", "output_name", ",", "*", "*", "attrs", ")", "return", "[", "gather_tensor_node", ",", "gather_node", "]" ]
[ 720, 0 ]
[ 739, 44 ]
python
en
['en', 'en', 'en']
True
create_helper_build_values_node
( inputs, output_name, dtype, kwargs, axis=0 )
create extra node, with specified values (allows mixing node names and static values)
create extra node, with specified values
def create_helper_build_values_node( inputs, output_name, dtype, kwargs, axis=0 ): """create extra node, with specified values (allows mixing node names and static values) """ values = [] tensor_nodes = [] for idx, inp in enumerate(inputs): if not isinstance(inp, (str, bytes)): inp, = create_helper_tensor_node( np.array([inp], dtype=dtype), output_name + "__value" + str(idx), kwargs ) tensor_nodes.append(inp) inp = inp.name values.append(inp) concat_node, = create_helper_concat_node(values, output_name, axis=axis) return tensor_nodes + [concat_node,]
[ "def", "create_helper_build_values_node", "(", "inputs", ",", "output_name", ",", "dtype", ",", "kwargs", ",", "axis", "=", "0", ")", ":", "values", "=", "[", "]", "tensor_nodes", "=", "[", "]", "for", "idx", ",", "inp", "in", "enumerate", "(", "inputs", ")", ":", "if", "not", "isinstance", "(", "inp", ",", "(", "str", ",", "bytes", ")", ")", ":", "inp", ",", "=", "create_helper_tensor_node", "(", "np", ".", "array", "(", "[", "inp", "]", ",", "dtype", "=", "dtype", ")", ",", "output_name", "+", "\"__value\"", "+", "str", "(", "idx", ")", ",", "kwargs", ")", "tensor_nodes", ".", "append", "(", "inp", ")", "inp", "=", "inp", ".", "name", "values", ".", "append", "(", "inp", ")", "concat_node", ",", "=", "create_helper_concat_node", "(", "values", ",", "output_name", ",", "axis", "=", "axis", ")", "return", "tensor_nodes", "+", "[", "concat_node", ",", "]" ]
[ 741, 0 ]
[ 762, 40 ]
python
en
['en', 'en', 'en']
True
create_helper_shape_node
(input_name, output_name)
create extra shape node for specified input node
create extra shape node for specified input node
def create_helper_shape_node(input_name, output_name): """create extra shape node for specified input node""" shape_node = onnx.helper.make_node( "Shape", inputs=[input_name], outputs=[output_name], name=output_name, ) return [shape_node]
[ "def", "create_helper_shape_node", "(", "input_name", ",", "output_name", ")", ":", "shape_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Shape\"", ",", "inputs", "=", "[", "input_name", "]", ",", "outputs", "=", "[", "output_name", "]", ",", "name", "=", "output_name", ",", ")", "return", "[", "shape_node", "]" ]
[ 764, 0 ]
[ 772, 23 ]
python
en
['en', 'en', 'en']
True
convert_dot
(node, **kwargs)
Map MXNet's dot operator attributes to onnx's MatMul and Transpose operators based on the values set for transpose_a, transpose_b attributes.
Map MXNet's dot operator attributes to onnx's MatMul and Transpose operators based on the values set for transpose_a, transpose_b attributes.
def convert_dot(node, **kwargs): """Map MXNet's dot operator attributes to onnx's MatMul and Transpose operators based on the values set for transpose_a, transpose_b attributes.""" name, input_nodes, attrs = get_inputs(node, kwargs) input_node_a = input_nodes[0] input_node_b = input_nodes[1] trans_a_node = None trans_b_node = None trans_a = get_boolean_attribute_value(attrs, "transpose_a") trans_b = get_boolean_attribute_value(attrs, "transpose_b") op_name = "transpose" + str(kwargs["idx"]) if trans_a: input_node_a = op_name + "_a" trans_a_node, = create_helper_trans_node(input_nodes[0], input_node_a) if trans_b: input_node_b = op_name + "_b" trans_b_node, = create_helper_trans_node(input_nodes[1], input_node_b) matmul_node = onnx.helper.make_node( 'MatMul', inputs=[input_node_a, input_node_b], outputs=[name], name=name ) if not trans_a and not trans_b: return [matmul_node] elif trans_a and not trans_b: return [trans_a_node, matmul_node] elif trans_b and not trans_a: return [trans_b_node, matmul_node] else: return [trans_a_node, trans_b_node, matmul_node]
[ "def", "convert_dot", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "input_node_a", "=", "input_nodes", "[", "0", "]", "input_node_b", "=", "input_nodes", "[", "1", "]", "trans_a_node", "=", "None", "trans_b_node", "=", "None", "trans_a", "=", "get_boolean_attribute_value", "(", "attrs", ",", "\"transpose_a\"", ")", "trans_b", "=", "get_boolean_attribute_value", "(", "attrs", ",", "\"transpose_b\"", ")", "op_name", "=", "\"transpose\"", "+", "str", "(", "kwargs", "[", "\"idx\"", "]", ")", "if", "trans_a", ":", "input_node_a", "=", "op_name", "+", "\"_a\"", "trans_a_node", ",", "=", "create_helper_trans_node", "(", "input_nodes", "[", "0", "]", ",", "input_node_a", ")", "if", "trans_b", ":", "input_node_b", "=", "op_name", "+", "\"_b\"", "trans_b_node", ",", "=", "create_helper_trans_node", "(", "input_nodes", "[", "1", "]", ",", "input_node_b", ")", "matmul_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'MatMul'", ",", "inputs", "=", "[", "input_node_a", ",", "input_node_b", "]", ",", "outputs", "=", "[", "name", "]", ",", "name", "=", "name", ")", "if", "not", "trans_a", "and", "not", "trans_b", ":", "return", "[", "matmul_node", "]", "elif", "trans_a", "and", "not", "trans_b", ":", "return", "[", "trans_a_node", ",", "matmul_node", "]", "elif", "trans_b", "and", "not", "trans_a", ":", "return", "[", "trans_b_node", ",", "matmul_node", "]", "else", ":", "return", "[", "trans_a_node", ",", "trans_b_node", ",", "matmul_node", "]" ]
[ 775, 0 ]
[ 812, 56 ]
python
en
['en', 'en', 'en']
True
convert_linalg_gemm2
(node, **kwargs)
Map MXNet's _linalg_gemm2 operator attributes to onnx's MatMul and Transpose operators based on the values set for transpose_a, transpose_b attributes. Return multiple nodes created.
Map MXNet's _linalg_gemm2 operator attributes to onnx's MatMul and Transpose operators based on the values set for transpose_a, transpose_b attributes. Return multiple nodes created.
def convert_linalg_gemm2(node, **kwargs): """Map MXNet's _linalg_gemm2 operator attributes to onnx's MatMul and Transpose operators based on the values set for transpose_a, transpose_b attributes. Return multiple nodes created. """ name, input_nodes, attrs = get_inputs(node, kwargs) # Getting the attributes and assigning default values. alpha = float(attrs.get("alpha", 1.0)) trans_a = get_boolean_attribute_value(attrs, "transpose_a") trans_b = get_boolean_attribute_value(attrs, "transpose_b") op_name = "transpose" + str(kwargs["idx"]) if alpha == 1.0 and trans_a == 0 and trans_b == 0: matmul_node = onnx.helper.make_node( 'MatMul', inputs=input_nodes, outputs=[name], name=name ) return [matmul_node] elif trans_a == 1 and trans_b == 0: op_name = "transpose" + str(kwargs["idx"]) node_name = op_name+"_a" trans_a_node = onnx.helper.make_node( 'Transpose', inputs=[input_nodes[0]], outputs=[op_name+"_a"], name=node_name ) matmul_node = onnx.helper.make_node( 'MatMul', inputs=[node_name, input_nodes[1]], outputs=[name], name=name ) return [trans_a_node, matmul_node] elif trans_a == 0 and trans_b == 1: node_name = op_name + "_b" trans_b_node = onnx.helper.make_node( 'Transpose', inputs=[input_nodes[1]], outputs=[op_name+"_b"], name=node_name ) matmul_node = onnx.helper.make_node( 'MatMul', inputs=[input_nodes[0], node_name], outputs=[name], name=name ) return [trans_b_node, matmul_node] else: node_name_a = op_name+"_a" trans_a_node = onnx.helper.make_node( 'Transpose', inputs=[input_nodes[0]], outputs=[op_name+"_a"], name=node_name_a ) node_name_b = op_name + "_b" trans_b_node = onnx.helper.make_node( 'Transpose', inputs=[input_nodes[1]], outputs=[op_name+"_b"], name=node_name_b ) matmul_node = onnx.helper.make_node( 'MatMul', inputs=input_nodes, outputs=[name], name=name ) return [trans_a_node, trans_b_node, matmul_node]
[ "def", "convert_linalg_gemm2", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "# Getting the attributes and assigning default values.", "alpha", "=", "float", "(", "attrs", ".", "get", "(", "\"alpha\"", ",", "1.0", ")", ")", "trans_a", "=", "get_boolean_attribute_value", "(", "attrs", ",", "\"transpose_a\"", ")", "trans_b", "=", "get_boolean_attribute_value", "(", "attrs", ",", "\"transpose_b\"", ")", "op_name", "=", "\"transpose\"", "+", "str", "(", "kwargs", "[", "\"idx\"", "]", ")", "if", "alpha", "==", "1.0", "and", "trans_a", "==", "0", "and", "trans_b", "==", "0", ":", "matmul_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'MatMul'", ",", "inputs", "=", "input_nodes", ",", "outputs", "=", "[", "name", "]", ",", "name", "=", "name", ")", "return", "[", "matmul_node", "]", "elif", "trans_a", "==", "1", "and", "trans_b", "==", "0", ":", "op_name", "=", "\"transpose\"", "+", "str", "(", "kwargs", "[", "\"idx\"", "]", ")", "node_name", "=", "op_name", "+", "\"_a\"", "trans_a_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'Transpose'", ",", "inputs", "=", "[", "input_nodes", "[", "0", "]", "]", ",", "outputs", "=", "[", "op_name", "+", "\"_a\"", "]", ",", "name", "=", "node_name", ")", "matmul_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'MatMul'", ",", "inputs", "=", "[", "node_name", ",", "input_nodes", "[", "1", "]", "]", ",", "outputs", "=", "[", "name", "]", ",", "name", "=", "name", ")", "return", "[", "trans_a_node", ",", "matmul_node", "]", "elif", "trans_a", "==", "0", "and", "trans_b", "==", "1", ":", "node_name", "=", "op_name", "+", "\"_b\"", "trans_b_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'Transpose'", ",", "inputs", "=", "[", "input_nodes", "[", "1", "]", "]", ",", "outputs", "=", "[", "op_name", "+", "\"_b\"", "]", ",", "name", "=", "node_name", ")", "matmul_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'MatMul'", ",", "inputs", "=", "[", "input_nodes", "[", "0", "]", ",", "node_name", "]", ",", "outputs", "=", "[", "name", "]", ",", "name", "=", "name", ")", "return", "[", "trans_b_node", ",", "matmul_node", "]", "else", ":", "node_name_a", "=", "op_name", "+", "\"_a\"", "trans_a_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'Transpose'", ",", "inputs", "=", "[", "input_nodes", "[", "0", "]", "]", ",", "outputs", "=", "[", "op_name", "+", "\"_a\"", "]", ",", "name", "=", "node_name_a", ")", "node_name_b", "=", "op_name", "+", "\"_b\"", "trans_b_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'Transpose'", ",", "inputs", "=", "[", "input_nodes", "[", "1", "]", "]", ",", "outputs", "=", "[", "op_name", "+", "\"_b\"", "]", ",", "name", "=", "node_name_b", ")", "matmul_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'MatMul'", ",", "inputs", "=", "input_nodes", ",", "outputs", "=", "[", "name", "]", ",", "name", "=", "name", ")", "return", "[", "trans_a_node", ",", "trans_b_node", ",", "matmul_node", "]" ]
[ 816, 0 ]
[ 898, 56 ]
python
en
['en', 'en', 'en']
True
convert_pooling
(node, **kwargs)
Map MXNet's Pooling operator attributes to onnx's MaxPool/AveragePool/GlobalMaxPool/GlobalAveragePool operators based on the input node's attributes and return the created node.
Map MXNet's Pooling operator attributes to onnx's MaxPool/AveragePool/GlobalMaxPool/GlobalAveragePool operators based on the input node's attributes and return the created node.
def convert_pooling(node, **kwargs): """Map MXNet's Pooling operator attributes to onnx's MaxPool/AveragePool/GlobalMaxPool/GlobalAveragePool operators based on the input node's attributes and return the created node. """ name, input_nodes, attrs = get_inputs(node, kwargs) kernel = eval(attrs["kernel"]) pool_type = attrs["pool_type"] if attrs.get("pool_type") else "max" stride = eval(attrs["stride"]) if attrs.get("stride") else (1, 1) global_pool = get_boolean_attribute_value(attrs, "global_pool") p_value = attrs.get('p_value', 'None') pooling_convention = attrs.get('pooling_convention', 'valid') ceil_mode = False if pooling_convention == 'full': if onnx.__version__ < "1.5.0": pooling_warning = "Pooling: ONNX lower than 1.5.0 doesn't support pooling_convention. " \ "This might lead to shape or accuracy issues. " \ "https://github.com/onnx/onnx/issues/549" ceil_mode = True logging.warning(pooling_warning) pad_dims = list(parse_helper(attrs, "pad", [0, 0])) pad_dims = pad_dims + pad_dims pool_types = {"max": "MaxPool", "avg": "AveragePool", "lp": "LpPool"} global_pool_types = {"max": "GlobalMaxPool", "avg": "GlobalAveragePool", "lp": "GlobalLpPool"} if pool_type == 'lp' and p_value == 'None': raise AttributeError('ONNX requires a p value for LpPool and GlobalLpPool') if global_pool: if pool_type == 'lp': node = onnx.helper.make_node( global_pool_types[pool_type], input_nodes, # input [name], p=int(p_value), name=name ) else: node = onnx.helper.make_node( global_pool_types[pool_type], input_nodes, # input [name], name=name ) else: if pool_type == 'lp': node = onnx.helper.make_node( pool_types[pool_type], input_nodes, # input [name], p=int(p_value), kernel_shape=kernel, pads=pad_dims, strides=stride, name=name ) else: if onnx.__version__ >= "1.5.0": node = onnx.helper.make_node( pool_types[pool_type], input_nodes, # input [name], kernel_shape=kernel, pads=pad_dims, strides=stride, name=name, ceil_mode=ceil_mode ) else: node = onnx.helper.make_node( pool_types[pool_type], input_nodes, # input [name], kernel_shape=kernel, pads=pad_dims, strides=stride, name=name ) return [node]
[ "def", "convert_pooling", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "kernel", "=", "eval", "(", "attrs", "[", "\"kernel\"", "]", ")", "pool_type", "=", "attrs", "[", "\"pool_type\"", "]", "if", "attrs", ".", "get", "(", "\"pool_type\"", ")", "else", "\"max\"", "stride", "=", "eval", "(", "attrs", "[", "\"stride\"", "]", ")", "if", "attrs", ".", "get", "(", "\"stride\"", ")", "else", "(", "1", ",", "1", ")", "global_pool", "=", "get_boolean_attribute_value", "(", "attrs", ",", "\"global_pool\"", ")", "p_value", "=", "attrs", ".", "get", "(", "'p_value'", ",", "'None'", ")", "pooling_convention", "=", "attrs", ".", "get", "(", "'pooling_convention'", ",", "'valid'", ")", "ceil_mode", "=", "False", "if", "pooling_convention", "==", "'full'", ":", "if", "onnx", ".", "__version__", "<", "\"1.5.0\"", ":", "pooling_warning", "=", "\"Pooling: ONNX lower than 1.5.0 doesn't support pooling_convention. \"", "\"This might lead to shape or accuracy issues. \"", "\"https://github.com/onnx/onnx/issues/549\"", "ceil_mode", "=", "True", "logging", ".", "warning", "(", "pooling_warning", ")", "pad_dims", "=", "list", "(", "parse_helper", "(", "attrs", ",", "\"pad\"", ",", "[", "0", ",", "0", "]", ")", ")", "pad_dims", "=", "pad_dims", "+", "pad_dims", "pool_types", "=", "{", "\"max\"", ":", "\"MaxPool\"", ",", "\"avg\"", ":", "\"AveragePool\"", ",", "\"lp\"", ":", "\"LpPool\"", "}", "global_pool_types", "=", "{", "\"max\"", ":", "\"GlobalMaxPool\"", ",", "\"avg\"", ":", "\"GlobalAveragePool\"", ",", "\"lp\"", ":", "\"GlobalLpPool\"", "}", "if", "pool_type", "==", "'lp'", "and", "p_value", "==", "'None'", ":", "raise", "AttributeError", "(", "'ONNX requires a p value for LpPool and GlobalLpPool'", ")", "if", "global_pool", ":", "if", "pool_type", "==", "'lp'", ":", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "global_pool_types", "[", "pool_type", "]", ",", "input_nodes", ",", "# input", "[", "name", "]", ",", "p", "=", "int", "(", "p_value", ")", ",", "name", "=", "name", ")", "else", ":", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "global_pool_types", "[", "pool_type", "]", ",", "input_nodes", ",", "# input", "[", "name", "]", ",", "name", "=", "name", ")", "else", ":", "if", "pool_type", "==", "'lp'", ":", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "pool_types", "[", "pool_type", "]", ",", "input_nodes", ",", "# input", "[", "name", "]", ",", "p", "=", "int", "(", "p_value", ")", ",", "kernel_shape", "=", "kernel", ",", "pads", "=", "pad_dims", ",", "strides", "=", "stride", ",", "name", "=", "name", ")", "else", ":", "if", "onnx", ".", "__version__", ">=", "\"1.5.0\"", ":", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "pool_types", "[", "pool_type", "]", ",", "input_nodes", ",", "# input", "[", "name", "]", ",", "kernel_shape", "=", "kernel", ",", "pads", "=", "pad_dims", ",", "strides", "=", "stride", ",", "name", "=", "name", ",", "ceil_mode", "=", "ceil_mode", ")", "else", ":", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "pool_types", "[", "pool_type", "]", ",", "input_nodes", ",", "# input", "[", "name", "]", ",", "kernel_shape", "=", "kernel", ",", "pads", "=", "pad_dims", ",", "strides", "=", "stride", ",", "name", "=", "name", ")", "return", "[", "node", "]" ]
[ 902, 0 ]
[ 985, 17 ]
python
en
['en', 'en', 'en']
True
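For illustration, a minimal standalone sketch of the attribute mapping that convert_pooling performs for a plain (non-global) max pool, assuming only the onnx package is installed; the attrs dict and the tensor names "data" and "pool0" are hypothetical, and the module's helper functions are not used here:

import onnx

# Hypothetical MXNet Pooling attributes (MXNet stores them as strings in the symbol JSON).
attrs = {"kernel": "(2, 2)", "stride": "(2, 2)", "pad": "(1, 1)", "pool_type": "max"}

kernel = eval(attrs["kernel"])                      # (2, 2)
stride = eval(attrs.get("stride", "(1, 1)"))        # (2, 2)
pad_dims = list(eval(attrs.get("pad", "(0, 0)")))   # [1, 1]
pads = pad_dims + pad_dims                          # ONNX expects begin and end padding per axis

pool_node = onnx.helper.make_node(
    "MaxPool",                                      # pool_type "max" maps to MaxPool
    inputs=["data"],
    outputs=["pool0"],
    kernel_shape=kernel,
    strides=stride,
    pads=pads,
    name="pool0",
)
print(pool_node)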
convert_exp
(node, **kwargs)
Map MXNet's exp operator attributes to onnx's Exp operator and return the created node.
Map MXNet's exp operator attributes to onnx's Exp operator and return the created node.
def convert_exp(node, **kwargs):
    """Map MXNet's exp operator attributes
    to onnx's Exp operator and return the created node.
    """
    return create_basic_op_node('Exp', node, kwargs)
[ "def", "convert_exp", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Exp'", ",", "node", ",", "kwargs", ")" ]
[ 989, 0 ]
[ 993, 52 ]
python
en
['en', 'en', 'en']
True
convert_copy
(node, **kwargs)
Map MXNet's _copy operator attributes to onnx's Identity operator and return the created node.
Map MXNet's _copy operator attributes to onnx's Identity operator and return the created node.
def convert_copy(node, **kwargs):
    """Map MXNet's _copy operator attributes
    to onnx's Identity operator and return the created node.
    """
    return create_basic_op_node('Identity', node, kwargs)
[ "def", "convert_copy", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Identity'", ",", "node", ",", "kwargs", ")" ]
[ 996, 0 ]
[ 1000, 57 ]
python
en
['en', 'en', 'en']
True
convert_identity
(node, **kwargs)
Map MXNet's identity operator attributes to onnx's ConstantFill operator and return the created node.
Map MXNet's identity operator attributes to onnx's ConstantFill operator and return the created node.
def convert_identity(node, **kwargs):
    """Map MXNet's identity operator attributes
    to onnx's ConstantFill operator and return the created node.
    """
    return create_basic_op_node('ConstantFill', node, kwargs)
[ "def", "convert_identity", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'ConstantFill'", ",", "node", ",", "kwargs", ")" ]
[ 1003, 0 ]
[ 1007, 61 ]
python
en
['en', 'en', 'en']
True
convert_instancenorm
(node, **kwargs)
Map MXNet's InstanceNorm operator attributes to onnx's InstanceNormalization operator based on the input node's attributes and return the created node.
Map MXNet's InstanceNorm operator attributes to onnx's InstanceNormalization operator based on the input node's attributes and return the created node.
def convert_instancenorm(node, **kwargs):
    """Map MXNet's InstanceNorm operator attributes to onnx's InstanceNormalization operator
    based on the input node's attributes and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    eps = float(attrs.get("eps", 0.001))

    node = onnx.helper.make_node(
        'InstanceNormalization',
        inputs=input_nodes,
        outputs=[name],
        name=name,
        epsilon=eps)

    return [node]
[ "def", "convert_instancenorm", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "eps", "=", "float", "(", "attrs", ".", "get", "(", "\"eps\"", ",", "0.001", ")", ")", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'InstanceNormalization'", ",", "inputs", "=", "input_nodes", ",", "outputs", "=", "[", "name", "]", ",", "name", "=", "name", ",", "epsilon", "=", "eps", ")", "return", "[", "node", "]" ]
[ 1010, 0 ]
[ 1025, 17 ]
python
en
['en', 'en', 'en']
True
convert_leakyrelu
(node, **kwargs)
Map MXNet's LeakyReLU operator attributes to onnx's Elu/LeakyRelu/PRelu operators based on the input node's attributes and return the created node.
Map MXNet's LeakyReLU operator attributes to onnx's Elu/LeakyRelu/PRelu operators based on the input node's attributes and return the created node.
def convert_leakyrelu(node, **kwargs): """Map MXNet's LeakyReLU operator attributes to onnx's Elu/LeakyRelu/PRelu operators based on the input node's attributes and return the created node. """ name, input_nodes, attrs = get_inputs(node, kwargs) initializer = kwargs["initializer"] act_type = attrs.get("act_type", "leaky") alpha = float(attrs.get("slope", 0.25)) act_name = {"elu": "Elu", "leaky": "LeakyRelu", "prelu": "PRelu", "selu": "Selu"} reshape_val_name = 'reshape' + str(kwargs["idx"]) input_type = onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[np.dtype('int64')] reshape_value = np.array([1, -1, 1, 1], dtype='int64') dims = np.shape(reshape_value) shape_node = onnx.helper.make_tensor_value_info(reshape_val_name, input_type, dims) initializer.append( onnx.helper.make_tensor( name=reshape_val_name, data_type=input_type, dims=dims, vals=reshape_value, raw=False, ) ) slope_op_name = 'slope' + str(kwargs["idx"]) lr_node = [] if act_type == "prelu" or act_type == "selu": reshape_slope_node = onnx.helper.make_node( 'Reshape', inputs=[input_nodes[1], reshape_val_name], outputs=[slope_op_name], name=slope_op_name ) node = onnx.helper.make_node( act_name[act_type], inputs=[input_nodes[0], slope_op_name], outputs=[name], name=name) lr_node.append(shape_node) lr_node.append(reshape_slope_node) lr_node.append(node) else: node = onnx.helper.make_node( act_name[act_type], inputs=input_nodes, outputs=[name], name=name, alpha=alpha) lr_node.append(node) return lr_node
[ "def", "convert_leakyrelu", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "initializer", "=", "kwargs", "[", "\"initializer\"", "]", "act_type", "=", "attrs", ".", "get", "(", "\"act_type\"", ",", "\"leaky\"", ")", "alpha", "=", "float", "(", "attrs", ".", "get", "(", "\"slope\"", ",", "0.25", ")", ")", "act_name", "=", "{", "\"elu\"", ":", "\"Elu\"", ",", "\"leaky\"", ":", "\"LeakyRelu\"", ",", "\"prelu\"", ":", "\"PRelu\"", ",", "\"selu\"", ":", "\"Selu\"", "}", "reshape_val_name", "=", "'reshape'", "+", "str", "(", "kwargs", "[", "\"idx\"", "]", ")", "input_type", "=", "onnx", ".", "mapping", ".", "NP_TYPE_TO_TENSOR_TYPE", "[", "np", ".", "dtype", "(", "'int64'", ")", "]", "reshape_value", "=", "np", ".", "array", "(", "[", "1", ",", "-", "1", ",", "1", ",", "1", "]", ",", "dtype", "=", "'int64'", ")", "dims", "=", "np", ".", "shape", "(", "reshape_value", ")", "shape_node", "=", "onnx", ".", "helper", ".", "make_tensor_value_info", "(", "reshape_val_name", ",", "input_type", ",", "dims", ")", "initializer", ".", "append", "(", "onnx", ".", "helper", ".", "make_tensor", "(", "name", "=", "reshape_val_name", ",", "data_type", "=", "input_type", ",", "dims", "=", "dims", ",", "vals", "=", "reshape_value", ",", "raw", "=", "False", ",", ")", ")", "slope_op_name", "=", "'slope'", "+", "str", "(", "kwargs", "[", "\"idx\"", "]", ")", "lr_node", "=", "[", "]", "if", "act_type", "==", "\"prelu\"", "or", "act_type", "==", "\"selu\"", ":", "reshape_slope_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'Reshape'", ",", "inputs", "=", "[", "input_nodes", "[", "1", "]", ",", "reshape_val_name", "]", ",", "outputs", "=", "[", "slope_op_name", "]", ",", "name", "=", "slope_op_name", ")", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "act_name", "[", "act_type", "]", ",", "inputs", "=", "[", "input_nodes", "[", "0", "]", ",", "slope_op_name", "]", ",", "outputs", "=", "[", "name", "]", ",", "name", "=", "name", ")", "lr_node", ".", "append", "(", "shape_node", ")", "lr_node", ".", "append", "(", "reshape_slope_node", ")", "lr_node", ".", "append", "(", "node", ")", "else", ":", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "act_name", "[", "act_type", "]", ",", "inputs", "=", "input_nodes", ",", "outputs", "=", "[", "name", "]", ",", "name", "=", "name", ",", "alpha", "=", "alpha", ")", "lr_node", ".", "append", "(", "node", ")", "return", "lr_node" ]
[ 1031, 0 ]
[ 1089, 18 ]
python
af
['en', 'af', 'sw']
False
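As an aside on the PRelu branch above: MXNet keeps the slope as a flat per-channel vector, while ONNX PRelu broadcasts it against an NCHW input, which is why the converter inserts a Reshape to (1, -1, 1, 1). A small numpy-only sketch of that semantics, with made-up shapes and values:

import numpy as np

x = np.random.randn(2, 3, 4, 4).astype(np.float32)   # hypothetical NCHW activation
slope = np.array([0.1, 0.2, 0.3], dtype=np.float32)  # one slope per channel, as MXNet stores it

slope_reshaped = slope.reshape(1, -1, 1, 1)           # same reshape the converter emits
prelu = np.where(x > 0, x, slope_reshaped * x)        # reference PRelu semantics
print(prelu.shape)                                    # (2, 3, 4, 4)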
convert_softmax
(node, **kwargs)
Map MXNet's softmax operator attributes to onnx's Softmax operator and return the created node.
Map MXNet's softmax operator attributes to onnx's Softmax operator and return the created node.
def convert_softmax(node, **kwargs): """Map MXNet's softmax operator attributes to onnx's Softmax operator and return the created node. """ name, input_nodes, attrs = get_inputs(node, kwargs) axis = int(attrs.get("axis", -1)) c_softmax_node = [] axis = -1 transpose_node1 = onnx.helper.make_node( "Transpose", inputs=input_nodes, perm=(0, 2, 3, 1), # NCHW--NHWC--(NHW,C) name=name + '_tr1', outputs=[name + '_tr1'] ) softmax_node = onnx.helper.make_node( "Softmax", inputs=[name + '_tr1'], axis=axis, name=name + '', outputs=[name + ''] ) transpose_node2 = onnx.helper.make_node( "Transpose", inputs=[name + ''], perm=(0, 3, 1, 2), # NHWC--NCHW name=name + '_tr2', outputs=[name + '_tr2'] ) c_softmax_node.append(transpose_node1) c_softmax_node.append(softmax_node) c_softmax_node.append(transpose_node2) return c_softmax_node
[ "def", "convert_softmax", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "axis", "=", "int", "(", "attrs", ".", "get", "(", "\"axis\"", ",", "-", "1", ")", ")", "c_softmax_node", "=", "[", "]", "axis", "=", "-", "1", "transpose_node1", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Transpose\"", ",", "inputs", "=", "input_nodes", ",", "perm", "=", "(", "0", ",", "2", ",", "3", ",", "1", ")", ",", "# NCHW--NHWC--(NHW,C)", "name", "=", "name", "+", "'_tr1'", ",", "outputs", "=", "[", "name", "+", "'_tr1'", "]", ")", "softmax_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Softmax\"", ",", "inputs", "=", "[", "name", "+", "'_tr1'", "]", ",", "axis", "=", "axis", ",", "name", "=", "name", "+", "''", ",", "outputs", "=", "[", "name", "+", "''", "]", ")", "transpose_node2", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Transpose\"", ",", "inputs", "=", "[", "name", "+", "''", "]", ",", "perm", "=", "(", "0", ",", "3", ",", "1", ",", "2", ")", ",", "# NHWC--NCHW", "name", "=", "name", "+", "'_tr2'", ",", "outputs", "=", "[", "name", "+", "'_tr2'", "]", ")", "c_softmax_node", ".", "append", "(", "transpose_node1", ")", "c_softmax_node", ".", "append", "(", "softmax_node", ")", "c_softmax_node", ".", "append", "(", "transpose_node2", ")", "return", "c_softmax_node" ]
[ 1111, 0 ]
[ 1149, 25 ]
python
en
['en', 'en', 'en']
True
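The Transpose/Softmax/Transpose triple above exists because older ONNX Softmax normalizes over a flattened trailing block rather than an arbitrary axis, so the channel axis has to be moved last first. A numpy sketch of the equivalent computation on a hypothetical NCHW tensor:

import numpy as np

x = np.random.randn(1, 3, 2, 2).astype(np.float32)       # hypothetical NCHW input

x_nhwc = x.transpose(0, 2, 3, 1)                          # Transpose, perm=(0, 2, 3, 1)
e = np.exp(x_nhwc - x_nhwc.max(axis=-1, keepdims=True))   # numerically stable exponentials
y_nhwc = e / e.sum(axis=-1, keepdims=True)                # Softmax over the trailing axis
y = y_nhwc.transpose(0, 3, 1, 2)                          # Transpose back, perm=(0, 3, 1, 2)

print(np.allclose(y.sum(axis=1), 1.0))                    # each pixel's channel distribution sums to 1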
convert_blockgrad
(node, **kwargs)
Skip operator
Skip operator
def convert_blockgrad(node, **kwargs):
    """ Skip operator  """
    return create_basic_op_node('ConstantFill', node, kwargs)
[ "def", "convert_blockgrad", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'ConstantFill'", ",", "node", ",", "kwargs", ")" ]
[ 1153, 0 ]
[ 1155, 61 ]
python
en
['en', 'hr', 'en']
False
convert_makeloss
(node, **kwargs)
Skip operator
Skip operator
def convert_makeloss(node, **kwargs):
    """ Skip operator  """
    return create_basic_op_node('ConstantFill', node, kwargs)
[ "def", "convert_makeloss", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'ConstantFill'", ",", "node", ",", "kwargs", ")" ]
[ 1158, 0 ]
[ 1160, 61 ]
python
en
['en', 'hr', 'en']
False
convert_concat
(node, **kwargs)
Map MXNet's Concat operator attributes to onnx's Concat operator and return the created node.
Map MXNet's Concat operator attributes to onnx's Concat operator and return the created node.
def convert_concat(node, **kwargs):
    """Map MXNet's Concat operator attributes to onnx's Concat operator
    and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    axis = int(attrs.get("dim", 1))
    concat_node = onnx.helper.make_node(
        "Concat",
        input_nodes,
        [name],
        axis=axis,
        name=name
    )
    return [concat_node]
[ "def", "convert_concat", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "axis", "=", "int", "(", "attrs", ".", "get", "(", "\"dim\"", ",", "1", ")", ")", "concat_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Concat\"", ",", "input_nodes", ",", "[", "name", "]", ",", "axis", "=", "axis", ",", "name", "=", "name", ")", "return", "[", "concat_node", "]" ]
[ 1163, 0 ]
[ 1177, 24 ]
python
en
['en', 'la', 'en']
True
convert_RNN
(node, **kwargs)
Map MXNet's RNN operator attributes to onnx's RNN operator and return the created node.
Map MXNet's RNN operator attributes to onnx's RNN operator and return the created node.
def convert_RNN(node, **kwargs): """Map MXNet's RNN operator attributes to onnx's RNN operator and return the created node. """ name, input_nodes, attrs = get_inputs(node, kwargs) nodes = [] # ============================== Attributes ============================== mode = attrs['mode'].upper() rnn_kwargs = {} if mode != 'LSTM': raise NotImplementedError( "Only LSTM mode RNN conversion to ONNX is currently supported." ) hidden_size = rnn_kwargs['hidden_size'] = int(attrs.get("state_size")) if eval(attrs.get('bidirectional', 'False')): rnn_kwargs['direction'] = 'bidirectional' num_directions = 2 else: rnn_kwargs['direction'] = 'forward' num_directions = 1 clip_min = eval(attrs.get('lstm_state_clip_min', 'None')) clip_max = eval(attrs.get('lstm_state_clip_max', 'None')) if clip_min is not None or clip_max is not None: # ONNX LSTMs have the `clip` attribute, however it seems to give # slightly different results, when compared to the MXNet equivalent raise NotImplementedError( "Conversion of RNNs with lstm_state_clip_min/max " "to ONNX is currently not supported." ) if eval(attrs.get('lstm_state_clip_nan', 'False')): raise NotImplementedError( "ONNX RNN operator doesn't support lstm_state_clip_nan" ) if eval(attrs.get('use_sequence_length', 'False')): # This can maybe be implemented using the `sequence_len` optional input raise NotImplementedError( "Conversion of RNNs with variable input sequence length " "to ONNX is currently not supported." ) if eval(attrs.get('num_layers', '1')) != 1: raise NotImplementedError( "Conversion of RNNs with num_layers > 1 " "to ONNX is currently not supported." ) if eval(attrs.get('p', '0')) != 0: # WARNING! The `p` attribute in mxnet is "dropout probability" while # the `p` optional input of ONNX LSTMs is the peephole weights tensor. raise NotImplementedError( "Conversion of RNNs with dropout " "to ONNX is currently not supported." ) if eval(attrs.get('projection_size', 'None')) is not None: raise NotImplementedError( "Conversion of RNNs with custom projection_size " "to ONNX is currently not supported." ) if not eval(attrs.get('state_outputs', 'True')): raise NotImplementedError( "Conversion of RNNs with state_outputs=False " "to ONNX is currently not supported." ) # ============================== Parameters ============================== # (See _rnn_param_concat for part 1 of this comment section) # Unfortunately, mxnets version of _rnn_param_concat concatenates *ALL* # the parameters, instead of grouping them like ONNX. The workaround, # used here, is that the _rnn_param_concat node conversion code will # produce multiple nodes with names ending in rnn_param_concatN__P # (Where P is the parameter group name W, R or B) # We then use regular expressions to get the "extra outputs" of the # _rnn_param_concat node. x, param_concat, *initial_states = input_nodes param_pattern = re.compile(r'(.*rnn_param_concat[0-9]+__)[WRB]$') if not param_pattern.match(param_concat): # ToDo: Maybe do something more sane after Issue #17621 gets resolved raise NotImplementedError( "The order of RNN parameters is different between mxnet and ONNX. " "Currently, an automatic conversion is only possible, if the RNN " "parameters were concatenated using the internal " "_rnn_param_concat operator." 
) w, r, b = ( param_pattern.sub(r'\1' + param, param_concat) for param in 'WRB' ) # The second conversion step handles # * parameter shapes, since mxnet uses flattened parameters, while # ONNX requires specific tensor shapes # * gate order, since both frameworks require the weights and biases # of the 4 basic gates (forget, input, cell and output) to be # concatenated, but in different order # ([ifco] for mxnet and [iofc] for ONNX) def fix_rnn_parameter(p, p_shape_in, p_shape_out, p_order=(0, 3, 1, 2)): p_ = p # 1) Reshape flat parameters to their original shape, such that # the gates are concatenated along axis=1 p_reshaped_in = create_helper_reshape_node( p, p_ + "__reshaped_in", p_shape_in, kwargs ) nodes.extend(p_reshaped_in) p = p_reshaped_in[-1].name # 2) Use a Gather node to pick gates along axis=1, permuting them p_reordered = create_helper_gather_node( p, p_ + "__reordered", p_order, kwargs, axis=1 ) nodes.extend(p_reordered) p = p_reordered[-1].name # 3) Reshape the parameters to their final shape, squeezing the gate # and hidden dimensions together p_reshaped_out = create_helper_reshape_node( p, p_ + "__reshaped_out", p_shape_out, kwargs ) nodes.extend(p_reshaped_out) return p_reshaped_out[-1].name w = fix_rnn_parameter( w, p_shape_in=(num_directions, 4, hidden_size, -1), p_shape_out=(num_directions, 4 * hidden_size, -1), ) r = fix_rnn_parameter( r, p_shape_in=(num_directions, 4, hidden_size, hidden_size), p_shape_out=(num_directions, 4 * hidden_size, hidden_size), ) b = fix_rnn_parameter( b, p_shape_in=(2 * num_directions, 4, hidden_size), p_shape_out=(num_directions, 8 * hidden_size), ) # ============================= Inputs/States ============================ input_shape = create_helper_shape_node(x, x + "__shape") nodes.extend(input_shape) input_shape = input_shape[-1].name batch_size = create_helper_gather_node( input_shape, x + "__batch_size", indices=[1], axis=0, kwargs=kwargs, ) nodes.extend(batch_size) batch_size = batch_size[-1].name state_shape = create_helper_build_values_node( [num_directions, batch_size, hidden_size], name + "__state_shape", dtype=np.int64, kwargs=kwargs, ) nodes.extend(state_shape) state_shape = state_shape[-1].name expanded_states = [] for state in initial_states: expanded_state = create_helper_expand_node( state, state + "__expanded", state_shape ) nodes.extend(expanded_state) expanded_states.append(expanded_state[-1].name) initial_states = expanded_states # =========================== RNN node/outputs =========================== y_out = [onnx.helper.make_node( mode, # RNN or LSTM or GRU inputs=[x, w, r, b, '', *initial_states], outputs=[name + '__Y'], name=name + '__Y', **rnn_kwargs )] nodes.extend(y_out) y = y_out[-1].name # We are almost done. The only thing left to do is to convert the output # of the RNN node from the [S, D, B, H] layout, which ONNX returns # to the [S, B, D*H] layout, which mxnet uses # 1) Transpose [S, D, B, H] -> [S, B, D, H] y_perm = (0, 2, 1, 3) y_transposed = create_helper_trans_node( y, y + "__transposed", y_perm ) nodes.extend(y_transposed) y = y_transposed[-1].name # 2) Reshape [S, B, D, H] -> [S, B, D*H] y_shape = (0, 0, -1) y_reshaped = create_helper_reshape_node(y, name, y_shape, kwargs) nodes.extend(y_reshaped) return nodes
[ "def", "convert_RNN", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "nodes", "=", "[", "]", "# ============================== Attributes ==============================", "mode", "=", "attrs", "[", "'mode'", "]", ".", "upper", "(", ")", "rnn_kwargs", "=", "{", "}", "if", "mode", "!=", "'LSTM'", ":", "raise", "NotImplementedError", "(", "\"Only LSTM mode RNN conversion to ONNX is currently supported.\"", ")", "hidden_size", "=", "rnn_kwargs", "[", "'hidden_size'", "]", "=", "int", "(", "attrs", ".", "get", "(", "\"state_size\"", ")", ")", "if", "eval", "(", "attrs", ".", "get", "(", "'bidirectional'", ",", "'False'", ")", ")", ":", "rnn_kwargs", "[", "'direction'", "]", "=", "'bidirectional'", "num_directions", "=", "2", "else", ":", "rnn_kwargs", "[", "'direction'", "]", "=", "'forward'", "num_directions", "=", "1", "clip_min", "=", "eval", "(", "attrs", ".", "get", "(", "'lstm_state_clip_min'", ",", "'None'", ")", ")", "clip_max", "=", "eval", "(", "attrs", ".", "get", "(", "'lstm_state_clip_max'", ",", "'None'", ")", ")", "if", "clip_min", "is", "not", "None", "or", "clip_max", "is", "not", "None", ":", "# ONNX LSTMs have the `clip` attribute, however it seems to give", "# slightly different results, when compared to the MXNet equivalent", "raise", "NotImplementedError", "(", "\"Conversion of RNNs with lstm_state_clip_min/max \"", "\"to ONNX is currently not supported.\"", ")", "if", "eval", "(", "attrs", ".", "get", "(", "'lstm_state_clip_nan'", ",", "'False'", ")", ")", ":", "raise", "NotImplementedError", "(", "\"ONNX RNN operator doesn't support lstm_state_clip_nan\"", ")", "if", "eval", "(", "attrs", ".", "get", "(", "'use_sequence_length'", ",", "'False'", ")", ")", ":", "# This can maybe be implemented using the `sequence_len` optional input", "raise", "NotImplementedError", "(", "\"Conversion of RNNs with variable input sequence length \"", "\"to ONNX is currently not supported.\"", ")", "if", "eval", "(", "attrs", ".", "get", "(", "'num_layers'", ",", "'1'", ")", ")", "!=", "1", ":", "raise", "NotImplementedError", "(", "\"Conversion of RNNs with num_layers > 1 \"", "\"to ONNX is currently not supported.\"", ")", "if", "eval", "(", "attrs", ".", "get", "(", "'p'", ",", "'0'", ")", ")", "!=", "0", ":", "# WARNING! The `p` attribute in mxnet is \"dropout probability\" while", "# the `p` optional input of ONNX LSTMs is the peephole weights tensor.", "raise", "NotImplementedError", "(", "\"Conversion of RNNs with dropout \"", "\"to ONNX is currently not supported.\"", ")", "if", "eval", "(", "attrs", ".", "get", "(", "'projection_size'", ",", "'None'", ")", ")", "is", "not", "None", ":", "raise", "NotImplementedError", "(", "\"Conversion of RNNs with custom projection_size \"", "\"to ONNX is currently not supported.\"", ")", "if", "not", "eval", "(", "attrs", ".", "get", "(", "'state_outputs'", ",", "'True'", ")", ")", ":", "raise", "NotImplementedError", "(", "\"Conversion of RNNs with state_outputs=False \"", "\"to ONNX is currently not supported.\"", ")", "# ============================== Parameters ==============================", "# (See _rnn_param_concat for part 1 of this comment section)", "# Unfortunately, mxnets version of _rnn_param_concat concatenates *ALL*", "# the parameters, instead of grouping them like ONNX. 
The workaround,", "# used here, is that the _rnn_param_concat node conversion code will", "# produce multiple nodes with names ending in rnn_param_concatN__P", "# (Where P is the parameter group name W, R or B)", "# We then use regular expressions to get the \"extra outputs\" of the", "# _rnn_param_concat node.", "x", ",", "param_concat", ",", "", "*", "initial_states", "=", "input_nodes", "param_pattern", "=", "re", ".", "compile", "(", "r'(.*rnn_param_concat[0-9]+__)[WRB]$'", ")", "if", "not", "param_pattern", ".", "match", "(", "param_concat", ")", ":", "# ToDo: Maybe do something more sane after Issue #17621 gets resolved", "raise", "NotImplementedError", "(", "\"The order of RNN parameters is different between mxnet and ONNX. \"", "\"Currently, an automatic conversion is only possible, if the RNN \"", "\"parameters were concatenated using the internal \"", "\"_rnn_param_concat operator.\"", ")", "w", ",", "r", ",", "b", "=", "(", "param_pattern", ".", "sub", "(", "r'\\1'", "+", "param", ",", "param_concat", ")", "for", "param", "in", "'WRB'", ")", "# The second conversion step handles", "# * parameter shapes, since mxnet uses flattened parameters, while", "# ONNX requires specific tensor shapes", "# * gate order, since both frameworks require the weights and biases", "# of the 4 basic gates (forget, input, cell and output) to be", "# concatenated, but in different order", "# ([ifco] for mxnet and [iofc] for ONNX)", "def", "fix_rnn_parameter", "(", "p", ",", "p_shape_in", ",", "p_shape_out", ",", "p_order", "=", "(", "0", ",", "3", ",", "1", ",", "2", ")", ")", ":", "p_", "=", "p", "# 1) Reshape flat parameters to their original shape, such that", "# the gates are concatenated along axis=1", "p_reshaped_in", "=", "create_helper_reshape_node", "(", "p", ",", "p_", "+", "\"__reshaped_in\"", ",", "p_shape_in", ",", "kwargs", ")", "nodes", ".", "extend", "(", "p_reshaped_in", ")", "p", "=", "p_reshaped_in", "[", "-", "1", "]", ".", "name", "# 2) Use a Gather node to pick gates along axis=1, permuting them", "p_reordered", "=", "create_helper_gather_node", "(", "p", ",", "p_", "+", "\"__reordered\"", ",", "p_order", ",", "kwargs", ",", "axis", "=", "1", ")", "nodes", ".", "extend", "(", "p_reordered", ")", "p", "=", "p_reordered", "[", "-", "1", "]", ".", "name", "# 3) Reshape the parameters to their final shape, squeezing the gate", "# and hidden dimensions together", "p_reshaped_out", "=", "create_helper_reshape_node", "(", "p", ",", "p_", "+", "\"__reshaped_out\"", ",", "p_shape_out", ",", "kwargs", ")", "nodes", ".", "extend", "(", "p_reshaped_out", ")", "return", "p_reshaped_out", "[", "-", "1", "]", ".", "name", "w", "=", "fix_rnn_parameter", "(", "w", ",", "p_shape_in", "=", "(", "num_directions", ",", "4", ",", "hidden_size", ",", "-", "1", ")", ",", "p_shape_out", "=", "(", "num_directions", ",", "4", "*", "hidden_size", ",", "-", "1", ")", ",", ")", "r", "=", "fix_rnn_parameter", "(", "r", ",", "p_shape_in", "=", "(", "num_directions", ",", "4", ",", "hidden_size", ",", "hidden_size", ")", ",", "p_shape_out", "=", "(", "num_directions", ",", "4", "*", "hidden_size", ",", "hidden_size", ")", ",", ")", "b", "=", "fix_rnn_parameter", "(", "b", ",", "p_shape_in", "=", "(", "2", "*", "num_directions", ",", "4", ",", "hidden_size", ")", ",", "p_shape_out", "=", "(", "num_directions", ",", "8", "*", "hidden_size", ")", ",", ")", "# ============================= Inputs/States ============================", "input_shape", "=", "create_helper_shape_node", "(", "x", ",", "x", "+", 
"\"__shape\"", ")", "nodes", ".", "extend", "(", "input_shape", ")", "input_shape", "=", "input_shape", "[", "-", "1", "]", ".", "name", "batch_size", "=", "create_helper_gather_node", "(", "input_shape", ",", "x", "+", "\"__batch_size\"", ",", "indices", "=", "[", "1", "]", ",", "axis", "=", "0", ",", "kwargs", "=", "kwargs", ",", ")", "nodes", ".", "extend", "(", "batch_size", ")", "batch_size", "=", "batch_size", "[", "-", "1", "]", ".", "name", "state_shape", "=", "create_helper_build_values_node", "(", "[", "num_directions", ",", "batch_size", ",", "hidden_size", "]", ",", "name", "+", "\"__state_shape\"", ",", "dtype", "=", "np", ".", "int64", ",", "kwargs", "=", "kwargs", ",", ")", "nodes", ".", "extend", "(", "state_shape", ")", "state_shape", "=", "state_shape", "[", "-", "1", "]", ".", "name", "expanded_states", "=", "[", "]", "for", "state", "in", "initial_states", ":", "expanded_state", "=", "create_helper_expand_node", "(", "state", ",", "state", "+", "\"__expanded\"", ",", "state_shape", ")", "nodes", ".", "extend", "(", "expanded_state", ")", "expanded_states", ".", "append", "(", "expanded_state", "[", "-", "1", "]", ".", "name", ")", "initial_states", "=", "expanded_states", "# =========================== RNN node/outputs ===========================", "y_out", "=", "[", "onnx", ".", "helper", ".", "make_node", "(", "mode", ",", "# RNN or LSTM or GRU", "inputs", "=", "[", "x", ",", "w", ",", "r", ",", "b", ",", "''", ",", "*", "initial_states", "]", ",", "outputs", "=", "[", "name", "+", "'__Y'", "]", ",", "name", "=", "name", "+", "'__Y'", ",", "*", "*", "rnn_kwargs", ")", "]", "nodes", ".", "extend", "(", "y_out", ")", "y", "=", "y_out", "[", "-", "1", "]", ".", "name", "# We are almost done. The only thing left to do is to convert the output", "# of the RNN node from the [S, D, B, H] layout, which ONNX returns", "# to the [S, B, D*H] layout, which mxnet uses", "# 1) Transpose [S, D, B, H] -> [S, B, D, H]", "y_perm", "=", "(", "0", ",", "2", ",", "1", ",", "3", ")", "y_transposed", "=", "create_helper_trans_node", "(", "y", ",", "y", "+", "\"__transposed\"", ",", "y_perm", ")", "nodes", ".", "extend", "(", "y_transposed", ")", "y", "=", "y_transposed", "[", "-", "1", "]", ".", "name", "# 2) Reshape [S, B, D, H] -> [S, B, D*H]", "y_shape", "=", "(", "0", ",", "0", ",", "-", "1", ")", "y_reshaped", "=", "create_helper_reshape_node", "(", "y", ",", "name", ",", "y_shape", ",", "kwargs", ")", "nodes", ".", "extend", "(", "y_reshaped", ")", "return", "nodes" ]
[ 1180, 0 ]
[ 1391, 16 ]
python
en
['en', 'mt', 'en']
True
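The fix_rnn_parameter step above is easiest to see on a concrete array: LSTM gates are stored as [i, f, c, o] in MXNet but as [i, o, f, c] in ONNX, so the flat weights are reshaped so the gate axis is explicit, gathered with order (0, 3, 1, 2) along that axis, and flattened again. A numpy sketch with made-up sizes (1 direction, hidden_size=2, input_size=4):

import numpy as np

num_directions, hidden_size, input_size = 1, 2, 4
flat_w = np.arange(num_directions * 4 * hidden_size * input_size, dtype=np.float32)

w = flat_w.reshape(num_directions, 4, hidden_size, input_size)  # gate axis=1, MXNet order [i, f, c, o]
w = w[:, (0, 3, 1, 2), :, :]                                    # reorder gates to ONNX's [i, o, f, c]
w = w.reshape(num_directions, 4 * hidden_size, input_size)      # ONNX LSTM "W" layout
print(w.shape)                                                  # (1, 8, 4)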
convert_rnn_param_concat
(node, **kwargs)
Map MXNet's _rnn_param_concat operator attributes to onnx's Concat operator and return the created node.
Map MXNet's _rnn_param_concat operator attributes to onnx's Concat operator and return the created node.
def convert_rnn_param_concat(node, **kwargs): """Map MXNet's _rnn_param_concat operator attributes to onnx's Concat operator and return the created node. """ name, input_nodes, attrs = get_inputs(node, kwargs) axis = int(attrs.get("dim")) # mxnet RNN node and ONNX RNN/LSTM/GRU nodes # use different ways to store their parameters # The conversion between these formats is broken into 2 steps # The first step (performed here in _rnn_param_concat) regroups the # flattened parameters according to the table below. # The second step corrects the shapes and orders of gates and is # performed and described in more detail in the RNN node # mxnet [ONNX] -> ONNX (group) # i2h_weights [W (+ WB)] -> W (input weights) # h2h_weights [R (+ RB)] -> R (recurrence weights) # i2h_biases [Wb (+ WBb)] -> B = [Wb + Rb (+ WBb + RBb)] # h2h_biases [Rb (+ RBb)] -> (biases) split = len(input_nodes) // 2 weights, biases = input_nodes[:split], input_nodes[split:] i2h_weights = weights[::2] h2h_weights = weights[1::2] i2h_biases = biases[::2] h2h_biases = biases[1::2] reordered_biases = [ bias for pair in zip(i2h_biases, h2h_biases) for bias in pair ] # The order of mxnet parameters in the inputs is: # [ # '{}{}_{}_{}'.format(d, l, g, t) # for t in ['weight', 'bias'] # for l in range(num_layers) # for d in ['l', 'r'][:num_directions] # for g in ['i2h', 'h2h'] # ] w = onnx.helper.make_node( "Concat", inputs=i2h_weights, outputs=[name + "__W"], axis=axis, name=name + "__W" ) r = onnx.helper.make_node( "Concat", inputs=h2h_weights, outputs=[name + "__R"], axis=axis, name=name + "__R" ) b = onnx.helper.make_node( "Concat", inputs=reordered_biases, outputs=[name + "__B"], axis=axis, name=name + "__B" ) return [w, r, b]
[ "def", "convert_rnn_param_concat", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "axis", "=", "int", "(", "attrs", ".", "get", "(", "\"dim\"", ")", ")", "# mxnet RNN node and ONNX RNN/LSTM/GRU nodes", "# use different ways to store their parameters", "# The conversion between these formats is broken into 2 steps", "# The first step (performed here in _rnn_param_concat) regroups the", "# flattened parameters according to the table below.", "# The second step corrects the shapes and orders of gates and is", "# performed and described in more detail in the RNN node", "# mxnet [ONNX] -> ONNX (group)", "# i2h_weights [W (+ WB)] -> W (input weights)", "# h2h_weights [R (+ RB)] -> R (recurrence weights)", "# i2h_biases [Wb (+ WBb)] -> B = [Wb + Rb (+ WBb + RBb)]", "# h2h_biases [Rb (+ RBb)] -> (biases)", "split", "=", "len", "(", "input_nodes", ")", "//", "2", "weights", ",", "biases", "=", "input_nodes", "[", ":", "split", "]", ",", "input_nodes", "[", "split", ":", "]", "i2h_weights", "=", "weights", "[", ":", ":", "2", "]", "h2h_weights", "=", "weights", "[", "1", ":", ":", "2", "]", "i2h_biases", "=", "biases", "[", ":", ":", "2", "]", "h2h_biases", "=", "biases", "[", "1", ":", ":", "2", "]", "reordered_biases", "=", "[", "bias", "for", "pair", "in", "zip", "(", "i2h_biases", ",", "h2h_biases", ")", "for", "bias", "in", "pair", "]", "# The order of mxnet parameters in the inputs is:", "# [", "# '{}{}_{}_{}'.format(d, l, g, t)", "# for t in ['weight', 'bias']", "# for l in range(num_layers)", "# for d in ['l', 'r'][:num_directions]", "# for g in ['i2h', 'h2h']", "# ]", "w", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Concat\"", ",", "inputs", "=", "i2h_weights", ",", "outputs", "=", "[", "name", "+", "\"__W\"", "]", ",", "axis", "=", "axis", ",", "name", "=", "name", "+", "\"__W\"", ")", "r", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Concat\"", ",", "inputs", "=", "h2h_weights", ",", "outputs", "=", "[", "name", "+", "\"__R\"", "]", ",", "axis", "=", "axis", ",", "name", "=", "name", "+", "\"__R\"", ")", "b", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Concat\"", ",", "inputs", "=", "reordered_biases", ",", "outputs", "=", "[", "name", "+", "\"__B\"", "]", ",", "axis", "=", "axis", ",", "name", "=", "name", "+", "\"__B\"", ")", "return", "[", "w", ",", "r", ",", "b", "]" ]
[ 1394, 0 ]
[ 1458, 20 ]
python
en
['en', 'la', 'en']
True
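A plain-Python sketch of the regrouping table in the comments above, using made-up parameter names for a single-layer, unidirectional RNN; the real converter emits three ONNX Concat nodes (ending in __W, __R and __B) rather than Python lists:

# Order of MXNet parameter inputs: i2h/h2h weights first, then i2h/h2h biases.
input_nodes = ["l0_i2h_weight", "l0_h2h_weight", "l0_i2h_bias", "l0_h2h_bias"]

split = len(input_nodes) // 2
weights, biases = input_nodes[:split], input_nodes[split:]

w_group = weights[::2]      # i2h weights -> W (input weights)
r_group = weights[1::2]     # h2h weights -> R (recurrence weights)
b_group = [b for pair in zip(biases[::2], biases[1::2]) for b in pair]  # interleaved [Wb, Rb] -> B

print(w_group, r_group, b_group)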
convert_full
(node, **kwargs)
Map MXNet's _zeros, _ones and _full operators attributes to onnx's tensors and return the created node.
Map MXNet's _zeros, _ones and _full operators attributes to onnx's tensors and return the created node.
def convert_full(node, **kwargs):
    """Map MXNet's _zeros, _ones and _full operators attributes to onnx's tensors and
    return the created node.
    """
    # ToDo: Use Constant or ConstantOfShape, when Issue #15101 is resolved?
    name, input_nodes, attrs = get_inputs(node, kwargs)
    del input_nodes

    # Convert "0"s dimensions to "1"s. This is a workaround for the case, where
    # mxnet symbols can broadcast "0"s, while ONNX can only broadcast over "1"s
    shape = convert_string_to_list(attrs["shape"])
    shape = tuple(dim if dim else 1 for dim in shape)

    value = {
        '_zeros': 0.0,
        '_ones': 1.0,
        '_full': eval(attrs.get('value', '0')),
    }[node['op']]
    dtype = attrs.get('dtype')
    data = np.full(shape, value, dtype)

    return create_helper_tensor_node(data, name, kwargs)
[ "def", "convert_full", "(", "node", ",", "*", "*", "kwargs", ")", ":", "# ToDo: Use Constant or ConstantOfShape, when Issue #15101 is resolved?", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "del", "input_nodes", "# Convert \"0\"s dimensions to \"1\"s. This is a workaround for the case, where", "# mxnet symbols can broadcast \"0\"s, while ONNX can only broadcast over \"1\"s", "shape", "=", "convert_string_to_list", "(", "attrs", "[", "\"shape\"", "]", ")", "shape", "=", "tuple", "(", "dim", "if", "dim", "else", "1", "for", "dim", "in", "shape", ")", "value", "=", "{", "'_zeros'", ":", "0.0", ",", "'_ones'", ":", "1.0", ",", "'_full'", ":", "eval", "(", "attrs", ".", "get", "(", "'value'", ",", "'0'", ")", ")", ",", "}", "[", "node", "[", "'op'", "]", "]", "dtype", "=", "attrs", ".", "get", "(", "'dtype'", ")", "data", "=", "np", ".", "full", "(", "shape", ",", "value", ",", "dtype", ")", "return", "create_helper_tensor_node", "(", "data", ",", "name", ",", "kwargs", ")" ]
[ 1463, 0 ]
[ 1484, 56 ]
python
en
['en', 'en', 'en']
True
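A standalone sketch of the shape workaround in convert_full, assuming the numpy and onnx packages; the attrs dict and the tensor name "ones0" are made up, and eval stands in for the module's convert_string_to_list helper:

import numpy as np
from onnx import numpy_helper

attrs = {"shape": "(0, 3)", "dtype": "float32"}       # hypothetical _ones attributes

shape = tuple(eval(attrs["shape"]))
shape = tuple(dim if dim else 1 for dim in shape)     # broadcastable "0" dims become "1": (1, 3)
data = np.full(shape, 1.0, dtype=attrs["dtype"])

tensor = numpy_helper.from_array(data, name="ones0")  # TensorProto for the graph initializer list
print(tensor.dims)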
convert_transpose
(node, **kwargs)
Map MXNet's transpose operator attributes to onnx's Transpose operator and return the created node.
Map MXNet's transpose operator attributes to onnx's Transpose operator and return the created node.
def convert_transpose(node, **kwargs):
    """Map MXNet's transpose operator attributes to onnx's Transpose operator
    and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    axes = attrs.get("axes", ())
    if axes:
        axes = tuple(map(int, re.findall(r'\d+', axes)))

        transpose_node = onnx.helper.make_node(
            "Transpose",
            input_nodes,
            [name],
            perm=axes,
            name=name
        )
    else:
        transpose_node = onnx.helper.make_node(
            "Transpose",
            input_nodes,
            [name],
            name=name
        )

    return [transpose_node]
[ "def", "convert_transpose", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "axes", "=", "attrs", ".", "get", "(", "\"axes\"", ",", "(", ")", ")", "if", "axes", ":", "axes", "=", "tuple", "(", "map", "(", "int", ",", "re", ".", "findall", "(", "r'\\d+'", ",", "axes", ")", ")", ")", "transpose_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Transpose\"", ",", "input_nodes", ",", "[", "name", "]", ",", "perm", "=", "axes", ",", "name", "=", "name", ")", "else", ":", "transpose_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Transpose\"", ",", "input_nodes", ",", "[", "name", "]", ",", "name", "=", "name", ")", "return", "[", "transpose_node", "]" ]
[ 1487, 0 ]
[ 1512, 27 ]
python
en
['en', 'en', 'en']
True
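For reference, the axes attribute arrives as a string such as "(0, 2, 3, 1)"; a minimal sketch of the regex parsing and the resulting ONNX node, with made-up tensor names:

import re
import onnx

axes_attr = "(0, 2, 3, 1)"                            # hypothetical MXNet transpose axes string
perm = tuple(map(int, re.findall(r'\d+', axes_attr)))

transpose_node = onnx.helper.make_node(
    "Transpose",
    inputs=["data"],
    outputs=["transpose0"],
    perm=perm,
    name="transpose0",
)
print(transpose_node)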
convert_lrn
(node, **kwargs)
Map MXNet's LRN operator attributes to onnx's LRN operator and return the created node.
Map MXNet's LRN operator attributes to onnx's LRN operator and return the created node.
def convert_lrn(node, **kwargs):
    """Map MXNet's LRN operator attributes to onnx's LRN operator
    and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    alpha = float(attrs.get("alpha", 0.0001))
    beta = float(attrs.get("beta", 0.75))
    bias = float(attrs.get("knorm", 1.0))
    size = int(attrs.get("nsize"))

    lrn_node = onnx.helper.make_node(
        "LRN",
        inputs=input_nodes,
        outputs=[name],
        name=name,
        alpha=alpha,
        beta=beta,
        bias=bias,
        size=size
    )

    return [lrn_node]
[ "def", "convert_lrn", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "alpha", "=", "float", "(", "attrs", ".", "get", "(", "\"alpha\"", ",", "0.0001", ")", ")", "beta", "=", "float", "(", "attrs", ".", "get", "(", "\"beta\"", ",", "0.75", ")", ")", "bias", "=", "float", "(", "attrs", ".", "get", "(", "\"knorm\"", ",", "1.0", ")", ")", "size", "=", "int", "(", "attrs", ".", "get", "(", "\"nsize\"", ")", ")", "lrn_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"LRN\"", ",", "inputs", "=", "input_nodes", ",", "outputs", "=", "[", "name", "]", ",", "name", "=", "name", ",", "alpha", "=", "alpha", ",", "beta", "=", "beta", ",", "bias", "=", "bias", ",", "size", "=", "size", ")", "return", "[", "lrn_node", "]" ]
[ 1516, 0 ]
[ 1538, 21 ]
python
en
['en', 'mt', 'en']
True
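The LRN mapping is mostly attribute renaming (knorm becomes bias, nsize becomes size); a minimal sketch with hypothetical attribute values and made-up tensor names:

import onnx

attrs = {"alpha": "0.0001", "beta": "0.75", "knorm": "2", "nsize": "5"}  # hypothetical MXNet LRN attrs

lrn_node = onnx.helper.make_node(
    "LRN",
    inputs=["data"],
    outputs=["lrn0"],
    alpha=float(attrs["alpha"]),
    beta=float(attrs["beta"]),
    bias=float(attrs["knorm"]),   # MXNet "knorm" -> ONNX "bias"
    size=int(attrs["nsize"]),     # MXNet "nsize" -> ONNX "size"
    name="lrn0",
)
print(lrn_node)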
convert_l2normalization
(node, **kwargs)
Map MXNet's L2Normalization operator attributes to onnx's LpNormalization operator and return the created node.
Map MXNet's L2Normalization operator attributes to onnx's LpNormalization operator and return the created node.
def convert_l2normalization(node, **kwargs):
    """Map MXNet's L2Normalization operator attributes to onnx's LpNormalization operator
    and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    mode = attrs.get("mode", "instance")

    if mode != "channel":
        raise AttributeError("L2Normalization: ONNX currently supports channel mode only")

    l2norm_node = onnx.helper.make_node(
        "LpNormalization",
        input_nodes,
        [name],
        axis=1,  # channel only
        name=name
    )
    return [l2norm_node]
[ "def", "convert_l2normalization", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "mode", "=", "attrs", ".", "get", "(", "\"mode\"", ",", "\"instance\"", ")", "if", "mode", "!=", "\"channel\"", ":", "raise", "AttributeError", "(", "\"L2Normalization: ONNX currently supports channel mode only\"", ")", "l2norm_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"LpNormalization\"", ",", "input_nodes", ",", "[", "name", "]", ",", "axis", "=", "1", ",", "# channel only", "name", "=", "name", ")", "return", "[", "l2norm_node", "]" ]
[ 1542, 0 ]
[ 1560, 24 ]
python
en
['en', 'co', 'en']
True
convert_dropout
(node, **kwargs)
Map MXNet's Dropout operator attributes to onnx's Dropout operator and return the created node.
Map MXNet's Dropout operator attributes to onnx's Dropout operator and return the created node.
def convert_dropout(node, **kwargs):
    """Map MXNet's Dropout operator attributes to onnx's Dropout operator
    and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    probability = float(attrs.get("p", 0.5))
    probability = np.array(probability, dtype=np.float32)
    training_mode = False
    training_mode = np.array(training_mode, dtype=np.bool)

    node_ratio = create_helper_tensor_node(probability, name + '_ratio', kwargs)
    node_ratio = create_helper_tensor_node(training_mode, name + '_mode', kwargs)

    dropout_node = onnx.helper.make_node(
        "Dropout",
        [input_nodes[0], name + '_ratio', name + '_mode'],
        [name],
        name=name
    )
    return [dropout_node]
[ "def", "convert_dropout", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "probability", "=", "float", "(", "attrs", ".", "get", "(", "\"p\"", ",", "0.5", ")", ")", "probability", "=", "np", ".", "array", "(", "probability", ",", "dtype", "=", "np", ".", "float32", ")", "training_mode", "=", "False", "training_mode", "=", "np", ".", "array", "(", "training_mode", ",", "dtype", "=", "np", ".", "bool", ")", "node_ratio", "=", "create_helper_tensor_node", "(", "probability", ",", "name", "+", "'_ratio'", ",", "kwargs", ")", "node_ratio", "=", "create_helper_tensor_node", "(", "training_mode", ",", "name", "+", "'_mode'", ",", "kwargs", ")", "dropout_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Dropout\"", ",", "[", "input_nodes", "[", "0", "]", ",", "name", "+", "'_ratio'", ",", "name", "+", "'_mode'", "]", ",", "[", "name", "]", ",", "name", "=", "name", ")", "return", "[", "dropout_node", "]" ]
[ 1582, 0 ]
[ 1604, 25 ]
python
en
['en', 'en', 'en']
True
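A sketch of the opset-12-style Dropout wiring used above, where the ratio and training_mode travel as initializer inputs rather than attributes (note that np.bool, used in the recorded code, is deprecated in recent NumPy releases, so plain Python values appear here); the tensor names are made up:

import numpy as np
import onnx
from onnx import numpy_helper

ratio = numpy_helper.from_array(np.array(0.5, dtype=np.float32), name="drop0_ratio")
mode = numpy_helper.from_array(np.array(False), name="drop0_mode")   # training_mode = False

dropout_node = onnx.helper.make_node(
    "Dropout",
    inputs=["data", "drop0_ratio", "drop0_mode"],
    outputs=["drop0"],
    name="drop0",
)
print(dropout_node)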
convert_flatten
(node, **kwargs)
Map MXNet's Flatten operator attributes to onnx's Flatten operator and return the created node.
Map MXNet's Flatten operator attributes to onnx's Flatten operator and return the created node.
def convert_flatten(node, **kwargs):
    """Map MXNet's Flatten operator attributes
    to onnx's Flatten operator and return the created node.
    """
    return create_basic_op_node('Flatten', node, kwargs)
[ "def", "convert_flatten", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Flatten'", ",", "node", ",", "kwargs", ")" ]
[ 1607, 0 ]
[ 1611, 56 ]
python
en
['en', 'fi', 'en']
True
convert_clip
(node, **kwargs)
Map MXNet's Clip operator attributes to onnx's Clip operator and return the created node.
Map MXNet's Clip operator attributes to onnx's Clip operator and return the created node.
def convert_clip(node, **kwargs):
    """Map MXNet's Clip operator attributes
    to onnx's Clip operator and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    a_min = np.float(attrs.get('a_min', -np.inf))
    a_max = np.float(attrs.get('a_max', np.inf))

    clip_node = onnx.helper.make_node(
        "Clip",
        input_nodes,
        [name],
        name=name,
        min=a_min,
        max=a_max
    )
    return [clip_node]
[ "def", "convert_clip", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "a_min", "=", "np", ".", "float", "(", "attrs", ".", "get", "(", "'a_min'", ",", "-", "np", ".", "inf", ")", ")", "a_max", "=", "np", ".", "float", "(", "attrs", ".", "get", "(", "'a_max'", ",", "np", ".", "inf", ")", ")", "clip_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Clip\"", ",", "input_nodes", ",", "[", "name", "]", ",", "name", "=", "name", ",", "min", "=", "a_min", ",", "max", "=", "a_max", ")", "return", "[", "clip_node", "]" ]
[ 1614, 0 ]
[ 1631, 22 ]
python
en
['en', 'en', 'en']
True
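The node above uses the attribute form of Clip, which is valid up to ONNX opset 10; from opset 11 onward the bounds become optional inputs instead. A minimal sketch of the attribute form with arbitrary bounds and made-up names (np.float, used in the recorded code, is likewise deprecated in recent NumPy, so plain floats appear here):

import onnx

clip_node = onnx.helper.make_node(
    "Clip",
    inputs=["data"],
    outputs=["clip0"],
    name="clip0",
    min=0.0,
    max=6.0,
)
print(clip_node)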
scalar_op_helper
(node, op_name, **kwargs)
Helper function for scalar arithmetic operations
Helper function for scalar arithmetic operations
def scalar_op_helper(node, op_name, **kwargs): """Helper function for scalar arithmetic operations""" name, input_nodes, attrs = get_inputs(node, kwargs) from onnx import numpy_helper input_type = kwargs["in_type"] scalar_value = np.array([attrs.get("scalar", 1)], dtype=onnx.mapping.TENSOR_TYPE_TO_NP_TYPE[input_type]) initializer = kwargs["initializer"] flag = True # If the input value is in initializer, just multiply with scalar input # and create a new initializer for i in initializer: if i.name == input_nodes[0]: if op_name == 'Mul': new_initializer = numpy_helper.to_array(i) * scalar_value[0] elif op_name == 'Sub': if name.startswith("_rminusscalar"): new_initializer = scalar_value[0] - numpy_helper.to_array(i) else: new_initializer = numpy_helper.to_array(i) - scalar_value[0] elif op_name == 'Add': new_initializer = numpy_helper.to_array(i) + scalar_value[0] elif op_name == 'Div': if name.startswith("_rdivscalar"): new_initializer = scalar_value[0] / numpy_helper.to_array(i) else: new_initializer = numpy_helper.to_array(i) / scalar_value[0] elif op_name == 'Pow': new_initializer = numpy_helper.to_array(i) ** scalar_value[0] flag = False break # else create a new tensor of the scalar value, add it in initializer if flag is True: dims = np.shape(scalar_value) scalar_op_name = "scalar_op" + str(kwargs["idx"]) tensor_node = onnx.helper.make_tensor_value_info(scalar_op_name, input_type, dims) initializer.append( onnx.helper.make_tensor( name=scalar_op_name, data_type=input_type, dims=dims, vals=scalar_value, raw=False, ) ) mul_node = onnx.helper.make_node( op_name, [input_nodes[0], scalar_op_name], [name], name=name ) return [tensor_node, mul_node] else: data_type = onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[new_initializer.dtype] dims = np.shape(new_initializer) new_a_node = input_nodes[0] + str(kwargs["idx"]) tensor_node = onnx.helper.make_tensor_value_info(new_a_node, data_type, dims) initializer.append( onnx.helper.make_tensor( name=new_a_node, data_type=data_type, dims=dims, vals=new_initializer, raw=False, ) ) return [tensor_node]
[ "def", "scalar_op_helper", "(", "node", ",", "op_name", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "from", "onnx", "import", "numpy_helper", "input_type", "=", "kwargs", "[", "\"in_type\"", "]", "scalar_value", "=", "np", ".", "array", "(", "[", "attrs", ".", "get", "(", "\"scalar\"", ",", "1", ")", "]", ",", "dtype", "=", "onnx", ".", "mapping", ".", "TENSOR_TYPE_TO_NP_TYPE", "[", "input_type", "]", ")", "initializer", "=", "kwargs", "[", "\"initializer\"", "]", "flag", "=", "True", "# If the input value is in initializer, just multiply with scalar input", "# and create a new initializer", "for", "i", "in", "initializer", ":", "if", "i", ".", "name", "==", "input_nodes", "[", "0", "]", ":", "if", "op_name", "==", "'Mul'", ":", "new_initializer", "=", "numpy_helper", ".", "to_array", "(", "i", ")", "*", "scalar_value", "[", "0", "]", "elif", "op_name", "==", "'Sub'", ":", "if", "name", ".", "startswith", "(", "\"_rminusscalar\"", ")", ":", "new_initializer", "=", "scalar_value", "[", "0", "]", "-", "numpy_helper", ".", "to_array", "(", "i", ")", "else", ":", "new_initializer", "=", "numpy_helper", ".", "to_array", "(", "i", ")", "-", "scalar_value", "[", "0", "]", "elif", "op_name", "==", "'Add'", ":", "new_initializer", "=", "numpy_helper", ".", "to_array", "(", "i", ")", "+", "scalar_value", "[", "0", "]", "elif", "op_name", "==", "'Div'", ":", "if", "name", ".", "startswith", "(", "\"_rdivscalar\"", ")", ":", "new_initializer", "=", "scalar_value", "[", "0", "]", "/", "numpy_helper", ".", "to_array", "(", "i", ")", "else", ":", "new_initializer", "=", "numpy_helper", ".", "to_array", "(", "i", ")", "/", "scalar_value", "[", "0", "]", "elif", "op_name", "==", "'Pow'", ":", "new_initializer", "=", "numpy_helper", ".", "to_array", "(", "i", ")", "**", "scalar_value", "[", "0", "]", "flag", "=", "False", "break", "# else create a new tensor of the scalar value, add it in initializer", "if", "flag", "is", "True", ":", "dims", "=", "np", ".", "shape", "(", "scalar_value", ")", "scalar_op_name", "=", "\"scalar_op\"", "+", "str", "(", "kwargs", "[", "\"idx\"", "]", ")", "tensor_node", "=", "onnx", ".", "helper", ".", "make_tensor_value_info", "(", "scalar_op_name", ",", "input_type", ",", "dims", ")", "initializer", ".", "append", "(", "onnx", ".", "helper", ".", "make_tensor", "(", "name", "=", "scalar_op_name", ",", "data_type", "=", "input_type", ",", "dims", "=", "dims", ",", "vals", "=", "scalar_value", ",", "raw", "=", "False", ",", ")", ")", "mul_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "op_name", ",", "[", "input_nodes", "[", "0", "]", ",", "scalar_op_name", "]", ",", "[", "name", "]", ",", "name", "=", "name", ")", "return", "[", "tensor_node", ",", "mul_node", "]", "else", ":", "data_type", "=", "onnx", ".", "mapping", ".", "NP_TYPE_TO_TENSOR_TYPE", "[", "new_initializer", ".", "dtype", "]", "dims", "=", "np", ".", "shape", "(", "new_initializer", ")", "new_a_node", "=", "input_nodes", "[", "0", "]", "+", "str", "(", "kwargs", "[", "\"idx\"", "]", ")", "tensor_node", "=", "onnx", ".", "helper", ".", "make_tensor_value_info", "(", "new_a_node", ",", "data_type", ",", "dims", ")", "initializer", ".", "append", "(", "onnx", ".", "helper", ".", "make_tensor", "(", "name", "=", "new_a_node", ",", "data_type", "=", "data_type", ",", "dims", "=", "dims", ",", "vals", "=", "new_initializer", ",", "raw", "=", "False", ",", ")", ")", "return", "[", "tensor_node", "]" ]
[ 1634, 0 ]
[ 1708, 28 ]
python
en
['en', 'en', 'en']
True
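The constant-folding branch of scalar_op_helper is the interesting part: when the tensor operand of, say, _mul_scalar is already an initializer, no Mul node is emitted and the scalar is folded into a fresh initializer instead. A small sketch of that folding with made-up names and values:

import numpy as np
from onnx import numpy_helper

weights = numpy_helper.from_array(np.array([1.0, 2.0, 3.0], dtype=np.float32), name="w")
scalar = np.float32(4.0)

folded = numpy_helper.to_array(weights) * scalar             # what the helper computes for 'Mul'
new_init = numpy_helper.from_array(folded, name="w_folded")  # replaces the original initializer use
print(numpy_helper.to_array(new_init))                       # [ 4.  8. 12.]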
convert_mul_scalar
(node, **kwargs)
Map MXNet's _mul_scalar operator attributes to onnx's Mul operator. Creates a new node for the input scalar value, adds it to the initializer and return multiple created nodes.
Map MXNet's _mul_scalar operator attributes to onnx's Mul operator. Creates a new node for the input scalar value, adds it to the initializer and return multiple created nodes.
def convert_mul_scalar(node, **kwargs):
    """Map MXNet's _mul_scalar operator attributes to onnx's Mul operator.
    Creates a new node for the input scalar value, adds it to the initializer
    and return multiple created nodes.
    """
    return scalar_op_helper(node, 'Mul', **kwargs)
[ "def", "convert_mul_scalar", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "scalar_op_helper", "(", "node", ",", "'Mul'", ",", "*", "*", "kwargs", ")" ]
[ 1712, 0 ]
[ 1717, 50 ]
python
en
['en', 'en', 'en']
True
convert_minus_scalar
(node, **kwargs)
Map MXNet's _minus_scalar operator attributes to onnx's Minus operator. Creates a new node for the input scalar value, adds it to the initializer and return multiple created nodes.
Map MXNet's _minus_scalar operator attributes to onnx's Minus operator. Creates a new node for the input scalar value, adds it to the initializer and return multiple created nodes.
def convert_minus_scalar(node, **kwargs):
    """Map MXNet's _minus_scalar operator attributes to onnx's Minus operator.
    Creates a new node for the input scalar value, adds it to the initializer
    and return multiple created nodes.
    """
    return scalar_op_helper(node, 'Sub', **kwargs)
[ "def", "convert_minus_scalar", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "scalar_op_helper", "(", "node", ",", "'Sub'", ",", "*", "*", "kwargs", ")" ]
[ 1722, 0 ]
[ 1727, 50 ]
python
en
['en', 'fi', 'en']
True
convert_rminus_scalar
(node, **kwargs)
Map MXNet's _rminus_scalar operator attributes to onnx's Sub operator. Creates a new node for the input scalar value, adds it to the initializer and return multiple created nodes.
Map MXNet's _rminus_scalar operator attributes to onnx's Sub operator. Creates a new node for the input scalar value, adds it to the initializer and return multiple created nodes.
def convert_rminus_scalar(node, **kwargs):
    """Map MXNet's _rminus_scalar operator attributes to onnx's Sub operator.
    Creates a new node for the input scalar value, adds it to the initializer
    and return multiple created nodes.
    """
    return scalar_op_helper(node, 'Sub', **kwargs)
[ "def", "convert_rminus_scalar", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "scalar_op_helper", "(", "node", ",", "'Sub'", ",", "*", "*", "kwargs", ")" ]
[ 1730, 0 ]
[ 1735, 50 ]
python
en
['en', 'la', 'en']
True
convert_add_scalar
(node, **kwargs)
Map MXNet's _plus_scalar operator attributes to onnx's Add operator. Creates a new node for the input scalar value, adds it to the initializer and return multiple created nodes.
Map MXNet's _plus_scalar operator attributes to onnx's Add operator. Creates a new node for the input scalar value, adds it to the initializer and return multiple created nodes.
def convert_add_scalar(node, **kwargs):
    """Map MXNet's _plus_scalar operator attributes to onnx's Add operator.
    Creates a new node for the input scalar value, adds it to the initializer
    and return multiple created nodes.
    """
    return scalar_op_helper(node, 'Add', **kwargs)
[ "def", "convert_add_scalar", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "scalar_op_helper", "(", "node", ",", "'Add'", ",", "*", "*", "kwargs", ")" ]
[ 1739, 0 ]
[ 1744, 50 ]
python
en
['en', 'en', 'en']
True
convert_div_scalar
(node, **kwargs)
Map MXNet's _div_scalar operator attributes to onnx's Div operator. Creates a new node for the input scalar value, adds it to the initializer and return multiple created nodes.
Map MXNet's _div_scalar operator attributes to onnx's Div operator. Creates a new node for the input scalar value, adds it to the initializer and return multiple created nodes.
def convert_div_scalar(node, **kwargs):
    """Map MXNet's _div_scalar operator attributes to onnx's Div operator.
    Creates a new node for the input scalar value, adds it to the initializer
    and return multiple created nodes.
    """
    return scalar_op_helper(node, 'Div', **kwargs)
[ "def", "convert_div_scalar", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "scalar_op_helper", "(", "node", ",", "'Div'", ",", "*", "*", "kwargs", ")" ]
[ 1748, 0 ]
[ 1753, 50 ]
python
en
['en', 'en', 'en']
True
convert_rdiv_scalar
(node, **kwargs)
Map MXNet's _rdiv_scalar operator attributes to onnx's Div operator. Creates a new node for the input scalar value, adds it to the initializer and return multiple created nodes.
Map MXNet's _rdiv_scalar operator attributes to onnx's Div operator. Creates a new node for the input scalar value, adds it to the initializer and return multiple created nodes.
def convert_rdiv_scalar(node, **kwargs):
    """Map MXNet's _rdiv_scalar operator attributes to onnx's Div operator.
    Creates a new node for the input scalar value, adds it to the initializer
    and return multiple created nodes.
    """
    return scalar_op_helper(node, 'Div', **kwargs)
[ "def", "convert_rdiv_scalar", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "scalar_op_helper", "(", "node", ",", "'Div'", ",", "*", "*", "kwargs", ")" ]
[ 1756, 0 ]
[ 1761, 50 ]
python
en
['en', 'en', 'en']
True
convert_pow_scalar
(node, **kwargs)
Map MXNet's _pow_scalar operator attributes to onnx's Pow operator. Creates a new node for the input scalar value, adds it to the initializer and return multiple created nodes.
Map MXNet's _pow_scalar operator attributes to onnx's Pow operator. Creates a new node for the input scalar value, adds it to the initializer and return multiple created nodes.
def convert_pow_scalar(node, **kwargs):
    """Map MXNet's _pow_scalar operator attributes to onnx's Pow operator.
    Creates a new node for the input scalar value, adds it to the initializer
    and return multiple created nodes.
    """
    return scalar_op_helper(node, 'Pow', **kwargs)
[ "def", "convert_pow_scalar", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "scalar_op_helper", "(", "node", ",", "'Pow'", ",", "*", "*", "kwargs", ")" ]
[ 1764, 0 ]
[ 1769, 50 ]
python
en
['en', 'el-Latn', 'en']
True
convert_argmax
(node, **kwargs)
Map MXNet's argmax operator attributes to onnx's ArgMax operator and return the created node.
Map MXNet's argmax operator attributes to onnx's ArgMax operator and return the created node.
def convert_argmax(node, **kwargs):
    """Map MXNet's argmax operator attributes to onnx's ArgMax operator
    and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    axis = int(attrs.get("axis"))
    keepdims = get_boolean_attribute_value(attrs, "keepdims")

    node = onnx.helper.make_node(
        'ArgMax',
        inputs=input_nodes,
        axis=axis,
        keepdims=keepdims,
        outputs=[name],
        name=name
    )
    return [node]
[ "def", "convert_argmax", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "axis", "=", "int", "(", "attrs", ".", "get", "(", "\"axis\"", ")", ")", "keepdims", "=", "get_boolean_attribute_value", "(", "attrs", ",", "\"keepdims\"", ")", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'ArgMax'", ",", "inputs", "=", "input_nodes", ",", "axis", "=", "axis", ",", "keepdims", "=", "keepdims", ",", "outputs", "=", "[", "name", "]", ",", "name", "=", "name", ")", "return", "[", "node", "]" ]
[ 1773, 0 ]
[ 1790, 17 ]
python
en
['en', 'en', 'en']
True
convert_argmin
(node, **kwargs)
Map MXNet's argmin operator attributes to onnx's ArgMin operator and return the created node.
Map MXNet's argmin operator attributes to onnx's ArgMin operator and return the created node.
def convert_argmin(node, **kwargs):
    """Map MXNet's argmin operator attributes to onnx's ArgMin operator
    and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    axis = int(attrs.get("axis"))
    keepdims = get_boolean_attribute_value(attrs, "keepdims")

    node = onnx.helper.make_node(
        'ArgMin',
        inputs=input_nodes,
        axis=axis,
        keepdims=keepdims,
        outputs=[name],
        name=name
    )
    return [node]
[ "def", "convert_argmin", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "axis", "=", "int", "(", "attrs", ".", "get", "(", "\"axis\"", ")", ")", "keepdims", "=", "get_boolean_attribute_value", "(", "attrs", ",", "\"keepdims\"", ")", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'ArgMin'", ",", "inputs", "=", "input_nodes", ",", "axis", "=", "axis", ",", "keepdims", "=", "keepdims", ",", "outputs", "=", "[", "name", "]", ",", "name", "=", "name", ")", "return", "[", "node", "]" ]
[ 1793, 0 ]
[ 1810, 17 ]
python
en
['en', 'en', 'en']
True
convert_maximum
(node, **kwargs)
Map MXNet's _maximum operator attributes to onnx's Max operator and return the created node.
Map MXNet's _maximum operator attributes to onnx's Max operator and return the created node.
def convert_maximum(node, **kwargs): """Map MXNet's _maximum operator attributes to onnx's Max operator and return the created node. """ return create_basic_op_node('Max', node, kwargs)
[ "def", "convert_maximum", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Max'", ",", "node", ",", "kwargs", ")" ]
[ 1813, 0 ]
[ 1817, 52 ]
python
en
['en', 'la', 'en']
True
convert_minimum
(node, **kwargs)
Map MXNet's _minimum operator attributes to onnx's Min operator and return the created node.
Map MXNet's _minimum operator attributes to onnx's Min operator and return the created node.
def convert_minimum(node, **kwargs): """Map MXNet's _minimum operator attributes to onnx's Min operator and return the created node. """ return create_basic_op_node('Min', node, kwargs)
[ "def", "convert_minimum", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Min'", ",", "node", ",", "kwargs", ")" ]
[ 1821, 0 ]
[ 1825, 52 ]
python
en
['en', 'mt', 'en']
True
convert_min
(node, **kwargs)
Map MXNet's min operator attributes to onnx's ReduceMin operator and return the created node.
Map MXNet's min operator attributes to onnx's ReduceMin operator and return the created node.
def convert_min(node, **kwargs):
    """Map MXNet's min operator attributes to onnx's ReduceMin operator
    and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    mx_axis = attrs.get("axis", None)
    axes = convert_string_to_list(str(mx_axis)) if mx_axis is not None else None

    keepdims = get_boolean_attribute_value(attrs, "keepdims")

    if axes is not None:
        node = onnx.helper.make_node(
            'ReduceMin',
            inputs=input_nodes,
            outputs=[name],
            axes=axes,
            keepdims=keepdims,
            name=name
        )

        return [node]
    else:
        node = onnx.helper.make_node(
            'ReduceMin',
            inputs=input_nodes,
            outputs=[name],
            keepdims=keepdims,
            name=name
        )

        return [node]
[ "def", "convert_min", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "mx_axis", "=", "attrs", ".", "get", "(", "\"axis\"", ",", "None", ")", "axes", "=", "convert_string_to_list", "(", "str", "(", "mx_axis", ")", ")", "if", "mx_axis", "is", "not", "None", "else", "None", "keepdims", "=", "get_boolean_attribute_value", "(", "attrs", ",", "\"keepdims\"", ")", "if", "axes", "is", "not", "None", ":", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'ReduceMin'", ",", "inputs", "=", "input_nodes", ",", "outputs", "=", "[", "name", "]", ",", "axes", "=", "axes", ",", "keepdims", "=", "keepdims", ",", "name", "=", "name", ")", "return", "[", "node", "]", "else", ":", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'ReduceMin'", ",", "inputs", "=", "input_nodes", ",", "outputs", "=", "[", "name", "]", ",", "keepdims", "=", "keepdims", ",", "name", "=", "name", ")", "return", "[", "node", "]" ]
[ 1828, 0 ]
[ 1859, 21 ]
python
en
['en', 'en', 'en']
True
convert_max
(node, **kwargs)
Map MXNet's max operator attributes to onnx's ReduceMax operator and return the created node.
Map MXNet's max operator attributes to onnx's ReduceMax operator and return the created node.
def convert_max(node, **kwargs):
    """Map MXNet's max operator attributes to onnx's ReduceMax operator
    and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    mx_axis = attrs.get("axis", None)
    axes = convert_string_to_list(str(mx_axis)) if mx_axis is not None else None

    keepdims = get_boolean_attribute_value(attrs, "keepdims")

    if axes is not None:
        node = onnx.helper.make_node(
            'ReduceMax',
            inputs=input_nodes,
            outputs=[name],
            axes=axes,
            keepdims=keepdims,
            name=name
        )

        return [node]
    else:
        node = onnx.helper.make_node(
            'ReduceMax',
            inputs=input_nodes,
            outputs=[name],
            keepdims=keepdims,
            name=name
        )

        return [node]
[ "def", "convert_max", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "mx_axis", "=", "attrs", ".", "get", "(", "\"axis\"", ",", "None", ")", "axes", "=", "convert_string_to_list", "(", "str", "(", "mx_axis", ")", ")", "if", "mx_axis", "is", "not", "None", "else", "None", "keepdims", "=", "get_boolean_attribute_value", "(", "attrs", ",", "\"keepdims\"", ")", "if", "axes", "is", "not", "None", ":", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'ReduceMax'", ",", "inputs", "=", "input_nodes", ",", "outputs", "=", "[", "name", "]", ",", "axes", "=", "axes", ",", "keepdims", "=", "keepdims", ",", "name", "=", "name", ")", "return", "[", "node", "]", "else", ":", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'ReduceMax'", ",", "inputs", "=", "input_nodes", ",", "outputs", "=", "[", "name", "]", ",", "keepdims", "=", "keepdims", ",", "name", "=", "name", ")", "return", "[", "node", "]" ]
[ 1863, 0 ]
[ 1894, 21 ]
python
en
['en', 'en', 'en']
True
convert_mean
(node, **kwargs)
Map MXNet's mean operator attributes to onnx's ReduceMean operator and return the created node.
Map MXNet's mean operator attributes to onnx's ReduceMean operator and return the created node.
def convert_mean(node, **kwargs):
    """Map MXNet's mean operator attributes to onnx's ReduceMean operator
    and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    mx_axis = attrs.get("axis", None)
    axes = convert_string_to_list(str(mx_axis)) if mx_axis is not None else None

    keepdims = get_boolean_attribute_value(attrs, "keepdims")

    if axes is not None:
        node = onnx.helper.make_node(
            'ReduceMean',
            inputs=input_nodes,
            outputs=[name],
            axes=axes,
            keepdims=keepdims,
            name=name
        )

        return [node]
    else:
        node = onnx.helper.make_node(
            'ReduceMean',
            inputs=input_nodes,
            outputs=[name],
            keepdims=keepdims,
            name=name
        )

        return [node]
[ "def", "convert_mean", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "mx_axis", "=", "attrs", ".", "get", "(", "\"axis\"", ",", "None", ")", "axes", "=", "convert_string_to_list", "(", "str", "(", "mx_axis", ")", ")", "if", "mx_axis", "is", "not", "None", "else", "None", "keepdims", "=", "get_boolean_attribute_value", "(", "attrs", ",", "\"keepdims\"", ")", "if", "axes", "is", "not", "None", ":", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'ReduceMean'", ",", "inputs", "=", "input_nodes", ",", "outputs", "=", "[", "name", "]", ",", "axes", "=", "axes", ",", "keepdims", "=", "keepdims", ",", "name", "=", "name", ")", "return", "[", "node", "]", "else", ":", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'ReduceMean'", ",", "inputs", "=", "input_nodes", ",", "outputs", "=", "[", "name", "]", ",", "keepdims", "=", "keepdims", ",", "name", "=", "name", ")", "return", "[", "node", "]" ]
[ 1898, 0 ]
[ 1929, 21 ]
python
en
['en', 'en', 'en']
True
convert_prod
(node, **kwargs)
Map MXNet's prod operator attributes to onnx's ReduceProd operator and return the created node.
Map MXNet's prod operator attributes to onnx's ReduceProd operator and return the created node.
def convert_prod(node, **kwargs):
    """Map MXNet's prod operator attributes to onnx's ReduceProd operator
    and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    mx_axis = attrs.get("axis", None)
    axes = convert_string_to_list(str(mx_axis)) if mx_axis is not None else None

    keepdims = get_boolean_attribute_value(attrs, "keepdims")

    if axes is not None:
        node = onnx.helper.make_node(
            'ReduceProd',
            inputs=input_nodes,
            outputs=[name],
            axes=axes,
            keepdims=keepdims,
            name=name
        )

        return [node]
    else:
        node = onnx.helper.make_node(
            'ReduceProd',
            inputs=input_nodes,
            outputs=[name],
            keepdims=keepdims,
            name=name
        )

        return [node]
[ "def", "convert_prod", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "mx_axis", "=", "attrs", ".", "get", "(", "\"axis\"", ",", "None", ")", "axes", "=", "convert_string_to_list", "(", "str", "(", "mx_axis", ")", ")", "if", "mx_axis", "is", "not", "None", "else", "None", "keepdims", "=", "get_boolean_attribute_value", "(", "attrs", ",", "\"keepdims\"", ")", "if", "axes", "is", "not", "None", ":", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'ReduceProd'", ",", "inputs", "=", "input_nodes", ",", "outputs", "=", "[", "name", "]", ",", "axes", "=", "axes", ",", "keepdims", "=", "keepdims", ",", "name", "=", "name", ")", "return", "[", "node", "]", "else", ":", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'ReduceProd'", ",", "inputs", "=", "input_nodes", ",", "outputs", "=", "[", "name", "]", ",", "keepdims", "=", "keepdims", ",", "name", "=", "name", ")", "return", "[", "node", "]" ]
[ 1933, 0 ]
[ 1964, 21 ]
python
en
['en', 'en', 'en']
True
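Note: convert_min, convert_max, convert_mean and convert_prod above (and convert_sum further down) all follow the same Reduce* pattern: an optional MXNet axis attribute becomes the ONNX axes list, and when axes is omitted the ONNX Reduce op reduces over all dimensions. The snippet below is only an illustrative NumPy analogy of those axes/keepdims semantics; it is not part of the dataset.

import numpy as np

x = np.arange(24, dtype=np.float32).reshape(2, 3, 4)

# axes given: reduce only the listed dimensions; keepdims keeps them as size 1
print(np.mean(x, axis=(1,), keepdims=True).shape)   # (2, 1, 4)

# axes omitted: like ONNX Reduce* without axes, reduce over every dimension
print(np.mean(x, axis=None, keepdims=True).shape)   # (1, 1, 1)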
convert_elementwise_add
(node, **kwargs)
Map MXNet's elemwise_add operator attributes to onnx's Add operator and return the created node.
Map MXNet's elemwise_add operator attributes to onnx's Add operator and return the created node.
def convert_elementwise_add(node, **kwargs): """Map MXNet's elemwise_add operator attributes to onnx's Add operator and return the created node. """ return create_basic_op_node('Add', node, kwargs)
[ "def", "convert_elementwise_add", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Add'", ",", "node", ",", "kwargs", ")" ]
[ 1969, 0 ]
[ 1973, 52 ]
python
en
['en', 'en', 'en']
True
covert_broadcast_add
(node, **kwargs)
Map MXNet's broadcast_add operator attributes to onnx's Add operator and return the created node.
Map MXNet's broadcast_add operator attributes to onnx's Add operator and return the created node.
def covert_broadcast_add(node, **kwargs): """Map MXNet's broadcast_add operator attributes to onnx's Add operator and return the created node. """ return create_basic_op_node('Add', node, kwargs)
[ "def", "covert_broadcast_add", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Add'", ",", "node", ",", "kwargs", ")" ]
[ 1977, 0 ]
[ 1981, 52 ]
python
en
['en', 'en', 'en']
True
convert_elementwise_sub
(node, **kwargs)
Map MXNet's elemwise_sub operator attributes to onnx's Sub operator and return the created node.
Map MXNet's elemwise_sub operator attributes to onnx's Sub operator and return the created node.
def convert_elementwise_sub(node, **kwargs): """Map MXNet's elemwise_sub operator attributes to onnx's Sub operator and return the created node. """ return create_basic_op_node('Sub', node, kwargs)
[ "def", "convert_elementwise_sub", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Sub'", ",", "node", ",", "kwargs", ")" ]
[ 1985, 0 ]
[ 1989, 52 ]
python
en
['en', 'en', 'en']
True
covert_broadcast_sub
(node, **kwargs)
Map MXNet's broadcast_sub operator attributes to onnx's Sub operator and return the created node.
Map MXNet's broadcast_sub operator attributes to onnx's Sub operator and return the created node.
def covert_broadcast_sub(node, **kwargs): """Map MXNet's broadcast_sub operator attributes to onnx's Sub operator and return the created node. """ return create_basic_op_node('Sub', node, kwargs)
[ "def", "covert_broadcast_sub", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Sub'", ",", "node", ",", "kwargs", ")" ]
[ 1992, 0 ]
[ 1996, 52 ]
python
en
['en', 'en', 'en']
True
convert_elemwise_mul
(node, **kwargs)
Map MXNet's elemwise_mul operator attributes to onnx's Mul operator and return the created node.
Map MXNet's elemwise_mul operator attributes to onnx's Mul operator and return the created node.
def convert_elemwise_mul(node, **kwargs): """Map MXNet's elemwise_mul operator attributes to onnx's Mul operator and return the created node. """ return create_basic_op_node('Mul', node, kwargs)
[ "def", "convert_elemwise_mul", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Mul'", ",", "node", ",", "kwargs", ")" ]
[ 1999, 0 ]
[ 2003, 52 ]
python
en
['en', 'en', 'en']
True
convert_broadcast_mul
(node, **kwargs)
Map MXNet's broadcast_mul operator attributes to onnx's Mul operator and return the created node.
Map MXNet's broadcast_mul operator attributes to onnx's Mul operator and return the created node.
def convert_broadcast_mul(node, **kwargs): """Map MXNet's broadcast_mul operator attributes to onnx's Mul operator and return the created node. """ return create_basic_op_node('Mul', node, kwargs)
[ "def", "convert_broadcast_mul", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Mul'", ",", "node", ",", "kwargs", ")" ]
[ 2006, 0 ]
[ 2010, 52 ]
python
en
['en', 'en', 'en']
True
convert_elemwise_div
(node, **kwargs)
Map MXNet's elemwise_div operator attributes to onnx's Div operator and return the created node.
Map MXNet's elemwise_div operator attributes to onnx's Div operator and return the created node.
def convert_elemwise_div(node, **kwargs): """Map MXNet's elemwise_div operator attributes to onnx's Div operator and return the created node. """ return create_basic_op_node('Div', node, kwargs)
[ "def", "convert_elemwise_div", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Div'", ",", "node", ",", "kwargs", ")" ]
[ 2013, 0 ]
[ 2017, 52 ]
python
en
['en', 'en', 'en']
True
convert_broadcast_div
(node, **kwargs)
Map MXNet's broadcast_div operator attributes to onnx's Div operator and return the created node.
Map MXNet's broadcast_div operator attributes to onnx's Div operator and return the created node.
def convert_broadcast_div(node, **kwargs): """Map MXNet's broadcast_div operator attributes to onnx's Div operator and return the created node. """ return create_basic_op_node('Div', node, kwargs)
[ "def", "convert_broadcast_div", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Div'", ",", "node", ",", "kwargs", ")" ]
[ 2020, 0 ]
[ 2024, 52 ]
python
en
['en', 'en', 'en']
True
convert_negative
(node, **kwargs)
Map MXNet's negative operator attributes to onnx's Neg operator and return the created node.
Map MXNet's negative operator attributes to onnx's Neg operator and return the created node.
def convert_negative(node, **kwargs): """Map MXNet's negative operator attributes to onnx's Neg operator and return the created node. """ return create_basic_op_node('Neg', node, kwargs)
[ "def", "convert_negative", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Neg'", ",", "node", ",", "kwargs", ")" ]
[ 2027, 0 ]
[ 2031, 52 ]
python
en
['en', 'en', 'en']
True
convert_abs
(node, **kwargs)
Map MXNet's abs operator attributes to onnx's Abs operator and return the created node.
Map MXNet's abs operator attributes to onnx's Abs operator and return the created node.
def convert_abs(node, **kwargs): """Map MXNet's abs operator attributes to onnx's Abs operator and return the created node. """ return create_basic_op_node('Abs', node, kwargs)
[ "def", "convert_abs", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Abs'", ",", "node", ",", "kwargs", ")" ]
[ 2034, 0 ]
[ 2038, 52 ]
python
en
['en', 'en', 'en']
True
convert_addn
(node, **kwargs)
Map MXNet's add_n operator attributes to onnx's Sum operator and return the created node.
Map MXNet's add_n operator attributes to onnx's Sum operator and return the created node.
def convert_addn(node, **kwargs): """Map MXNet's add_n operator attributes to onnx's Sum operator and return the created node. """ return create_basic_op_node('Sum', node, kwargs)
[ "def", "convert_addn", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Sum'", ",", "node", ",", "kwargs", ")" ]
[ 2041, 0 ]
[ 2045, 52 ]
python
en
['en', 'en', 'en']
True
convert_ceil
(node, **kwargs)
Map MXNet's ceil operator attributes to onnx's Ceil operator and return the created node.
Map MXNet's ceil operator attributes to onnx's Ceil operator and return the created node.
def convert_ceil(node, **kwargs): """Map MXNet's ceil operator attributes to onnx's Ceil operator and return the created node. """ return create_basic_op_node('Ceil', node, kwargs)
[ "def", "convert_ceil", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Ceil'", ",", "node", ",", "kwargs", ")" ]
[ 2049, 0 ]
[ 2053, 53 ]
python
en
['en', 'en', 'en']
True
convert_floor
(node, **kwargs)
Map MXNet's floor operator attributes to onnx's Floor operator and return the created node.
Map MXNet's floor operator attributes to onnx's Floor operator and return the created node.
def convert_floor(node, **kwargs): """Map MXNet's floor operator attributes to onnx's Floor operator and return the created node. """ return create_basic_op_node('Floor', node, kwargs)
[ "def", "convert_floor", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Floor'", ",", "node", ",", "kwargs", ")" ]
[ 2056, 0 ]
[ 2060, 54 ]
python
en
['en', 'nl', 'en']
True
convert_reshape
(node, **kwargs)
Map MXNet's Reshape operator attributes to onnx's Reshape operator. Converts output shape attribute to output shape tensor and return multiple created nodes.
Map MXNet's Reshape operator attributes to onnx's Reshape operator. Converts output shape attribute to output shape tensor and return multiple created nodes.
def convert_reshape(node, **kwargs):
    """Map MXNet's Reshape operator attributes to onnx's Reshape operator.
    Converts output shape attribute to output shape tensor
    and return multiple created nodes.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    output_shape_list = convert_string_to_list(attrs["shape"])

    initializer = kwargs["initializer"]
    output_shape_np = np.array(output_shape_list, dtype='int64')
    data_type = onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[output_shape_np.dtype]
    dims = np.shape(output_shape_np)

    output_shape_name = "reshape_attr_tensor" + str(kwargs["idx"])
    tensor_node = onnx.helper.make_tensor_value_info(output_shape_name, data_type, dims)

    initializer.append(
        onnx.helper.make_tensor(
            name=output_shape_name,
            data_type=data_type,
            dims=dims,
            vals=output_shape_list,
            raw=False,
        )
    )

    input_nodes.append(output_shape_name)

    not_supported_shape = [-2, -3, -4]

    for val in output_shape_list:
        if val in not_supported_shape:
            raise AttributeError("Reshape: Shape value not supported in ONNX", val)

    reshape_node = onnx.helper.make_node(
        "Reshape",
        input_nodes,
        [name],
        name=name
    )

    return [tensor_node, reshape_node]
[ "def", "convert_reshape", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "output_shape_list", "=", "convert_string_to_list", "(", "attrs", "[", "\"shape\"", "]", ")", "initializer", "=", "kwargs", "[", "\"initializer\"", "]", "output_shape_np", "=", "np", ".", "array", "(", "output_shape_list", ",", "dtype", "=", "'int64'", ")", "data_type", "=", "onnx", ".", "mapping", ".", "NP_TYPE_TO_TENSOR_TYPE", "[", "output_shape_np", ".", "dtype", "]", "dims", "=", "np", ".", "shape", "(", "output_shape_np", ")", "output_shape_name", "=", "\"reshape_attr_tensor\"", "+", "str", "(", "kwargs", "[", "\"idx\"", "]", ")", "tensor_node", "=", "onnx", ".", "helper", ".", "make_tensor_value_info", "(", "output_shape_name", ",", "data_type", ",", "dims", ")", "initializer", ".", "append", "(", "onnx", ".", "helper", ".", "make_tensor", "(", "name", "=", "output_shape_name", ",", "data_type", "=", "data_type", ",", "dims", "=", "dims", ",", "vals", "=", "output_shape_list", ",", "raw", "=", "False", ",", ")", ")", "input_nodes", ".", "append", "(", "output_shape_name", ")", "not_supported_shape", "=", "[", "-", "2", ",", "-", "3", ",", "-", "4", "]", "for", "val", "in", "output_shape_list", ":", "if", "val", "in", "not_supported_shape", ":", "raise", "AttributeError", "(", "\"Reshape: Shape value not supported in ONNX\"", ",", "val", ")", "reshape_node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Reshape\"", ",", "input_nodes", ",", "[", "name", "]", ",", "name", "=", "name", ")", "return", "[", "tensor_node", ",", "reshape_node", "]" ]
[ 2064, 0 ]
[ 2106, 38 ]
python
en
['en', 'en', 'en']
True
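Note: since ONNX opset 5, Reshape takes the target shape as a second tensor input rather than an attribute, which is why the converter above materializes MXNet's shape attribute as an int64 initializer. Below is a minimal, self-contained sketch of that pattern using only the public onnx.helper API; the tensor names and shapes are made up for illustration and are not taken from the dataset.

import onnx
from onnx import helper, TensorProto

# hypothetical target shape stored as an int64 initializer
shape_init = helper.make_tensor("target_shape", TensorProto.INT64, dims=(2,), vals=[3, 2])

# Reshape consumes the data tensor and the shape tensor as inputs
reshape = helper.make_node("Reshape", inputs=["x", "target_shape"], outputs=["y"], name="reshape0")

graph = helper.make_graph(
    [reshape],
    "reshape_example",
    inputs=[helper.make_tensor_value_info("x", TensorProto.FLOAT, [2, 3])],
    outputs=[helper.make_tensor_value_info("y", TensorProto.FLOAT, [3, 2])],
    initializer=[shape_init],
)
onnx.checker.check_model(helper.make_model(graph))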
convert_cast
(node, **kwargs)
Map MXNet's Cast operator attributes to onnx's Cast operator and return the created node.
Map MXNet's Cast operator attributes to onnx's Cast operator and return the created node.
def convert_cast(node, **kwargs):
    """Map MXNet's Cast operator attributes to onnx's Cast operator
    and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)
    dtype = attrs["dtype"]

    # dtype can be mapped only with types from TensorProto
    # float32 is mapped to float and float64 to double in onnx
    # following tensorproto mapping https://github.com/onnx/onnx/blob/master/onnx/mapping.py
    if dtype == 'float32':
        dtype = 'float'
    elif dtype == 'float64':
        dtype = 'double'

    node = onnx.helper.make_node(
        "Cast",
        input_nodes,
        [name],
        to=getattr(onnx.TensorProto, dtype.upper()),
        name=name,
    )
    return [node]
[ "def", "convert_cast", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "dtype", "=", "attrs", "[", "\"dtype\"", "]", "# dtype can be mapped only with types from TensorProto", "# float32 is mapped to float and float64 to double in onnx", "# following tensorproto mapping https://github.com/onnx/onnx/blob/master/onnx/mapping.py", "if", "dtype", "==", "'float32'", ":", "dtype", "=", "'float'", "elif", "dtype", "==", "'float64'", ":", "dtype", "=", "'double'", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Cast\"", ",", "input_nodes", ",", "[", "name", "]", ",", "to", "=", "getattr", "(", "onnx", ".", "TensorProto", ",", "dtype", ".", "upper", "(", ")", ")", ",", "name", "=", "name", ",", ")", "return", "[", "node", "]" ]
[ 2109, 0 ]
[ 2132, 17 ]
python
en
['en', 'en', 'en']
True
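Note: the dtype handling above works because ONNX names its scalar types after C types, so MXNet's 'float32'/'float64' strings must be renamed before the getattr lookup on onnx.TensorProto. A small sketch of that lookup (illustrative only; the helper function name is made up):

import onnx

def mx_dtype_to_tensorproto(dtype):
    # same renaming the converter applies before the enum lookup
    if dtype == 'float32':
        dtype = 'float'
    elif dtype == 'float64':
        dtype = 'double'
    return getattr(onnx.TensorProto, dtype.upper())

print(mx_dtype_to_tensorproto('float32'))  # 1  -> onnx.TensorProto.FLOAT
print(mx_dtype_to_tensorproto('float64'))  # 11 -> onnx.TensorProto.DOUBLE
print(mx_dtype_to_tensorproto('int32'))    # 6  -> onnx.TensorProto.INT32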
convert_slice_axis
(node, **kwargs)
Map MXNet's slice_axis operator attributes to onnx's Slice operator and return the created node.
Map MXNet's slice_axis operator attributes to onnx's Slice operator and return the created node.
def convert_slice_axis(node, **kwargs):
    """Map MXNet's slice_axis operator attributes to onnx's Slice operator
    and return the created node.
    """
    name, input_nodes, input_shapes, attrs = get_inputs(node, kwargs, with_shapes=True)
    axes = int(attrs.get("axis"))
    starts = int(attrs.get("begin"))
    ends = attrs.get("end", None)
    if not ends or ends == 'None':
        # ONNX doesn't support None for ends. Since ends=None depicts
        # length of dimension, passing dimension in this case.
        in_shape = input_shapes[0]
        ends = in_shape[axes]

    export_nodes = []

    starts = np.atleast_1d(np.asarray(starts, dtype=np.int))
    ends = np.atleast_1d(np.asarray(ends, dtype=np.int))
    axes = np.atleast_1d(np.asarray(axes, dtype=np.int))

    starts_node = create_helper_tensor_node(starts, name + '__starts', kwargs)
    export_nodes.extend(starts_node)
    starts_node = starts_node[-1].name

    ends_node = create_helper_tensor_node(ends, name + '__ends', kwargs)
    export_nodes.extend(ends_node)
    ends_node = ends_node[-1].name

    axes_node = create_helper_tensor_node(axes, name + '__axes', kwargs)
    export_nodes.extend(axes_node)
    axes_node = axes_node[-1].name

    input_node = input_nodes[0]
    node = onnx.helper.make_node(
        "Slice",
        [input_node, starts_node, ends_node, axes_node],
        [name],
        name=name,
    )
    export_nodes.extend([node])

    return export_nodes
[ "def", "convert_slice_axis", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "input_shapes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ",", "with_shapes", "=", "True", ")", "axes", "=", "int", "(", "attrs", ".", "get", "(", "\"axis\"", ")", ")", "starts", "=", "int", "(", "attrs", ".", "get", "(", "\"begin\"", ")", ")", "ends", "=", "attrs", ".", "get", "(", "\"end\"", ",", "None", ")", "if", "not", "ends", "or", "ends", "==", "'None'", ":", "# ONNX doesn't support None for ends. Since ends=None depicts", "# length of dimension, passing dimension in this case.", "in_shape", "=", "input_shapes", "[", "0", "]", "ends", "=", "in_shape", "[", "axes", "]", "export_nodes", "=", "[", "]", "starts", "=", "np", ".", "atleast_1d", "(", "np", ".", "asarray", "(", "starts", ",", "dtype", "=", "np", ".", "int", ")", ")", "ends", "=", "np", ".", "atleast_1d", "(", "np", ".", "asarray", "(", "ends", ",", "dtype", "=", "np", ".", "int", ")", ")", "axes", "=", "np", ".", "atleast_1d", "(", "np", ".", "asarray", "(", "axes", ",", "dtype", "=", "np", ".", "int", ")", ")", "starts_node", "=", "create_helper_tensor_node", "(", "starts", ",", "name", "+", "'__starts'", ",", "kwargs", ")", "export_nodes", ".", "extend", "(", "starts_node", ")", "starts_node", "=", "starts_node", "[", "-", "1", "]", ".", "name", "ends_node", "=", "create_helper_tensor_node", "(", "ends", ",", "name", "+", "'__ends'", ",", "kwargs", ")", "export_nodes", ".", "extend", "(", "ends_node", ")", "ends_node", "=", "ends_node", "[", "-", "1", "]", ".", "name", "axes_node", "=", "create_helper_tensor_node", "(", "axes", ",", "name", "+", "'__axes'", ",", "kwargs", ")", "export_nodes", ".", "extend", "(", "axes_node", ")", "axes_node", "=", "axes_node", "[", "-", "1", "]", ".", "name", "input_node", "=", "input_nodes", "[", "0", "]", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Slice\"", ",", "[", "input_node", ",", "starts_node", ",", "ends_node", ",", "axes_node", "]", ",", "[", "name", "]", ",", "name", "=", "name", ",", ")", "export_nodes", ".", "extend", "(", "[", "node", "]", ")", "return", "export_nodes" ]
[ 2136, 0 ]
[ 2178, 23 ]
python
en
['en', 'en', 'en']
True
convert_slice_channel
(node, **kwargs)
Map MXNet's SliceChannel operator attributes to onnx's Squeeze or Split operator based on squeeze_axis attribute and return the created node.
Map MXNet's SliceChannel operator attributes to onnx's Squeeze or Split operator based on squeeze_axis attribute and return the created node.
def convert_slice_channel(node, **kwargs):
    """Map MXNet's SliceChannel operator attributes to onnx's Squeeze or Split
    operator based on squeeze_axis attribute
    and return the created node.
    """
    name, input_nodes, input_shapes, attrs = get_inputs(node, kwargs, with_shapes=True)
    num_outputs = int(attrs.get("num_outputs"))
    axis = int(attrs.get("axis", 1))
    squeeze_axis = int(attrs.get("squeeze_axis", 0))

    if squeeze_axis == 1 and num_outputs == 1:
        node = onnx.helper.make_node(
            "Squeeze",
            input_nodes,
            [name],
            axes=[axis],
            name=name,
        )
        return [node]
    elif squeeze_axis == 0 and num_outputs > 1:
        in_shape = input_shapes[0]
        split = in_shape[axis] // num_outputs
        node = onnx.helper.make_node(
            "Split",
            input_nodes,
            [name+'_output'+str(i) for i in range(num_outputs)],
            axis=axis,
            split=[split for _ in range(num_outputs)],
            name=name,
        )
        return [node]
    else:
        raise NotImplementedError("SliceChannel operator with num_outputs>1 and"
                                  "squeeze_axis true is not implemented.")
[ "def", "convert_slice_channel", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "input_shapes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ",", "with_shapes", "=", "True", ")", "num_outputs", "=", "int", "(", "attrs", ".", "get", "(", "\"num_outputs\"", ")", ")", "axis", "=", "int", "(", "attrs", ".", "get", "(", "\"axis\"", ",", "1", ")", ")", "squeeze_axis", "=", "int", "(", "attrs", ".", "get", "(", "\"squeeze_axis\"", ",", "0", ")", ")", "if", "squeeze_axis", "==", "1", "and", "num_outputs", "==", "1", ":", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Squeeze\"", ",", "input_nodes", ",", "[", "name", "]", ",", "axes", "=", "[", "axis", "]", ",", "name", "=", "name", ",", ")", "return", "[", "node", "]", "elif", "squeeze_axis", "==", "0", "and", "num_outputs", ">", "1", ":", "in_shape", "=", "input_shapes", "[", "0", "]", "split", "=", "in_shape", "[", "axis", "]", "//", "num_outputs", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Split\"", ",", "input_nodes", ",", "[", "name", "+", "'_output'", "+", "str", "(", "i", ")", "for", "i", "in", "range", "(", "num_outputs", ")", "]", ",", "axis", "=", "axis", ",", "split", "=", "[", "split", "for", "_", "in", "range", "(", "num_outputs", ")", "]", ",", "name", "=", "name", ",", ")", "return", "[", "node", "]", "else", ":", "raise", "NotImplementedError", "(", "\"SliceChannel operator with num_outputs>1 and\"", "\"squeeze_axis true is not implemented.\"", ")" ]
[ 2182, 0 ]
[ 2216, 74 ]
python
en
['en', 'en', 'en']
True
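Note: the branch above maps SliceChannel with num_outputs > 1 to an ONNX Split with equal-sized pieces along axis, and num_outputs == 1 with squeeze_axis to a Squeeze that drops that axis. A NumPy analogy of the Split branch, with made-up shapes (illustrative only, not part of the dataset):

import numpy as np

x = np.zeros((3, 6, 4), dtype=np.float32)
num_outputs, axis = 3, 1

split = x.shape[axis] // num_outputs          # 2, as computed in the converter
pieces = np.split(x, num_outputs, axis=axis)  # what ONNX Split yields as separate outputs
print(split, [p.shape for p in pieces])       # 2 [(3, 2, 4), (3, 2, 4), (3, 2, 4)]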
convert_expand_dims
(node, **kwargs)
Map MXNet's expand_dims operator attributes to onnx's Unsqueeze operator and return the created node.
Map MXNet's expand_dims operator attributes to onnx's Unsqueeze operator and return the created node.
def convert_expand_dims(node, **kwargs):
    """Map MXNet's expand_dims operator attributes to onnx's Unsqueeze operator
    and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)
    axis = int(attrs.get("axis"))

    node = onnx.helper.make_node(
        "Unsqueeze",
        input_nodes,
        [name],
        axes=[axis],
        name=name,
    )
    return [node]
[ "def", "convert_expand_dims", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "axis", "=", "int", "(", "attrs", ".", "get", "(", "\"axis\"", ")", ")", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Unsqueeze\"", ",", "input_nodes", ",", "[", "name", "]", ",", "axes", "=", "[", "axis", "]", ",", "name", "=", "name", ",", ")", "return", "[", "node", "]" ]
[ 2220, 0 ]
[ 2235, 17 ]
python
en
['en', 'en', 'en']
True
convert_squeeze
(node, **kwargs)
Map MXNet's squeeze operator attributes to onnx's squeeze operator and return the created node.
Map MXNet's squeeze operator attributes to onnx's squeeze operator and return the created node.
def convert_squeeze(node, **kwargs):
    """Map MXNet's squeeze operator attributes to onnx's squeeze operator
    and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)
    axis = attrs.get("axis", None)
    if not axis:
        raise AttributeError("Squeeze: Missing axis attribute: ONNX currently requires axis to "
                             "be specified for squeeze operator")
    axis = convert_string_to_list(axis)

    node = onnx.helper.make_node(
        "Squeeze",
        input_nodes,
        [name],
        axes=axis,
        name=name,
    )
    return [node]
[ "def", "convert_squeeze", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "axis", "=", "attrs", ".", "get", "(", "\"axis\"", ",", "None", ")", "if", "not", "axis", ":", "raise", "AttributeError", "(", "\"Squeeze: Missing axis attribute: ONNX currently requires axis to \"", "\"be specified for squeeze operator\"", ")", "axis", "=", "convert_string_to_list", "(", "axis", ")", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Squeeze\"", ",", "input_nodes", ",", "[", "name", "]", ",", "axes", "=", "axis", ",", "name", "=", "name", ",", ")", "return", "[", "node", "]" ]
[ 2238, 0 ]
[ 2257, 17 ]
python
en
['en', 'en', 'en']
True
convert_log
(node, **kwargs)
Map MXNet's log operator attributes to onnx's Log operator and return the created node.
Map MXNet's log operator attributes to onnx's Log operator and return the created node.
def convert_log(node, **kwargs): """Map MXNet's log operator attributes to onnx's Log operator and return the created node. """ return create_basic_op_node('Log', node, kwargs)
[ "def", "convert_log", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Log'", ",", "node", ",", "kwargs", ")" ]
[ 2261, 0 ]
[ 2265, 52 ]
python
en
['en', 'en', 'en']
True
convert_reciprocal
(node, **kwargs)
Map MXNet's reciprocal operator attributes to onnx's Reciprocal operator and return the created node.
Map MXNet's reciprocal operator attributes to onnx's Reciprocal operator and return the created node.
def convert_reciprocal(node, **kwargs): """Map MXNet's reciprocal operator attributes to onnx's Reciprocal operator and return the created node. """ return create_basic_op_node('Reciprocal', node, kwargs)
[ "def", "convert_reciprocal", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Reciprocal'", ",", "node", ",", "kwargs", ")" ]
[ 2268, 0 ]
[ 2272, 59 ]
python
en
['en', 'en', 'en']
True
convert_power
(node, **kwargs)
Map MXNet's _power operator attributes to onnx's Pow operator and return the created node.
Map MXNet's _power operator attributes to onnx's Pow operator and return the created node.
def convert_power(node, **kwargs): """Map MXNet's _power operator attributes to onnx's Pow operator and return the created node. """ return create_basic_op_node('Pow', node, kwargs)
[ "def", "convert_power", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Pow'", ",", "node", ",", "kwargs", ")" ]
[ 2275, 0 ]
[ 2279, 52 ]
python
en
['en', 'en', 'en']
True
convert_broadcast_power
(node, **kwargs)
Map MXNet's _power operator attributes to onnx's Pow operator and return the created node.
Map MXNet's _power operator attributes to onnx's Pow operator and return the created node.
def convert_broadcast_power(node, **kwargs): """Map MXNet's _power operator attributes to onnx's Pow operator and return the created node. """ return create_basic_op_node('Pow', node, kwargs)
[ "def", "convert_broadcast_power", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Pow'", ",", "node", ",", "kwargs", ")" ]
[ 2282, 0 ]
[ 2286, 52 ]
python
en
['en', 'en', 'en']
True
convert_sqrt
(node, **kwargs)
Map MXNet's sqrt operator attributes to onnx's Sqrt operator and return the created node.
Map MXNet's sqrt operator attributes to onnx's Sqrt operator and return the created node.
def convert_sqrt(node, **kwargs): """Map MXNet's sqrt operator attributes to onnx's Sqrt operator and return the created node. """ return create_basic_op_node('Sqrt', node, kwargs)
[ "def", "convert_sqrt", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Sqrt'", ",", "node", ",", "kwargs", ")" ]
[ 2289, 0 ]
[ 2293, 53 ]
python
en
['en', 'mt', 'en']
True
convert_depthtospace
(node, **kwargs)
Map MXNet's depth_to_space operator attributes to onnx's DepthToSpace operator and return the created node.
Map MXNet's depth_to_space operator attributes to onnx's DepthToSpace operator and return the created node.
def convert_depthtospace(node, **kwargs):
    """Map MXNet's depth_to_space operator attributes to onnx's
    DepthToSpace operator and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    blksize = int(attrs.get("block_size", 0))

    node = onnx.helper.make_node(
        "DepthToSpace",
        input_nodes,
        [name],
        blocksize=blksize,
        name=name,
    )
    return [node]
[ "def", "convert_depthtospace", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "blksize", "=", "int", "(", "attrs", ".", "get", "(", "\"block_size\"", ",", "0", ")", ")", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"DepthToSpace\"", ",", "input_nodes", ",", "[", "name", "]", ",", "blocksize", "=", "blksize", ",", "name", "=", "name", ",", ")", "return", "[", "node", "]" ]
[ 2296, 0 ]
[ 2311, 17 ]
python
en
['en', 'en', 'en']
True
convert_spacetodepth
(node, **kwargs)
Map MXNet's space_to_depth operator attributes to onnx's SpaceToDepth operator and return the created node.
Map MXNet's space_to_depth operator attributes to onnx's SpaceToDepth operator and return the created node.
def convert_spacetodepth(node, **kwargs):
    """Map MXNet's space_to_depth operator attributes to onnx's
    SpaceToDepth operator and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    blksize = int(attrs.get("block_size", 0))

    node = onnx.helper.make_node(
        "SpaceToDepth",
        input_nodes,
        [name],
        blocksize=blksize,
        name=name,
    )
    return [node]
[ "def", "convert_spacetodepth", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "blksize", "=", "int", "(", "attrs", ".", "get", "(", "\"block_size\"", ",", "0", ")", ")", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"SpaceToDepth\"", ",", "input_nodes", ",", "[", "name", "]", ",", "blocksize", "=", "blksize", ",", "name", "=", "name", ",", ")", "return", "[", "node", "]" ]
[ 2314, 0 ]
[ 2329, 17 ]
python
en
['en', 'en', 'en']
True
convert_square
(node, **kwargs)
Map MXNet's square operator attributes to onnx's Pow operator and return the created node.
Map MXNet's square operator attributes to onnx's Pow operator and return the created node.
def convert_square(node, **kwargs):
    """Map MXNet's square operator attributes to onnx's Pow operator
    and return the created node.
    """
    name, input_nodes, _ = get_inputs(node, kwargs)

    initializer = kwargs["initializer"]
    data_type = onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[np.dtype('int64')]

    power2_name = "square_tensor" + str(kwargs["idx"])
    tensor_node = onnx.helper.make_tensor_value_info(power2_name, data_type, (1,))
    initializer.append(
        onnx.helper.make_tensor(
            name=power2_name,
            data_type=data_type,
            dims=(1,),
            vals=[2],
            raw=False,
        )
    )

    input_nodes.append(power2_name)

    node = onnx.helper.make_node(
        "Pow",
        input_nodes,
        [name],
        name=name
    )
    return [tensor_node, node]
[ "def", "convert_square", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "_", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "initializer", "=", "kwargs", "[", "\"initializer\"", "]", "data_type", "=", "onnx", ".", "mapping", ".", "NP_TYPE_TO_TENSOR_TYPE", "[", "np", ".", "dtype", "(", "'int64'", ")", "]", "power2_name", "=", "\"square_tensor\"", "+", "str", "(", "kwargs", "[", "\"idx\"", "]", ")", "tensor_node", "=", "onnx", ".", "helper", ".", "make_tensor_value_info", "(", "power2_name", ",", "data_type", ",", "(", "1", ",", ")", ")", "initializer", ".", "append", "(", "onnx", ".", "helper", ".", "make_tensor", "(", "name", "=", "power2_name", ",", "data_type", "=", "data_type", ",", "dims", "=", "(", "1", ",", ")", ",", "vals", "=", "[", "2", "]", ",", "raw", "=", "False", ",", ")", ")", "input_nodes", ".", "append", "(", "power2_name", ")", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "\"Pow\"", ",", "input_nodes", ",", "[", "name", "]", ",", "name", "=", "name", ")", "return", "[", "tensor_node", ",", "node", "]" ]
[ 2332, 0 ]
[ 2361, 30 ]
python
en
['en', 'en', 'en']
True
convert_sum
(node, **kwargs)
Map MXNet's sum operator attributes to onnx's ReduceSum operator and return the created node.
Map MXNet's sum operator attributes to onnx's ReduceSum operator and return the created node.
def convert_sum(node, **kwargs):
    """Map MXNet's sum operator attributes to onnx's ReduceSum operator
    and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    mx_axis = attrs.get("axis", None)
    axes = convert_string_to_list(str(mx_axis)) if mx_axis is not None else None

    keepdims = get_boolean_attribute_value(attrs, "keepdims")

    if axes:
        node = onnx.helper.make_node(
            'ReduceSum',
            inputs=input_nodes,
            outputs=[name],
            axes=axes,
            keepdims=keepdims,
            name=name
        )
    else:
        node = onnx.helper.make_node(
            'ReduceSum',
            inputs=input_nodes,
            outputs=[name],
            keepdims=keepdims,
            name=name
        )
    return [node]
[ "def", "convert_sum", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "mx_axis", "=", "attrs", ".", "get", "(", "\"axis\"", ",", "None", ")", "axes", "=", "convert_string_to_list", "(", "str", "(", "mx_axis", ")", ")", "if", "mx_axis", "is", "not", "None", "else", "None", "keepdims", "=", "get_boolean_attribute_value", "(", "attrs", ",", "\"keepdims\"", ")", "if", "axes", ":", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'ReduceSum'", ",", "inputs", "=", "input_nodes", ",", "outputs", "=", "[", "name", "]", ",", "axes", "=", "axes", ",", "keepdims", "=", "keepdims", ",", "name", "=", "name", ")", "else", ":", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'ReduceSum'", ",", "inputs", "=", "input_nodes", ",", "outputs", "=", "[", "name", "]", ",", "keepdims", "=", "keepdims", ",", "name", "=", "name", ")", "return", "[", "node", "]" ]
[ 2364, 0 ]
[ 2392, 17 ]
python
en
['en', 'la', 'en']
True
convert_shape
(node, **kwargs)
Map MXNet's shape_array operator attributes to onnx's Shape operator and return the created node.
Map MXNet's shape_array operator attributes to onnx's Shape operator and return the created node.
def convert_shape(node, **kwargs): """Map MXNet's shape_array operator attributes to onnx's Shape operator and return the created node. """ return create_basic_op_node('Shape', node, kwargs)
[ "def", "convert_shape", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Shape'", ",", "node", ",", "kwargs", ")" ]
[ 2396, 0 ]
[ 2400, 54 ]
python
en
['en', 'en', 'en']
True
convert_hardsigmoid
(node, **kwargs)
Map MXNet's hard_sigmoid operator attributes to onnx's HardSigmoid operator and return the created node.
Map MXNet's hard_sigmoid operator attributes to onnx's HardSigmoid operator and return the created node.
def convert_hardsigmoid(node, **kwargs):
    """Map MXNet's hard_sigmoid operator attributes to onnx's HardSigmoid operator
    and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)

    # Converting to float32
    alpha = float(attrs.get("alpha", 0.2))
    beta = float(attrs.get("beta", 0.5))

    node = onnx.helper.make_node(
        'HardSigmoid',
        input_nodes,
        [name],
        alpha=alpha,
        beta=beta,
        name=name
    )
    return [node]
[ "def", "convert_hardsigmoid", "(", "node", ",", "*", "*", "kwargs", ")", ":", "name", ",", "input_nodes", ",", "attrs", "=", "get_inputs", "(", "node", ",", "kwargs", ")", "# Converting to float32", "alpha", "=", "float", "(", "attrs", ".", "get", "(", "\"alpha\"", ",", "0.2", ")", ")", "beta", "=", "float", "(", "attrs", ".", "get", "(", "\"beta\"", ",", "0.5", ")", ")", "node", "=", "onnx", ".", "helper", ".", "make_node", "(", "'HardSigmoid'", ",", "input_nodes", ",", "[", "name", "]", ",", "alpha", "=", "alpha", ",", "beta", "=", "beta", ",", "name", "=", "name", ")", "return", "[", "node", "]" ]
[ 2404, 0 ]
[ 2422, 17 ]
python
en
['en', 'fi', 'en']
True
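Note: ONNX HardSigmoid computes y = max(0, min(1, alpha*x + beta)); the converter above just forwards MXNet's alpha/beta attributes, falling back to 0.2 and 0.5. A quick NumPy check of that formula with those defaults (illustrative only, not part of the dataset):

import numpy as np

def hard_sigmoid(x, alpha=0.2, beta=0.5):
    # piecewise-linear approximation of the logistic sigmoid
    return np.clip(alpha * x + beta, 0.0, 1.0)

print(hard_sigmoid(np.array([-5.0, -1.0, 0.0, 1.0, 5.0])))
# [0.  0.3 0.5 0.7 1. ]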
convert_broadcast_lesser
(node, **kwargs)
Map MXNet's broadcast_lesser operator attributes to onnx's Less operator and return the created node.
Map MXNet's broadcast_lesser operator attributes to onnx's Less operator and return the created node.
def convert_broadcast_lesser(node, **kwargs): """Map MXNet's broadcast_lesser operator attributes to onnx's Less operator and return the created node. """ return create_basic_op_node('Less', node, kwargs)
[ "def", "convert_broadcast_lesser", "(", "node", ",", "*", "*", "kwargs", ")", ":", "return", "create_basic_op_node", "(", "'Less'", ",", "node", ",", "kwargs", ")" ]
[ 2425, 0 ]
[ 2429, 53 ]
python
en
['en', 'en', 'en']
True