#!/usr/bin/env python

# Generate JSON metadata describing the ONNX operator schemas
# (written to ../src/onnx-metadata.json by the __main__ entry point).
from __future__ import unicode_literals
import json
import io
import sys
from onnx import defs
from onnx.defs import OpSchema
from onnx.backend.test.case import collect_snippets

# Example code snippets keyed by operator name, harvested from the
# ONNX backend test cases.
snippets = collect_snippets()

# Maps operator name -> category label; operators missing from this
# table simply get no 'category' entry in the emitted JSON.
categories = {
    'Constant': 'Constant',
    'Conv': 'Layer',
    'ConvTranspose': 'Layer',
    'FC': 'Layer',
    'RNN': 'Layer',
    'LSTM': 'Layer',
    'GRU': 'Layer',
    'Gemm': 'Layer',
    'Dropout': 'Dropout',
    'Elu': 'Activation',
    'HardSigmoid': 'Activation',
    'LeakyRelu': 'Activation',
    'PRelu': 'Activation',
    'ThresholdedRelu': 'Activation',
    'Relu': 'Activation',
    'Selu': 'Activation',
    'Sigmoid': 'Activation',
    'Tanh': 'Activation',
    'LogSoftmax': 'Activation',
    'Softmax': 'Activation',
    'Softplus': 'Activation',
    'Softsign': 'Activation',
    'BatchNormalization': 'Normalization',
    'InstanceNormalization': 'Normalization',
    'LpNormalization': 'Normalization',
    'LRN': 'Normalization',
    'Flatten': 'Shape',
    'Reshape': 'Shape',
    'Transpose': 'Shape',
    'Tile': 'Shape',
    'Xor': 'Logic',
    'Not': 'Logic',
    'Or': 'Logic',
    'Less': 'Logic',
    'And': 'Logic',
    'Greater': 'Logic',
    'Equal': 'Logic',
    'AveragePool': 'Pool',
    'GlobalAveragePool': 'Pool',
    'GlobalLpPool': 'Pool',
    'GlobalMaxPool': 'Pool',
    'LpPool': 'Pool',
    'MaxPool': 'Pool',
    'MaxRoiPool': 'Pool',
    'Concat': 'Tensor',
    'Slice': 'Tensor',
    'Split': 'Tensor',
    'Pad': 'Tensor',
    'ImageScaler': 'Data',
    'Crop': 'Data',
    'Gather': 'Transform',
}
  63. def generate_json_attr_type(type):
  64. assert isinstance(type, OpSchema.AttrType)
  65. s = str(type)
  66. s = s[s.rfind('.')+1:].lower()
  67. if s[-1] == 's':
  68. s = s[0:-1] + '[]'
  69. return s
  70. def generate_json_attr_default_value(attr_value):
  71. if not str(attr_value):
  72. return None
  73. if attr_value.HasField('i'):
  74. return attr_value.i
  75. if attr_value.HasField('s'):
  76. return attr_value.s.decode('utf8')
  77. if attr_value.HasField('f'):
  78. return attr_value.f
  79. return None
  80. def generate_json_support_level_name(support_level):
  81. assert isinstance(support_level, OpSchema.SupportType)
  82. s = str(support_level)
  83. return s[s.rfind('.')+1:].lower()
  84. def generate_json_types(types):
  85. r = []
  86. for type in types:
  87. r.append(type)
  88. r = sorted(r)
  89. return r
  90. def generate_json(schemas, json_file):
  91. json_root = []
  92. for schema in schemas:
  93. json_schema = {}
  94. if schema.domain:
  95. json_schema['domain'] = schema.domain
  96. else:
  97. json_schema['domain'] = 'ai.onnx'
  98. json_schema['since_version'] = schema.since_version
  99. json_schema['support_level'] = generate_json_support_level_name(schema.support_level)
  100. if schema.doc:
  101. json_schema['description'] = schema.doc.lstrip()
  102. if schema.inputs:
  103. json_schema['inputs'] = []
  104. for input in schema.inputs:
  105. json_input = {}
  106. json_input['name'] = input.name
  107. json_input['description'] = input.description
  108. json_input['type'] = input.typeStr
  109. if input.option == OpSchema.FormalParameterOption.Optional:
  110. json_input['option'] = 'optional'
  111. elif input.option == OpSchema.FormalParameterOption.Variadic:
  112. json_input['option'] = 'variadic'
  113. json_schema['inputs'].append(json_input)
  114. json_schema['min_input'] = schema.min_input
  115. json_schema['max_input'] = schema.max_input
  116. if schema.outputs:
  117. json_schema['outputs'] = []
  118. for output in schema.outputs:
  119. json_output = {}
  120. json_output['name'] = output.name
  121. json_output['description'] = output.description
  122. json_output['type'] = output.typeStr
  123. if output.option == OpSchema.FormalParameterOption.Optional:
  124. json_output['option'] = 'optional'
  125. elif output.option == OpSchema.FormalParameterOption.Variadic:
  126. json_output['option'] = 'variadic'
  127. json_schema['outputs'].append(json_output)
  128. json_schema['min_output'] = schema.min_output
  129. json_schema['max_output'] = schema.max_output
  130. if schema.attributes:
  131. json_schema['attributes'] = []
  132. for _, attribute in sorted(schema.attributes.items()):
  133. json_attribute = {}
  134. json_attribute['name'] = attribute.name
  135. json_attribute['description'] = attribute.description
  136. json_attribute['type'] = generate_json_attr_type(attribute.type)
  137. json_attribute['required'] = attribute.required
  138. default_value = generate_json_attr_default_value(attribute.default_value)
  139. if default_value:
  140. json_attribute['default'] = default_value
  141. json_schema['attributes'].append(json_attribute)
  142. if schema.type_constraints:
  143. json_schema['type_constraints'] = []
  144. for type_constraint in schema.type_constraints:
  145. json_schema['type_constraints'].append({
  146. 'description': type_constraint.description,
  147. 'type_param_str': type_constraint.type_param_str,
  148. 'allowed_type_strs': type_constraint.allowed_type_strs
  149. })
  150. if schema.name in snippets:
  151. json_schema['examples'] = []
  152. for summary, code in sorted(snippets[schema.name]):
  153. json_schema['examples'].append({
  154. 'summary': summary,
  155. 'code': code
  156. })
  157. if schema.name in categories:
  158. json_schema['category'] = categories[schema.name]
  159. json_root.append({
  160. 'name': schema.name,
  161. 'schema': json_schema
  162. })
  163. with io.open(json_file, 'w', newline='') as fout:
  164. json_root = json.dumps(json_root, sort_keys=True, indent=2)
  165. for line in json_root.splitlines():
  166. line = line.rstrip()
  167. if sys.version_info[0] < 3:
  168. line = unicode(line)
  169. fout.write(line)
  170. fout.write('\n')
  171. if __name__ == '__main__':
  172. schemas = defs.get_all_schemas_with_history()
  173. schemas = sorted(schemas, key=lambda schema: schema.name)
  174. generate_json(schemas, '../src/onnx-metadata.json')