[
  {
    "name": "Transpose",
    "category": "Transform"
  },
  {
    "name": "Substraction",
    "category": "Transform"
  },
  {
    "name": "LayerNorm",
    "category": "Normalization"
  },
  {
    "name": "ConvolutionCV2",
    "category": "Layer"
  },
  {
    "name": "ReLU",
    "category": "Activation"
  },
  {
    "name": "Resize",
    "category": "Shape"
  },
  {
    "name": "ResizeNearestAsymFloor",
    "category": "Shape"
  },
  {
    "name": "Exp",
    "category": "Activation"
  },
  {
    "name": "Mish",
    "category": "Activation"
  },
  {
    "name": "Stddev",
    "category": "Normalization"
  },
  {
    "name": "AvgPooling",
    "category": "Pool"
  },
  {
    "name": "HardSigmoid",
    "category": "Activation"
  },
  {
    "name": "SeLU",
    "category": "Activation"
  },
  {
    "name": "LRN",
    "category": "Normalization"
  },
  {
    "name": "GeLU",
    "category": "Activation"
  },
  {
    "name": "Reshape",
    "category": "Shape"
  },
  {
    "name": "Tanh",
    "category": "Activation"
  },
  {
    "name": "MaxPooling",
    "category": "Pool"
  },
  {
    "name": "DepthwiseConvolution",
    "category": "Layer"
  },
  {
    "name": "Softmax",
    "category": "Activation"
  },
  {
    "name": "SiLU",
    "category": "Activation"
  }
]