Browse Source

Update onnx-metadata.json

Lutz Roeder 6 years ago
parent
commit
225580b62c
1 changed file with 62 additions and 2 deletions
  1. 62 2
      src/onnx-metadata.json

+ 62 - 2
src/onnx-metadata.json

@@ -14323,6 +14323,18 @@
           "code": "reduction = 'none'\nnode = onnx.helper.make_node(\n    'NegativeLogLikelihoodLoss',\n    inputs=['input', 'target'],\n    outputs=['loss'],\n    reduction=reduction\n)\n\nN, C = 3, 5\nnp.random.seed(0)\ninput = np.random.rand(N, C).astype(np.float32)\ntarget = np.random.randint(0, high=C, size=(N, ))\n\nnegative_log_likelihood_loss = compute_negative_log_likelihood_loss(input, target, weight=None, reduction=reduction)\n\nexpect(node, inputs=[input, target], outputs=[negative_log_likelihood_loss],\n    name='test_negative_log_likelihood_loss_input_shape_is_NC')",
           "summary": "input_shape_is_NC"
         },
+        {
+          "code": "reduction = 'mean'\nnode = onnx.helper.make_node(\n    'NegativeLogLikelihoodLoss',\n    inputs=['input', 'target'],\n    outputs=['loss'],\n    reduction=reduction\n)\n\nN, C, d1 = 3, 5, 2\nnp.random.seed(0)\ninput = np.random.rand(N, C, d1).astype(np.float32)\ntarget = np.random.randint(0, high=C, size=(N, d1))\n\nnegative_log_likelihood_loss = compute_negative_log_likelihood_loss(input, target, weight=None, reduction=reduction)\n\nexpect(node, inputs=[input, target], outputs=[negative_log_likelihood_loss],\n    name='test_negative_log_likelihood_loss_input_shape_is_NCd1')",
+          "summary": "input_shape_is_NCd1"
+        },
+        {
+          "code": "reduction = 'mean'\nnode = onnx.helper.make_node(\n    'NegativeLogLikelihoodLoss',\n    inputs=['input', 'target', 'weight'],\n    outputs=['loss'],\n    reduction=reduction\n)\n\nN, C, d1 = 3, 5, 2\nnp.random.seed(0)\ninput = np.random.rand(N, C, d1).astype(np.float32)\ntarget = np.random.randint(0, high=C, size=(N, d1))\nweight = np.random.rand(C).astype(np.float32)\n\nnegative_log_likelihood_loss = compute_negative_log_likelihood_loss(input, target, weight=weight, reduction=reduction)\n\nexpect(node, inputs=[input, target, weight], outputs=[negative_log_likelihood_loss],\n    name='test_negative_log_likelihood_loss_input_shape_is_NCd1_weight')",
+          "summary": "input_shape_is_NCd1_weight"
+        },
+        {
+          "code": "reduction = 'mean'\nignore_index = np.int64(1)\nnode = onnx.helper.make_node(\n    'NegativeLogLikelihoodLoss',\n    inputs=['input', 'target', 'weight'],\n    outputs=['loss'],\n    reduction=reduction,\n    ignore_index=ignore_index\n)\n\nN, C, d1 = 3, 5, 2\nnp.random.seed(0)\ninput = np.random.rand(N, C, d1).astype(np.float32)\ntarget = np.random.randint(0, high=C, size=(N, d1))\ntarget[0][0] = np.int64(1)\nweight = np.random.rand(C).astype(np.float32)\n\nnegative_log_likelihood_loss = compute_negative_log_likelihood_loss(input, target, weight=weight, reduction=reduction, ignore_index=ignore_index)\n\nexpect(node, inputs=[input, target, weight], outputs=[negative_log_likelihood_loss],\n    name='test_negative_log_likelihood_loss_iinput_shape_is_NCd1_weight_ignore_index')",
+          "summary": "input_shape_is_NCd1_weight_ignore_index"
+        },
         {
           "code": "reduction = 'none'\nnode = onnx.helper.make_node(\n    'NegativeLogLikelihoodLoss',\n    inputs=['input', 'target'],\n    outputs=['loss'],\n    reduction=reduction\n)\n\nN, C, dim1, dim2 = 3, 5, 6, 6\nnp.random.seed(0)\ninput = np.random.rand(N, C, dim1, dim2).astype(np.float32)\ntarget = np.random.randint(0, high=C, size=(N, dim1, dim2))\n\nnegative_log_likelihood_loss = compute_negative_log_likelihood_loss(input, target, weight=None, reduction=reduction)\n\nexpect(node, inputs=[input, target], outputs=[negative_log_likelihood_loss],\n    name='test_negative_log_likelihood_loss_input_shape_is_NCd1d2')",
           "summary": "input_shape_is_NCd1d2"
@@ -14348,8 +14360,16 @@
           "summary": "input_shape_is_NCd1d2_with_weight_reduction_sum"
         },
         {
-          "code": "reduction = 'sum'\nignore_index = np.int64(0)\nnode = onnx.helper.make_node(\n    'NegativeLogLikelihoodLoss',\n    inputs=['input', 'target', 'weight'],\n    outputs=['loss'],\n    reduction=reduction,\n    ignore_index=ignore_index\n)\n\nN, C, dim1, dim2 = 3, 5, 6, 6\nnp.random.seed(0)\ninput = np.random.rand(N, C, dim1, dim2).astype(np.float32)\ntarget = np.random.randint(0, high=C, size=(N, dim1, dim2))\ntarget[0][0][0] = 0\nweight = np.random.rand(C).astype(np.float32)\n\nnegative_log_likelihood_loss = compute_negative_log_likelihood_loss(input, target, weight=weight, reduction=reduction, ignore_index=ignore_index)\n\nexpect(node, inputs=[input, target, weight], outputs=[negative_log_likelihood_loss],\n    name='test_negative_log_likelihood_loss_input_shape_is_NCd1d2_with_weight_reduction_sum_ignore_index')",
+          "code": "reduction = 'sum'\nignore_index = np.int64(0)\nnode = onnx.helper.make_node(\n    'NegativeLogLikelihoodLoss',\n    inputs=['input', 'target', 'weight'],\n    outputs=['loss'],\n    reduction=reduction,\n    ignore_index=ignore_index\n)\n\nN, C, dim1, dim2 = 3, 5, 6, 6\nnp.random.seed(0)\ninput = np.random.rand(N, C, dim1, dim2).astype(np.float32)\ntarget = np.random.randint(0, high=C, size=(N, dim1, dim2))\ntarget[0][0][0] = np.int64(0)\nweight = np.random.rand(C).astype(np.float32)\n\nnegative_log_likelihood_loss = compute_negative_log_likelihood_loss(input, target, weight=weight, reduction=reduction, ignore_index=ignore_index)\n\nexpect(node, inputs=[input, target, weight], outputs=[negative_log_likelihood_loss],\n    name='test_negative_log_likelihood_loss_input_shape_is_NCd1d2_with_weight_reduction_sum_ignore_index')",
           "summary": "input_shape_is_NCd1d2_with_weight_reduction_sum_ignore_index"
+        },
+        {
+          "code": "reduction = 'mean'\n\nnode = onnx.helper.make_node(\n    'NegativeLogLikelihoodLoss',\n    inputs=['input', 'target', 'weight'],\n    outputs=['loss'],\n    reduction=reduction)\n\nN, C, dim1, dim2, dim3, dim4, dim5 = 3, 5, 6, 6, 5, 3, 4\nnp.random.seed(0)\ninput = np.random.rand(N, C, dim1, dim2, dim3, dim4, dim5).astype(np.float32)\ntarget = np.random.randint(0, high=C, size=(N, dim1, dim2, dim3, dim4, dim5))\nweight = np.random.rand(C).astype(np.float32)\n\nnegative_log_likelihood_loss = compute_negative_log_likelihood_loss(input,\n                                                                    target,\n                                                                    weight=weight,\n                                                                    reduction=reduction)\n\nexpect(node, inputs=[input, target, weight], outputs=[negative_log_likelihood_loss],\n    name='test_negative_log_likelihood_loss_input_shape_is_NCd1d2d3d4d5_mean_weight')",
+          "summary": "input_shape_is_NCd1d2d3d4d5_mean_weight"
+        },
+        {
+          "code": "reduction = 'none'\n\nnode = onnx.helper.make_node(\n    'NegativeLogLikelihoodLoss',\n    inputs=['input', 'target'],\n    outputs=['loss'],\n    reduction=reduction)\n\nN, C, dim1, dim2, dim3, dim4, dim5 = 3, 5, 6, 6, 5, 3, 4\nnp.random.seed(0)\ninput = np.random.rand(N, C, dim1, dim2, dim3, dim4, dim5).astype(np.float32)\ntarget = np.random.randint(0, high=C, size=(N, dim1, dim2, dim3, dim4, dim5))\n\nnegative_log_likelihood_loss = compute_negative_log_likelihood_loss(input,\n                                                                    target,\n                                                                    reduction=reduction)\n\nexpect(node, inputs=[input, target], outputs=[negative_log_likelihood_loss],\n    name='test_negative_log_likelihood_loss_input_shape_is_NCd1d2d3d4d5_none_no_weight')",
+          "summary": "input_shape_is_NCd1d2d3d4d5_none_no_weight"
         }
       ],
       "inputs": [
@@ -21711,6 +21731,26 @@
      "description": "Loss function that measures the softmax cross entropy\nbetween 'scores' and 'labels'.\nThis operator first computes a loss tensor whose shape is identical to the labels input.\nIf the input is 2-D with shape (N, C), the loss tensor may be a N-element vector L = (l_1, l_2, ..., l_N).\nIf the input is N-D tensor with shape (N, C, D1, D2, ..., Dk),\nthe loss tensor L may have (N, D1, D2, ..., Dk) as its shape and L[i,][j_1][j_2]...[j_k] denotes a scalar element in L.\nAfter L is available, this operator can optionally do a reduction operator.\n\nshape(scores): (N, C) where C is the number of classes, or (N, C, D1, D2,..., Dk),\n        with K >= 1 in case of K-dimensional loss.\nshape(labels): (N) where each value is 0 <= labels[i] <= C-1, or (N, D1, D2,..., Dk),\n        with K >= 1 in case of K-dimensional loss.\n\nThe loss for one sample, l_i, can be calculated as follows:\n    l[i][d1][d2]...[dk] = -y[i][c][d1][d2]..[dk], where i is the index of classes.\nor\n    l[i][d1][d2]...[dk] = -y[i][c][d1][d2]..[dk] * weights[c], if 'weights' is provided.\n\nwhere:\n    p = Softmax(scores)\n    y = Log(p)\n    c = labels[i][d1][d2]...[dk]\n\nFinally, L is optionally reduced:\nIf reduction = 'none', the output is L with shape (N, D1, D2, ..., Dk).\nIf reduction = 'sum', the output is scalar: Sum(L).\nIf reduction = 'mean', the output is scalar: ReduceMean(L), or if weight is provided: ReduceSum(L) / ReduceSum(W),\nwhere tensor W is of shape (N, D1, D2, ..., Dk) and W[n][d1][d2]...[dk] = weights[labels[i][d1][d2]...[dk]].\n",
       "domain": "ai.onnx",
       "examples": [
+        {
+          "code": "reduction = 'mean'\nignore_index = np.int64(-1)\n\nnode = onnx.helper.make_node('SoftmaxCrossEntropyLoss',\n                             inputs=['x', 'y', 'w'],\n                             outputs=['z'],\n                             reduction=reduction,\n                             ignore_index=ignore_index)\n\nN, C, dim1 = 3, 5, 6\nnp.random.seed(0)\nx = np.random.rand(N, C, dim1).astype(np.float32)\nlabels = np.random.randint(0, high=C, size=(N, dim1))\nlabels[0][0] = -1\nweight = np.random.rand(C).astype(np.float32)\n\nsce = softmaxcrossentropy(x,\n                          labels,\n                          weight=weight,\n                          reduction=reduction,\n                          ignore_index=ignore_index)\n\nexpect(node, inputs=[x, labels, weight], outputs=[sce], name='test_softmax_cross_entropy_input_shape_is_NCd1_mean_weight_negative_ignore_index')",
+          "summary": "input_shape_is_NCd1_mean_weight_negative_ignore_index"
+        },
+        {
+          "code": "reduction = 'none'\nignore_index = np.int64(-5)\n\nnode = onnx.helper.make_node('SoftmaxCrossEntropyLoss',\n                             inputs=['x', 'y'],\n                             outputs=['z'],\n                             reduction=reduction,\n                             ignore_index=ignore_index)\n\nN, C, dim1, dim2, dim3 = 3, 5, 6, 6, 5\nnp.random.seed(0)\nx = np.random.rand(N, C, dim1, dim2, dim3).astype(np.float32)\nlabels = np.random.randint(0, high=C, size=(N, dim1, dim2, dim3))\nlabels[0][0][0][0] = -5\n\nsce = softmaxcrossentropy(x,\n                          labels,\n                          reduction=reduction,\n                          ignore_index=ignore_index)\n\nexpect(node, inputs=[x, labels], outputs=[sce], name='test_softmax_cross_entropy_input_shape_is_NCd1d2d3_none_no_weight_negative_ignore_index')",
+          "summary": "input_shape_is_NCd1d2d3_none_no_weight_negative_ignore_index"
+        },
+        {
+          "code": "reduction = 'sum'\nignore_index = np.int64(10)\n\nnode = onnx.helper.make_node('SoftmaxCrossEntropyLoss',\n                             inputs=['x', 'y', 'w'],\n                             outputs=['z'],\n                             reduction=reduction,\n                             ignore_index=ignore_index)\n\nN, C = 3, 5\nnp.random.seed(0)\nx = np.random.rand(N, C).astype(np.float32)\nlabels = np.random.randint(0, high=C, size=(N))\nlabels[0] = 10\nweight = np.random.rand(C).astype(np.float32)\n\nsce = softmaxcrossentropy(x,\n                          labels,\n                          weight=weight,\n                          reduction=reduction,\n                          ignore_index=ignore_index)\n\nexpect(node, inputs=[x, labels, weight], outputs=[sce], name='test_softmax_cross_entropy_input_shape_is_NCd1d2d3_sum_weight_high_ignore_index')",
+          "summary": "input_shape_is_NCd1d2d3_sum_weight_high_ignore_index"
+        },
+        {
+          "code": "reduction = 'mean'\n\nnode = onnx.helper.make_node('SoftmaxCrossEntropyLoss',\n                             inputs=['x', 'y', 'w'],\n                             outputs=['z'],\n                             reduction=reduction)\n\nN, C, dim1, dim2, dim3, dim4, dim5 = 3, 5, 6, 6, 5, 3, 4\nnp.random.seed(0)\nx = np.random.rand(N, C, dim1, dim2, dim3, dim4, dim5).astype(np.float32)\nlabels = np.random.randint(0, high=C, size=(N, dim1, dim2, dim3, dim4, dim5))\nweight = np.random.rand(C).astype(np.float32)\n\nsce = softmaxcrossentropy(x,\n                        labels,\n                        weight=weight,\n                        reduction=reduction)\n\nexpect(node, inputs=[x, labels, weight], outputs=[sce], name='test_softmax_cross_entropy_input_shape_is_NCd1d2d3d4d5_mean_weight')",
+          "summary": "input_shape_is_NCd1d2d3d4d5_mean_weight"
+        },
+        {
+          "code": "reduction = 'none'\n\nnode = onnx.helper.make_node('SoftmaxCrossEntropyLoss',\n                             inputs=['x', 'y'],\n                             outputs=['z'],\n                             reduction=reduction)\n\nN, C, dim1, dim2, dim3, dim4, dim5 = 3, 5, 6, 6, 5, 3, 4\nnp.random.seed(0)\nx = np.random.rand(N, C, dim1, dim2, dim3, dim4, dim5).astype(np.float32)\nlabels = np.random.randint(0, high=C, size=(N, dim1, dim2, dim3, dim4, dim5))\n\nsce = softmaxcrossentropy(x,\n                        labels,\n                        reduction=reduction)\n\nexpect(node, inputs=[x, labels], outputs=[sce], name='test_softmax_cross_entropy_input_shape_is_NCd1d2d3d4d5_none_no_weight')",
+          "summary": "input_shape_is_NCd1d2d3d4d5_none_no_weight"
+        },
         {
           "code": "# Define operator attributes.\nreduction = 'mean'\n\n# Create operator.\nnode = onnx.helper.make_node('SoftmaxCrossEntropyLoss',\n                             inputs=['x', 'y'],\n                             outputs=['z'],\n                             reduction=reduction)\n\n# Define operator inputs.\nnp.random.seed(0)\nx = np.random.rand(3, 5).astype(np.float32)\nlabels = np.random.randint(0, high=5, size=(3, ))\n\n# Compute SoftmaxCrossEntropyLoss\nsce = softmaxcrossentropy(x, labels)\n\n# Check results\nexpect(node, inputs=[x, labels], outputs=[sce], name='test_softmax_cross_entropy_mean')",
           "summary": "softmaxcrossentropy_mean"
@@ -21719,14 +21759,34 @@
           "code": "# Define operator attributes.\nreduction = 'mean'\n\n# Create operator.\nnode = onnx.helper.make_node('SoftmaxCrossEntropyLoss',\n                             inputs=['x', 'y'],\n                             outputs=['z'],\n                             reduction=reduction)\n\n# Define operator inputs.\nnp.random.seed(0)\nx = np.random.rand(3, 5, 2).astype(np.float32)\ny = np.random.randint(0, high=5, size=(3, 2))\n\n# Compute SoftmaxCrossEntropyLoss\nsce = softmaxcrossentropy(x, y)\n\n# Check results\nexpect(node, inputs=[x, y], outputs=[sce], name='test_softmax_cross_entropy_mean_3d')",
           "summary": "softmaxcrossentropy_mean_3d"
         },
+        {
+          "code": "# Define operator attributes.\nreduction = 'mean'\nignore_index = np.int64(2)\n\n# Create operator.\nnode = onnx.helper.make_node('SoftmaxCrossEntropyLoss',\n                            inputs=['x', 'y'],\n                            outputs=['z'],\n                            reduction=reduction,\n                            ignore_index=ignore_index)\n\n# Define operator inputs.\nnp.random.seed(0)\nx = np.random.rand(3, 5).astype(np.float32)\nlabels = np.random.randint(0, high=5, size=(3, ))\nlabels[0] = np.int64(2)\n\n# Compute SoftmaxCrossEntropyLoss\nsce = softmaxcrossentropy(x, labels, ignore_index=ignore_index)\n\n# Check results\nexpect(node, inputs=[x, labels], outputs=[sce], name='test_softmax_cross_entropy_mean_no_weight_ignore_index')",
+          "summary": "softmaxcrossentropy_mean_no_weights_ignore_index"
+        },
+        {
+          "code": "# Define operator attributes.\nreduction = 'mean'\nignore_index = np.int64(2)\n\n# Create operator.\nnode = onnx.helper.make_node('SoftmaxCrossEntropyLoss',\n                            inputs=['x', 'y'],\n                            outputs=['z'],\n                            reduction=reduction,\n                            ignore_index=ignore_index)\n\n# Define operator inputs.\nnp.random.seed(0)\nx = np.random.rand(3, 5, 2).astype(np.float32)\nlabels = np.random.randint(0, high=5, size=(3, 2))\nlabels[0][0] = np.int64(2)\n\n# Compute SoftmaxCrossEntropyLoss\nsce = softmaxcrossentropy(x, labels, ignore_index=ignore_index)\n\n# Check results\nexpect(node, inputs=[x, labels], outputs=[sce], name='test_softmax_cross_entropy_mean_no_weight_ignore_index_3d')",
+          "summary": "softmaxcrossentropy_mean_no_weights_ignore_index_3d"
+        },
+        {
+          "code": "# Define operator attributes.\nreduction = 'mean'\nignore_index = np.int64(2)\n\n# Create operator.\nnode = onnx.helper.make_node('SoftmaxCrossEntropyLoss',\n                            inputs=['x', 'y'],\n                            outputs=['z'],\n                            reduction=reduction,\n                            ignore_index=ignore_index)\n\n# Define operator inputs.\nnp.random.seed(0)\nx = np.random.rand(3, 5, 2, 7).astype(np.float32)\nlabels = np.random.randint(0, high=5, size=(3, 2, 7))\nlabels[0][0][0] = np.int64(2)\n\n# Compute SoftmaxCrossEntropyLoss\nsce = softmaxcrossentropy(x, labels, reduction=reduction, ignore_index=ignore_index)\n\n# Check results\nexpect(node, inputs=[x, labels], outputs=[sce], name='test_softmax_cross_entropy_mean_no_weight_ignore_index_4d')",
+          "summary": "softmaxcrossentropy_mean_no_weights_ignore_index_4d"
+        },
         {
           "code": "# Define operator attributes.\nreduction = 'mean'\n\n# Create operator.\nnode = onnx.helper.make_node('SoftmaxCrossEntropyLoss',\n                             inputs=['x', 'y', 'w'],\n                             outputs=['z'],\n                             reduction=reduction)\n\n# Define operator inputs.\nnp.random.seed(0)\nx = np.random.rand(3, 5).astype(np.float32)\nlabels = np.random.randint(0, high=5, size=(3, ))\nweights = np.array([0.9, 0.7, 0.8, 0.9, 0.9], dtype=np.float32)\n\n# Compute SoftmaxCrossEntropyLoss\nsce = softmaxcrossentropy(x, labels, weight=weights)\n\n# Check results\nexpect(node, inputs=[x, labels, weights], outputs=[sce], name='test_softmax_cross_entropy_mean_weight')",
           "summary": "softmaxcrossentropy_mean_weights"
         },
         {
-          "code": "# Define operator attributes.\nreduction = 'mean'\nignore_index = np.int64(0)\n\n# Create operator.\nnode = onnx.helper.make_node('SoftmaxCrossEntropyLoss',\n                             inputs=['x', 'y', 'w'],\n                             outputs=['z'],\n                             reduction=reduction,\n                             ignore_index=ignore_index)\n\n# Define operator inputs.\nnp.random.seed(0)\nx = np.random.rand(3, 5).astype(np.float32)\nlabels = np.random.randint(0, high=5, size=(3, ))\nlabels[0] = 0\nweights = np.array([0.9, 0.7, 0.8, 0.9, 0.9], dtype=np.float32)\n\n# Compute SoftmaxCrossEntropyLoss\nsce = softmaxcrossentropy(x, labels, weight=weights, ignore_index=ignore_index)\n\n# Check results\nexpect(node, inputs=[x, labels, weights], outputs=[sce], name='test_softmax_cross_entropy_mean_weight_ignore_index')",
+          "code": "# Define operator attributes.\nreduction = 'mean'\nignore_index = np.int64(0)\n\n# Create operator.\nnode = onnx.helper.make_node('SoftmaxCrossEntropyLoss',\n                             inputs=['x', 'y', 'w'],\n                             outputs=['z'],\n                             reduction=reduction,\n                             ignore_index=ignore_index)\n\n# Define operator inputs.\nnp.random.seed(0)\nx = np.random.rand(3, 5).astype(np.float32)\nlabels = np.random.randint(0, high=5, size=(3, ))\nlabels[0] = np.int64(0)\nweights = np.array([0.9, 0.7, 0.8, 0.9, 0.9], dtype=np.float32)\n\n# Compute SoftmaxCrossEntropyLoss\nsce = softmaxcrossentropy(x, labels, weight=weights, ignore_index=ignore_index)\n\n# Check results\nexpect(node, inputs=[x, labels, weights], outputs=[sce], name='test_softmax_cross_entropy_mean_weight_ignore_index')",
           "summary": "softmaxcrossentropy_mean_weights_ignore_index"
         },
+        {
+          "code": "# Define operator attributes.\nreduction = 'mean'\nignore_index = np.int64(1)\n\n# Create operator.\nnode = onnx.helper.make_node('SoftmaxCrossEntropyLoss',\n                            inputs=['x', 'y', 'w'],\n                            outputs=['z'],\n                            reduction=reduction,\n                            ignore_index=ignore_index)\n\n# Define operator inputs.\nnp.random.seed(0)\nx = np.random.rand(3, 5, 2).astype(np.float32)\nlabels = np.random.randint(0, high=5, size=(3, 2))\nlabels[0][0] = np.int64(1)\nweights = np.array([0.2, 0.3, 0.6, 0.1, 0.5], dtype=np.float32)\n\n# Compute SoftmaxCrossEntropyLoss\nsce = softmaxcrossentropy(x, labels, weight=weights, ignore_index=ignore_index)\n\n# Check results\nexpect(node, inputs=[x, labels, weights], outputs=[sce], name='test_softmax_cross_entropy_mean_weight_ignore_index_3d')",
+          "summary": "softmaxcrossentropy_mean_weights_ignore_index_3d"
+        },
+        {
+          "code": "# Define operator attributes.\nreduction = 'mean'\nignore_index = np.int64(2)\n\n# Create operator.\nnode = onnx.helper.make_node('SoftmaxCrossEntropyLoss',\n                            inputs=['x', 'y', 'w'],\n                            outputs=['z'],\n                            reduction=reduction,\n                            ignore_index=ignore_index)\n\n# Define operator inputs.\nnp.random.seed(0)\nx = np.random.rand(3, 5, 2, 7).astype(np.float32)\nlabels = np.random.randint(0, high=5, size=(3, 2, 7))\nlabels[0][0][0] = np.int64(2)\nweights = np.array([0.2, 0.3, 0.6, 0.1, 0.5], dtype=np.float32)\n\n# Compute SoftmaxCrossEntropyLoss\nsce = softmaxcrossentropy(x, labels, reduction=reduction, weight=weights, ignore_index=ignore_index)\n\n# Check results\nexpect(node, inputs=[x, labels, weights], outputs=[sce], name='test_softmax_cross_entropy_mean_weight_ignore_index_4d')",
+          "summary": "softmaxcrossentropy_mean_weights_ignore_index_4d"
+        },
         {
           "code": "# Define operator attributes.\nreduction = 'none'\n\n# Create operator.\nnode = onnx.helper.make_node('SoftmaxCrossEntropyLoss',\n                             inputs=['x', 'y'],\n                             outputs=['z'],\n                             reduction=reduction)\n\n# Define operator inputs.\nnp.random.seed(0)\nx = np.random.rand(3, 5).astype(np.float32)\nlabels = np.random.randint(0, high=5, size=(3, ))\n\n# Compute SoftmaxCrossEntropyLoss\nsce = softmaxcrossentropy(x, labels, reduction='none')\n\n# Check results\nexpect(node, inputs=[x, labels], outputs=[sce], name='test_softmax_cross_entropy_none')",
           "summary": "softmaxcrossentropy_none"