Parcourir la source

Update TensorFlow.js loader (#270)

Lutz Roeder il y a 6 ans
Parent commit: c2b0897bf0
4 fichiers modifiés avec 128 ajouts et 60 suppressions
  1. +66 −22
      src/keras-metadata.json
  2. +50 −37
      src/keras.js
  3. +8 −1
      test/models.json
  4. +4 −0
      test/test.js

+ 66 - 22
src/keras-metadata.json

@@ -1341,90 +1341,112 @@
           "name": "units"
         },
         {
+          "default": "tanh",
           "description": "Activation function to use\n    (see [activations](https://keras.io/activations)).\n    Default: hyperbolic tangent (`tanh`).\n    If you pass `None`, no activation is applied\n    (ie. \"linear\" activation: `a(x) = x`).",
           "name": "activation"
         },
         {
+          "default": "hard_sigmoid",
           "description": "Activation function to use\n    for the recurrent step\n    (see [activations](https://keras.io/activations)).\n    Default: hard sigmoid (`hard_sigmoid`).\n    If you pass `None`, no activation is applied\n    (ie. \"linear\" activation: `a(x) = x`).",
           "name": "recurrent_activation"
         },
         {
           "description": "Boolean, whether the layer uses a bias vector.",
-          "name": "use_bias"
+          "name": "use_bias",
+          "visible": false
         },
         {
           "description": "Initializer for the `kernel` weights matrix,\n    used for the linear transformation of the inputs.\n    (see [initializers](https://keras.io/initializers)).",
-          "name": "kernel_initializer"
+          "name": "kernel_initializer",
+          "visible": false
         },
         {
           "description": "Initializer for the `recurrent_kernel`\n    weights matrix,\n    used for the linear transformation of the recurrent state.\n    (see [initializers](https://keras.io/initializers)).",
-          "name": "recurrent_initializer"
+          "name": "recurrent_initializer",
+          "visible": false
         },
         {
           "description": "Initializer for the bias vector\n    (see [initializers](https://keras.io/initializers)).",
-          "name": "bias_initializer"
+          "name": "bias_initializer",
+          "visible": false
         },
         {
+          "default": true,
           "description": "Boolean.\n    If True, add 1 to the bias of the forget gate at initialization.\n    Setting it to true will also force `bias_initializer=\"zeros\"`.\n    This is recommended in [Jozefowicz et al. (2015)](\n    http://www.jmlr.org/proceedings/papers/v37/jozefowicz15.pdf).",
           "name": "unit_forget_bias"
         },
         {
           "description": "Regularizer function applied to\n    the `kernel` weights matrix\n    (see [regularizer](https://keras.io/regularizers)).",
-          "name": "kernel_regularizer"
+          "name": "kernel_regularizer",
+          "visible": false
         },
         {
           "description": "Regularizer function applied to\n    the `recurrent_kernel` weights matrix\n    (see [regularizer](https://keras.io/regularizers)).",
-          "name": "recurrent_regularizer"
+          "name": "recurrent_regularizer",
+          "visible": false
         },
         {
           "description": "Regularizer function applied to the bias vector\n    (see [regularizer](https://keras.io/regularizers)).",
-          "name": "bias_regularizer"
+          "name": "bias_regularizer",
+          "visible": false
         },
         {
           "description": "Regularizer function applied to\n    the output of the layer (its \"activation\").\n    (see [regularizer](https://keras.io/regularizers)).",
-          "name": "activity_regularizer"
+          "name": "activity_regularizer",
+          "visible": false
         },
         {
           "description": "Constraint function applied to\n    the `kernel` weights matrix\n    (see [constraints](https://keras.io/constraints)).",
-          "name": "kernel_constraint"
+          "name": "kernel_constraint",
+          "visible": false
         },
         {
           "description": "Constraint function applied to\n    the `recurrent_kernel` weights matrix\n    (see [constraints](https://keras.io/constraints)).",
-          "name": "recurrent_constraint"
+          "name": "recurrent_constraint",
+          "visible": false
         },
         {
           "description": "Constraint function applied to the bias vector\n    (see [constraints](https://keras.io/constraints)).",
-          "name": "bias_constraint"
+          "name": "bias_constraint",
+          "visible": false
         },
         {
+          "default": 0.0,
           "description": "Float between 0 and 1.\n    Fraction of the units to drop for\n    the linear transformation of the inputs.",
           "name": "dropout"
         },
         {
+          "default": 0.0,
           "description": "Float between 0 and 1.\n    Fraction of the units to drop for\n    the linear transformation of the recurrent state.",
           "name": "recurrent_dropout"
         },
         {
+          "default": 1,
           "description": "Implementation mode, either 1 or 2.\n    Mode 1 will structure its operations as a larger number of\n    smaller dot products and additions, whereas mode 2 will\n    batch them into fewer, larger operations. These modes will\n    have different performance profiles on different hardware and\n    for different applications.",
           "name": "implementation"
         },
         {
+          "default": false,
           "description": "Boolean. Whether to return the last output\n    in the output sequence, or the full sequence.",
           "name": "return_sequences"
         },
         {
+          "default": false,
           "description": "Boolean. Whether to return the last state\n    in addition to the output. The returned elements of the\n    states list are the hidden state and the cell state, respectively.",
           "name": "return_state"
         },
         {
+          "default": false,
           "description": "Boolean (default False).\n    If True, process the input sequence backwards and return the\n    reversed sequence.",
           "name": "go_backwards"
         },
         {
+          "default": false,
           "description": "Boolean (default False). If True, the last state\n    for each sample at index i in a batch will be used as initial\n    state for the sample of index i in the following batch.",
           "name": "stateful"
         },
         {
+          "default": false,
           "description": "Boolean (default False).\n    If True, the network will be unrolled,\n    else a symbolic loop will be used.\n    Unrolling can speed-up a RNN,\n    although it tends to be more memory-intensive.\n    Unrolling is only suitable for short sequences.\n",
           "name": "unroll"
         }
@@ -1681,20 +1703,25 @@
           "name": "recurrent_activation"
         },
         {
+          "default": true,
           "description": "Boolean, whether the layer uses a bias vector.",
-          "name": "use_bias"
+          "name": "use_bias",
+          "visible": false
         },
         {
           "description": "Initializer for the `kernel` weights matrix,\n    used for the linear transformation of the inputs.\n    (see [initializers](https://keras.io/initializers)).",
-          "name": "kernel_initializer"
+          "name": "kernel_initializer",
+          "visible": false
         },
         {
           "description": "Initializer for the `recurrent_kernel`\n    weights matrix,\n    used for the linear transformation of the recurrent state.\n    (see [initializers](https://keras.io/initializers)).",
-          "name": "recurrent_initializer"
+          "name": "recurrent_initializer",
+          "visible": false
         },
         {
           "description": "Initializer for the bias vector\n    (see [initializers](https://keras.io/initializers)).",
-          "name": "bias_initializer"
+          "name": "bias_initializer",
+          "visible": false
         },
         {
           "description": "Boolean.\n    If True, add 1 to the bias of the forget gate at initialization.\n    Use in combination with `bias_initializer=\"zeros\"`.\n    This is recommended in [Jozefowicz et al. (2015)](\n    http://www.jmlr.org/proceedings/papers/v37/jozefowicz15.pdf).",
@@ -1702,31 +1729,38 @@
         },
         {
           "description": "Regularizer function applied to\n    the `kernel` weights matrix\n    (see [regularizer](https://keras.io/regularizers)).",
-          "name": "kernel_regularizer"
+          "name": "kernel_regularizer",
+          "visible": false
         },
         {
           "description": "Regularizer function applied to\n    the `recurrent_kernel` weights matrix\n    (see [regularizer](https://keras.io/regularizers)).",
-          "name": "recurrent_regularizer"
+          "name": "recurrent_regularizer",
+          "visible": false
         },
         {
           "description": "Regularizer function applied to the bias vector\n    (see [regularizer](https://keras.io/regularizers)).",
-          "name": "bias_regularizer"
+          "name": "bias_regularizer",
+          "visible": false
         },
         {
           "description": "Regularizer function applied to\n    the output of the layer (its \"activation\").\n    (see [regularizer](https://keras.io/regularizers)).",
-          "name": "activity_regularizer"
+          "name": "activity_regularizer",
+          "visible": false
         },
         {
           "description": "Constraint function applied to\n    the `kernel` weights matrix\n    (see [constraints](https://keras.io/constraints)).",
-          "name": "kernel_constraint"
+          "name": "kernel_constraint",
+          "visible": false
         },
         {
           "description": "Constraint function applied to\n    the `recurrent_kernel` weights matrix\n    (see [constraints](https://keras.io/constraints)).",
-          "name": "recurrent_constraint"
+          "name": "recurrent_constraint",
+          "visible": false
         },
         {
           "description": "Constraint function applied to the bias vector\n    (see [constraints](https://keras.io/constraints)).",
-          "name": "bias_constraint"
+          "name": "bias_constraint",
+          "visible": false
         },
         {
           "description": "Boolean. Whether to return the last output\n    in the output sequence, or the full sequence.",
@@ -1741,6 +1775,7 @@
           "name": "stateful"
         },
         {
+          "default": 0.0,
           "description": "Float between 0 and 1.\n    Fraction of the units to drop for\n    the linear transformation of the inputs.",
           "name": "dropout"
         },
@@ -2185,10 +2220,12 @@
           "name": "bias_constraint"
         },
         {
+          "default": 0.0,
           "description": "Float between 0 and 1.\n    Fraction of the units to drop for\n    the linear transformation of the inputs.",
           "name": "dropout"
         },
         {
+          "default": 0.0,
           "description": "Float between 0 and 1.\n    Fraction of the units to drop for\n    the linear transformation of the recurrent state.\n",
           "name": "recurrent_dropout"
         },
@@ -2218,6 +2255,7 @@
           "name": "recurrent_activation"
         },
         {
+          "default": true,
           "description": "Boolean, whether the layer uses a bias vector.",
           "name": "use_bias",
           "visible": false
@@ -2265,10 +2303,12 @@
           "name": "bias_constraint"
         },
         {
+          "default": 0.0,
           "description": "Float between 0 and 1.\n    Fraction of the units to drop for\n    the linear transformation of the inputs.",
           "name": "dropout"
         },
         {
+          "default": 0.0,
           "description": "Float between 0 and 1.\n    Fraction of the units to drop for\n    the linear transformation of the recurrent state.",
           "name": "recurrent_dropout"
         },
@@ -2306,6 +2346,7 @@
           "name": "recurrent_activation"
         },
         {
+          "default": true,
           "description": "Boolean, whether the layer uses a bias vector.",
           "name": "use_bias"
         },
@@ -2350,10 +2391,12 @@
           "name": "bias_constraint"
         },
         {
+          "default": 0.0,
           "description": "Float between 0 and 1.\n    Fraction of the units to drop for\n    the linear transformation of the inputs.",
           "name": "dropout"
         },
         {
+          "default": 0.0,
           "description": "Float between 0 and 1.\n    Fraction of the units to drop for\n    the linear transformation of the recurrent state.",
           "name": "recurrent_dropout"
         },
@@ -2655,6 +2698,7 @@
           "name": "activation"
         },
         {
+          "default": true,
           "description": "Boolean, whether the layer uses a bias vector.",
           "name": "use_bias",
           "visible": false

+ 50 - 37
src/keras.js

@@ -33,8 +33,11 @@ keras.ModelFactory = class {
                     if (root && root.nodes && root.arg_nodes && root.heads) {
                         return false;
                     }
-                    if (root && root.modelTopology && root.modelTopology.model_config) {
-                        root = root.modelTopology.model_config;
+                    if (root && root.modelTopology) {
+                        root = root.modelTopology;
+                    }
+                    if (root && root.model_config) {
+                        root = root.model_config;
                     }
                     if (root && root.class_name) {
                         return true;
@@ -55,9 +58,12 @@ keras.ModelFactory = class {
                 return;
             }
             var format = 'Keras';
+            var producer = '';
+            var version = '';
+            var backend = '';
             var model_config = null;
             var rootGroup = null;
-            var rootJson = null;
+            var weightsManifest = null;
             var identifier = context.identifier;
             try {
                 switch (identifier.split('.').pop().toLowerCase()) {
@@ -74,13 +80,30 @@ keras.ModelFactory = class {
                         if (rootGroup.attribute('model_config')) {
                             model_config = JSON.parse(rootGroup.attribute('model_config'));
                         }
+                        backend = rootGroup.attribute('backend') || '';
+                        version = rootGroup.attribute('keras_version') || '';
+                        format = format + (version ? (' v' + version) : '');
                         break;
                     case 'json':
                         model_config = JSON.parse(context.text);
-                        if (model_config && model_config.modelTopology && model_config.modelTopology.model_config) {
-                            format = 'TensorFlow.js ' + format;
-                            rootJson = model_config;
-                            model_config = model_config.modelTopology.model_config;
+                        if (model_config.keras_version) {
+                            version = model_config.keras_version;
+                            format = format + (version ? (' v' + version) : '');
+                        }
+                        if (model_config.backend) {
+                            backend = model_config.backend;
+                        }
+                        if (model_config && model_config.modelTopology) {
+                            weightsManifest = model_config.weightsManifest || null;
+                            backend = model_config.modelTopology.backend;
+                            version = model_config.modelTopology.keras_version;
+                            format = format + (version ? (' v' + version) : '');
+                            format = 'TensorFlow.js ' + (model_config.format ? model_config.format : format);
+                            producer = model_config.generatedBy ? model_config.generatedBy : '';
+                            model_config = model_config.modelTopology;
+                        }
+                        if (model_config.model_config) {
+                            model_config = model_config.model_config;
                         }
                         break;
                 }
@@ -103,7 +126,7 @@ keras.ModelFactory = class {
  
             keras.Metadata.open(host, (err, metadata) => {
                 try {
-                    var model = new keras.Model(metadata, format, model_config, rootGroup, rootJson);
+                    var model = new keras.Model(metadata, format, producer, backend, model_config, rootGroup, weightsManifest);
                     callback(null, model);
                     return;
                 }
@@ -120,21 +143,15 @@ keras.ModelFactory = class {
 
 keras.Model = class {
 
-    constructor(metadata, format, model_config, rootGroup, rootJson) {
+    constructor(metadata, format, producer, backend, model_config, rootGroup, weightsManifest) {
         this._format = format;
+        this._backend = backend;
+        this._producer = producer;
         this._graphs = [];
 
         var initializer;
         var weights = {};
         if (rootGroup) {
-            var version = rootGroup.attribute('keras_version');
-            if (version) {
-                this._version = version;
-            }
-            var backend = rootGroup.attribute('backend');
-            if (backend) {
-                this._backend = backend;
-            }
             var model_weights_group = rootGroup.group('model_weights');
             if (!model_weights_group && rootGroup.attribute('layer_names')) {
                 model_weights_group = rootGroup;
@@ -180,25 +197,17 @@ keras.Model = class {
                 }
             }
         }
-        else if (rootJson) {
-            if (rootJson.modelTopology && rootJson.modelTopology.keras_version) {
-                this._version = rootJson.modelTopology.keras_version;
-            }
-            if (rootJson.modelTopology && rootJson.modelTopology.backend) {
-                this._backend = rootJson.modelTopology.backend;
-            }
-            if (rootJson.weightsManifest) {
-                for (var manifest of rootJson.weightsManifest) {
-                    for (var weight of manifest.weights) {
-                        var p = weight.name.split('/');
-                        p.pop();
-                        initializer = new keras.Tensor(weight.name, weight.dtype, weight.shape, null, manifest.paths.join(';'));
-                        while (p.length > 0) {
-                            var weightName = p.join('/');
-                            weights[weightName] = weights[weightName] || [];
-                            weights[weightName].push(initializer);
-                            p.shift();
-                        }
+        else if (weightsManifest) {
+            for (var manifest of weightsManifest) {
+                for (var weight of manifest.weights) {
+                    var p = weight.name.split('/');
+                    p.pop();
+                    initializer = new keras.Tensor(weight.name, weight.dtype, weight.shape, null, manifest.paths.join(';'));
+                    while (p.length > 0) {
+                        var weightName = p.join('/');
+                        weights[weightName] = weights[weightName] || [];
+                        weights[weightName].push(initializer);
+                        p.shift();
                     }
                 }
             }
@@ -217,7 +226,11 @@ keras.Model = class {
     }
 
     get format() {
-        return this._format + (this._version ? (' v' + this._version) : '');
+        return this._format;
+    }
+
+    get producer() {
+        return this._producer;
     }
 
     get runtime() {

+ 8 - 1
test/models.json

@@ -1932,7 +1932,7 @@
     "type":   "keras",
     "target": "tiramisu_fc_dense103_model.json",
     "source": "https://raw.githubusercontent.com/0bserver07/One-Hundred-Layers-Tiramisu/master/tiramisu_fc_dense103_model.json",
-    "format": "Keras",
+    "format": "Keras v2.0.2", "runtime": "tensorflow",
     "link":   "https://github.com/0bserver07/One-Hundred-Layers-Tiramisu"
   },
   {
@@ -3834,6 +3834,13 @@
     "format": "TensorFlow Graph",
     "link":   "https://deepdetect.com/models/tf"
   },
+  {
+    "type":   "tfjs",
+    "target": "air-time-model/air-time-model.json",
+    "source": "https://github.com/lutzroeder/netron/files/3170424/air-time-model.json.zip[air-time-model.json]",
+    "format": "TensorFlow.js layers-model", "producer": "TensorFlow.js tfjs-layers v1.0.4",
+    "link":   "https://github.com/lutzroeder/netron/issues/270"
+  },
   {
     "type":   "tfjs",
     "target": "mobilenet_v1_0.25_224/model.json",

+ 4 - 0
test/test.js

@@ -452,6 +452,10 @@ function loadModel(target, item, callback) {
             callback(new Error("Invalid producer '" + model.producer + "'."), null);
             return;
         }
+        if (item.runtime && model.runtime != item.runtime) {
+            callback(new Error("Invalid runtime '" + model.runtime + "'."), null);
+            return;
+        }
         try {
             for (var graph of model.graphs) {
                 var input;