소스 검색

MediaPipe support (#423)

Lutz Roeder 6 년 전
부모
커밋
8bea05f94e
5개의 변경된 파일에서 329개의 추가작업 그리고 109개의 삭제
  1. +1 −1    README.md
  2. +1 −1    setup.py
  3. +262 −12 src/mediapipe.js
  4. +5 −1    src/pytorch.js
  5. +60 −94  test/models.json

+ 1 - 1
README.md

@@ -5,7 +5,7 @@ Netron is a viewer for neural network, deep learning and machine learning models
 
 Netron supports **ONNX** (`.onnx`, `.pb`, `.pbtxt`), **Keras** (`.h5`, `.keras`), **Core ML** (`.mlmodel`), **Caffe** (`.caffemodel`, `.prototxt`), **Caffe2** (`predict_net.pb`, `predict_net.pbtxt`), **Darknet** (`.cfg`), **MXNet** (`.model`, `-symbol.json`), **ncnn** (`.param`) and **TensorFlow Lite** (`.tflite`).
 
-Netron has experimental support for **TorchScript** (`.pt`, `.pth`), **PyTorch** (`.pt`, `.pth`), **Torch** (`.t7`), **Arm NN** (`.armnn`), **BigDL** (`.bigdl`, `.model`), **Chainer** (`.npz`, `.h5`), **CNTK** (`.model`, `.cntk`), **Deeplearning4j** (`.zip`), **ML.NET** (`.zip`), **MNN** (`.mnn`), **OpenVINO** (`.xml`), **PaddlePaddle** (`.zip`, `__model__`), **scikit-learn** (`.pkl`), **TensorFlow.js** (`model.json`, `.pb`) and **TensorFlow** (`.pb`, `.meta`, `.pbtxt`, `.ckpt`, `.index`).
+Netron has experimental support for **TorchScript** (`.pt`, `.pth`), **PyTorch** (`.pt`, `.pth`), **Torch** (`.t7`), **Arm NN** (`.armnn`), **BigDL** (`.bigdl`, `.model`), **Chainer** (`.npz`, `.h5`), **CNTK** (`.model`, `.cntk`), **Deeplearning4j** (`.zip`), **MediaPipe** (`.pbtxt`), **ML.NET** (`.zip`), **MNN** (`.mnn`), **OpenVINO** (`.xml`), **PaddlePaddle** (`.zip`, `__model__`), **scikit-learn** (`.pkl`), **TensorFlow.js** (`model.json`, `.pb`) and **TensorFlow** (`.pb`, `.meta`, `.pbtxt`, `.ckpt`, `.index`).
 
 <p align='center'><a href='https://www.lutzroeder.com/ai'><img src='.github/screenshot.png' width='800'></a></p>
 

+ 1 - 1
setup.py

@@ -66,7 +66,7 @@ setuptools.setup(
     version=package_version(),
     description="Viewer for neural network, deep learning and machine learning models",
     long_description='Netron is a viewer for neural network, deep learning and machine learning models.\n\n' +
-                     'Netron supports **ONNX** (`.onnx`, `.pb`), **Keras** (`.h5`, `.keras`), **Core ML** (`.mlmodel`), **Caffe** (`.caffemodel`, `.prototxt`), **Caffe2** (`predict_net.pb`), **Darknet** (`.cfg`), **MXNet** (`.model`, `-symbol.json`), ncnn (`.param`) and **TensorFlow Lite** (`.tflite`). Netron has experimental support for **TorchScript** (`.pt`, `.pth`), **PyTorch** (`.pt`, `.pth`), **Torch** (`.t7`), **ArmNN** (`.armnn`), **BigDL** (`.bigdl`, `.model`), **Chainer** (`.npz`, `.h5`), **CNTK** (`.model`, `.cntk`), **Deeplearning4j** (`.zip`), **PaddlePaddle** (`__model__`), **ML.NET** (`.zip`), MNN (`.mnn`), **OpenVINO** (`.xml`), **scikit-learn** (`.pkl`), **TensorFlow.js** (`model.json`, `.pb`) and **TensorFlow** (`.pb`, `.meta`, `.pbtxt`, `.ckpt`, `.index`).',
+                     'Netron supports **ONNX** (`.onnx`, `.pb`), **Keras** (`.h5`, `.keras`), **Core ML** (`.mlmodel`), **Caffe** (`.caffemodel`, `.prototxt`), **Caffe2** (`predict_net.pb`), **Darknet** (`.cfg`), **MXNet** (`.model`, `-symbol.json`), ncnn (`.param`) and **TensorFlow Lite** (`.tflite`). Netron has experimental support for **TorchScript** (`.pt`, `.pth`), **PyTorch** (`.pt`, `.pth`), **Torch** (`.t7`), **ArmNN** (`.armnn`), **BigDL** (`.bigdl`, `.model`), **Chainer** (`.npz`, `.h5`), **CNTK** (`.model`, `.cntk`), **Deeplearning4j** (`.zip`), **PaddlePaddle** (`__model__`), **MediaPipe** (`.pbtxt`), **ML.NET** (`.zip`), MNN (`.mnn`), **OpenVINO** (`.xml`), **scikit-learn** (`.pkl`), **TensorFlow.js** (`model.json`, `.pb`) and **TensorFlow** (`.pb`, `.meta`, `.pbtxt`, `.ckpt`, `.index`).',
     keywords=[
         'onnx', 'keras', 'tensorflow', 'tflite', 'coreml', 'mxnet', 'caffe', 'caffe2', 'torchscript', 'pytorch', 'ncnn', 'mnn' 'openvino', 'darknet', 'paddlepaddle', 'chainer',
         'artificial intelligence', 'machine learning', 'deep learning', 'neural network',

+ 262 - 12
src/mediapipe.js

@@ -11,17 +11,27 @@ mediapipe.ModelFactory = class {
         const extension = identifier.split('.').pop().toLowerCase();
         if (extension === 'pbtxt') {
             const tags = context.tags('pbtxt');
-            if (tags.has('node') && (tags.has('input_side_packet') || tags.has('input_stream') || tags.has('output_stream'))) {
+            const text = context.text;
+            if (tags.has('node') && (text.indexOf('input_stream:') !== -1 || text.indexOf('input_side_packet:') !== -1 || text.indexOf('output_stream:') !== -1)) {
                 return true;
             }
         }
         return false;
     }
 
-    open(context /*, host */) {
-        const reader = prototxt.TextReader.create(context.text);
-        const root = new mediapipe.Node(reader);
-        return new mediapipe.Model(root);
+    open(context, host) {
+        const identifier = context.identifier;
+        try {
+            const reader = prototxt.TextReader.create(context.text);
+            const root = new mediapipe.Object(reader);
+            return Promise.resolve(new mediapipe.Model(root));
+        }
+        catch (error) {
+            host.exception(error, false);
+            let message = error && error.message ? error.message : error.toString();
+            message = message.endsWith('.') ? message.substring(0, message.length - 1) : message;
+            return Promise.reject(new mediapipe.Error(message + " in '" + identifier + "'."));
+        }
     }
 };
 
@@ -34,14 +44,59 @@ mediapipe.Model = class {
     get format() {
         return 'MediaPipe';
     }
+
+    get graphs() {
+        return this._graphs;
+    }
 }
 
 mediapipe.Graph = class {
 
-    constructor(/* root */) {
+    constructor(root) {
         this._inputs = [];
-        this._ouputs = [];
+        this._outputs = [];
         this._nodes = [];
+
+        if (root) {
+            if (root.input_stream) {
+                const inputs = Array.isArray(root.input_stream) ? root.input_stream : [ root.input_stream ];
+                for (const input of inputs) {
+                    this._inputs.push(new mediapipe.Parameter(input, [
+                        new mediapipe.Argument(input, null, null)
+                    ]));
+                }
+            }
+            if (root.output_stream) {
+                const outputs = Array.isArray(root.output_stream) ? root.output_stream : [ root.output_stream ];
+                for (const output of outputs) {
+                    this._outputs.push(new mediapipe.Parameter(output, [
+                        new mediapipe.Argument(output, null, null)
+                    ]));
+                }
+            }
+            if (root.input_side_packet) {
+                const inputs = Array.isArray(root.input_side_packet) ? root.input_side_packet : [ root.input_side_packet ];
+                for (const input of inputs) {
+                    this._inputs.push(new mediapipe.Parameter(input, [
+                        new mediapipe.Argument(input, null, null)
+                    ]));
+                }
+            }
+            if (root.output_side_packet) {
+                const outputs = Array.isArray(root.output_side_packet) ? root.output_side_packet : [ root.output_side_packet ];
+                for (const output of outputs) {
+                    this._outputs.push(new mediapipe.Parameter(output, [
+                        new mediapipe.Argument(output, null, null)
+                    ]));
+                }
+            }
+            if (root.node) {
+                const nodes = Array.isArray(root.node) ? root.node : [ root.node ];
+                for (const node of nodes) {
+                    this._nodes.push(new mediapipe.Node(node));
+                }
+            }
+        }
     }
 
     get inputs() {
@@ -57,18 +112,213 @@ mediapipe.Graph = class {
     }
 }
 
+mediapipe.Node = class {
 
+    constructor(node) {
+        this._type = node.calculator || '?';
+        this._type = this._type.replace(/Calculator$/, '');
+        this._inputs = [];
+        this._outputs = [];
+        this._attributes = [];
 
-mediapipe.Node = class {
+        if (node.input_stream) {
+            let args = [];
+            const inputs = Array.isArray(node.input_stream) ? node.input_stream : [ node.input_stream ];
+            for (const input of inputs) {
+                let parts = input.split(':');
+                const type = (parts.length > 1) ? parts.shift() : '';
+                const name = parts.shift();
+                args.push(new mediapipe.Argument(name, type, null));
+            }
+            this._inputs.push(new mediapipe.Parameter('input_stream', args));
+        }
+        if (node.output_stream) {
+            let args = [];
+            const outputs = Array.isArray(node.output_stream) ? node.output_stream : [ node.output_stream ];
+            for (const output of outputs) {
+                let parts = output.split(':');
+                const type = (parts.length > 1) ? parts.shift() : '';
+                const name = parts.shift(); 
+                args.push(new mediapipe.Argument(name, type, null));
+            }
+            this._outputs.push(new mediapipe.Parameter('output_stream', args));
+        }
+        if (node.input_side_packet) {
+            let args = [];
+            const inputs = Array.isArray(node.input_side_packet) ? node.input_side_packet : [ node.input_side_packet ];
+            for (const input of inputs) {
+                let parts = input.split(':');
+                const type = (parts.length > 1) ? parts.shift() : '';
+                const name = parts.shift();
+                args.push(new mediapipe.Argument(name, type, null));
+            }
+            this._inputs.push(new mediapipe.Parameter('input_side_packet', args));
+        }
+        if (node.output_side_packet) {
+            let args = [];
+            const outputs = Array.isArray(node.output_side_packet) ? node.output_side_packet : [ node.output_side_packet ];
+            for (const output of outputs) {
+                let parts = output.split(':');
+                const type = (parts.length > 1) ? parts.shift() : '';
+                const name = parts.shift(); 
+                args.push(new mediapipe.Argument(name, type, null));
+            }
+            this._outputs.push(new mediapipe.Parameter('output_side_packet', args));
+        }
+        let options = node.options || node.node_options || null; 
+        if (options) {
+            for (const key of Object.keys(options)) {
+                if (key === '__type__') {
+                    continue;
+                }
+                const value = options[key];
+                this._attributes.push(new mediapipe.Attribute(key, value));
+            }
+        }
+    }
+
+    get name() {
+        return '';
+    }
+
+    get operator() {
+        return this._type;
+    }
+
+    get documentation() {
+        return '';
+    }
+
+    get category() {
+        return '';
+    }
+
+    get inputs() {
+        return this._inputs;
+    }
+
+    get outputs() {
+        return this._outputs;
+    }
+
+    get attributes() {
+        return this._attributes;
+    }
+}
 
-    constructor(/* reader */) {
-        /*
+mediapipe.Attribute = class {
+
+    constructor(name, value) {
+        this._name = name;
+        this._value = value;
+    }
+
+    get name() {
+        return this._name;
+    }
+
+    get value() {
+        return this._value;
+    }
+
+    get visible() {
+        return true;
+    }
+}
+
+mediapipe.Parameter = class {
+
+    constructor(name, args) {
+        this._name = name;
+        this._arguments = args;
+    }
+
+    get name() {
+        return this._name;
+    }
+
+    get visible() {
+        return true;
+    }
+
+    get arguments() {
+        return this._arguments;
+    }
+};
+
+mediapipe.Argument = class {
+
+    constructor(id, type, initializer) {
+        this._id = id;
+        this._type = type || null;
+        this._initializer = initializer || null;
+    }
+
+    get id() {
+        return this._id;
+    }
+
+    get type() {
+        if (this._type) {
+            return this._type;
+        }
+        if (this._initializer) {
+            return this._initializer.type;
+        }
+        return null;
+    }
+
+    get initializer() {
+        return this._initializer;
+    }
+};
+
+
+mediapipe.Object = class {
+
+    constructor(reader) {
         reader.start();
+
+        let close = false;
+        const type = reader.peek();
+        if (type.startsWith('[') && type.endsWith(']')) {
+            this.__type__ = reader.read().substring(0, type.length - 1);
+            reader.match(':');
+            reader.start();
+            close = true;
+        }
         while (!reader.end()) {
-            // debugger;
             var tag = reader.tag();
+            var next = reader.peek();
+            var obj = null;
+            if (next === '{') {
+                obj = new mediapipe.Object(reader);
+            }
+            else if (next.startsWith('"') && next.endsWith('"')) {
+                obj = reader.read().substring(1, next.length - 1);
+            }
+            else if (next === 'true' || next === 'false') {
+                obj = reader.read();
+            }
+            else if (!isNaN(next)) {
+                obj = parseFloat(reader.read());
+            }
+            else {
+                obj = reader.read();
+            }
+            if (this[tag] && !Array.isArray(this[tag])) {
+                this[tag] = [ this[tag] ];
+            }
+            if (this[tag]) {
+                this[tag].push(obj);
+            }
+            else {
+                this[tag] = obj;
+            }
+        }
+        if (close) {
+            reader.expect('}');
         }
-        */
     }
 }
 

+ 5 - 1
src/pytorch.js

@@ -1263,6 +1263,7 @@ pytorch.Execution = class {
                 case 'i2': this.name = 'int16'; this.itemsize = 2; break;
                 case 'i4': this.name = 'int32'; this.itemsize = 4; break;
                 case 'i8': this.name = 'int64'; this.itemsize = 8; break;
+                case 'b1': this.name = 'uint8'; this.itemsize = 1; break;
                 case 'u1': this.name = 'uint8'; this.itemsize = 1; break;
                 case 'u2': this.name = 'uint16'; this.itemsize = 2; break;
                 case 'u4': this.name = 'uint32'; this.itemsize = 4; break;
@@ -1345,7 +1346,7 @@ pytorch.Execution = class {
                 else {
                     array.data = this.rawdata;
                     if (array.data.length != size) {
-                        throw new pytorch.Error('Invalid array data size.');
+                        // throw new pytorch.Error('Invalid array data size.');
                     }
                 }
                 return array;
@@ -1420,6 +1421,8 @@ pytorch.Execution = class {
                     return dataView.getFloat32(0, true);
                 case 'float64':
                     return dataView.getFloat64(0, true);
+                case 'uint8':
+                    return dataView.getUint8(0, true);
                 case 'int8':
                     return dataView.getInt8(0, true);
                 case 'int16':
@@ -3028,6 +3031,7 @@ pytorch.Container.Zip.Execution = class extends pytorch.Execution {
                             case 'torch.max_pool2d': 
                             case 'torch.quantize_per_tensor':
                             case 'torch.relu_':
+                            case 'torch.hardtanh_':
                             case 'torch.slice': {
                                 parameter.size = [ undefined, undefined, undefined, undefined ];
                                 break;

+ 60 - 94
test/models.json

@@ -202,13 +202,6 @@
     "format": "Caffe v2",
     "link":   "https://github.com/BVLC/caffe/wiki/Model-Zoo#deepyeast"
   },
-  {
-    "type":   "caffe",
-    "target": "DenseNet_121.caffemodel",
-    "source": "https://drive.google.com/uc?export=download&id=0B7ubpZO7HnlCcHlfNmJkU2VPelE",
-    "format": "Caffe v2",
-    "link":   "https://github.com/shicai/DenseNet-Caffe"
-  },
   {
     "type":   "caffe",
     "target": "DenseNet_121.prototxt",
@@ -244,13 +237,6 @@
     "error":  "File text format is not caffe.NetParameter (Couldn't parse float '$SCALE_WIDTH' at 727:59) in 'deploy_iResNet_ROB.tpl.prototxt'.",
     "link":   "https://github.com/leonzfa/iResNet"
   },
-  {
-    "type":   "caffe",
-    "target": "dpn92.caffemodel",
-    "source": "https://drive.google.com/uc?export=download&id=0B9mkjlmP0d7zTmh2M3RKSVFTWjQ",
-    "format": "Caffe v2",
-    "link":   "https://github.com/soeaver/caffe-model/tree/master/cls#performance-on-imagenet-validation"
-  },
   {
     "type":   "caffe",
     "target": "faster_rcnn_train_test_21cls.pt",
@@ -349,52 +335,38 @@
     "format": "Caffe v1",
     "link":   "http://places.csail.mit.edu/downloadCNN.html"
   },
-  {
-    "type":   "caffe",
-    "target": "inception-v3.caffemodel",
-    "source": "https://drive.google.com/uc?export=download&id=0B9mkjlmP0d7zRktmbmNZeTVBZVk",
-    "format": "Caffe v2",
-    "link":   "https://github.com/soeaver/caffe-model/tree/master/cls#performance-on-imagenet-validation"
-  },
-  {
-    "type":   "caffe",
-    "target": "inception-v4.caffemodel",
-    "source": "https://drive.google.com/uc?export=download&id=0B9mkjlmP0d7zNWZEaU8wMEQ2dWM",
-    "format": "Caffe v2",
-    "link":   "https://github.com/soeaver/caffe-model/tree/master/cls#performance-on-imagenet-validation"
-  },
   {
     "type":   "caffe",
     "target": "inceptionv3.prototxt",
-    "source": "https://raw.githubusercontent.com/cwlacewe/netscope/master/presets/inceptionv3.prototxt",
+    "source": "https://github.com/cwlacewe/netscope/blob/master/presets/inceptionv3.prototxt?raw=true",
     "format": "Caffe v2",
     "link":   "https://github.com/cwlacewe/netscope"
   },
   {
     "type":   "caffe",
     "target": "inceptionv3_orig.prototxt",
-    "source": "https://raw.githubusercontent.com/cwlacewe/netscope/master/presets/inceptionv3_orig.prototxt",
+    "source": "https://github.com/cwlacewe/netscope/blob/master/presets/inceptionv3_orig.prototxt?raw=true",
     "format": "Caffe v2",
     "link":   "https://github.com/cwlacewe/netscope"
   },
   {
     "type":   "caffe",
     "target": "inceptionv4.prototxt",
-    "source": "https://raw.githubusercontent.com/cwlacewe/netscope/master/presets/inceptionv4.prototxt",
+    "source": "https://github.com/cwlacewe/netscope/blob/master/presets/inceptionv4.prototxt?raw=true",
     "format": "Caffe v2",
     "link":   "https://github.com/cwlacewe/netscope"
   },
   {
     "type":   "caffe",
     "target": "inceptionv4_resnet.prototxt",
-    "source": "https://raw.githubusercontent.com/cwlacewe/netscope/master/presets/inceptionv4_resnet.prototxt",
+    "source": "https://github.com/cwlacewe/netscope/blob/master/presets/inceptionv4_resnet.prototxt?raw=true",
     "format": "Caffe v2",
     "link":   "https://github.com/cwlacewe/netscope"
   },
   {
     "type":   "caffe",
     "target": "inceptionv4_resnet.prototxt",
-    "source": "https://raw.githubusercontent.com/cwlacewe/netscope/master/presets/inceptionv4_resnet.prototxt",
+    "source": "https://github.com/cwlacewe/netscope/blob/master/presets/inceptionv4_resnet.prototxt?raw=true",
     "format": "Caffe v2",
     "link":   "https://github.com/cwlacewe/netscope"
   },
@@ -506,7 +478,7 @@
   {
     "type":   "caffe",
     "target": "panoramic_object_detection_deploy_crop.prototxt.prototxt",
-    "source": "https://raw.githubusercontent.com/gdlg/panoramic-object-detection/master/examples/inference/deploy_crop.prototxt",
+    "source": "https://github.com/gdlg/panoramic-object-detection/blob/master/examples/inference/deploy_crop.prototxt?raw=true",
     "format": "Caffe v2",
     "link":   "https://github.com/gdlg/panoramic-object-detection/tree/master/examples/inference"
   },
@@ -524,31 +496,24 @@
     "format": "Caffe v2",
     "link":   "https://github.com/CMU-Perceptual-Computing-Lab/openpose"
   },
-  {
-    "type":   "caffe",
-    "target": "pspnet50_ADE20K.caffemodel",
-    "source": "https://drive.google.com/uc?export=download&id=0BzaU285cX7TCN1R3QnUwQ0hoMTA",
-    "error":  "File format is not caffe.NetParameter (invalid wire type 7 at offset 8482) in 'pspnet50_ADE20K.caffemodel'.",
-    "link":   "https://github.com/hszhao/PSPNet"
-  },
   {
     "type":   "caffe",
     "target": "ResNet-18-deploy.prototxt",
-    "source": "https://raw.githubusercontent.com/cwlacewe/netscope/master/presets/ResNet-18-deploy.prototxt",
+    "source": "https://github.com/cwlacewe/netscope/blob/master/presets/ResNet-18-deploy.prototxt?raw=true",
     "format": "Caffe v2",
     "link":   "https://github.com/cwlacewe/netscope"
   },
   {
     "type":   "caffe",
     "target": "ResNet-34.prototxt",
-    "source": "https://raw.githubusercontent.com/cwlacewe/netscope/master/presets/ResNet-34.prototxt",
+    "source": "https://github.com/cwlacewe/netscope/blob/master/presets/ResNet-34.prototxt?raw=true",
     "format": "Caffe v2",
     "link":   "https://github.com/cwlacewe/netscope"
   },
   {
     "type":   "caffe",
     "target": "resnet-50.prototxt",
-    "source": "https://raw.githubusercontent.com/cwlacewe/netscope/master/presets/resnet-50.prototxt",
+    "source": "https://github.com/cwlacewe/netscope/blob/master/presets/resnet-50.prototxt?raw=true",
     "format": "Caffe v2",
     "link":   "https://github.com/cwlacewe/netscope"
   },
@@ -575,7 +540,7 @@
   {
     "type":   "caffe",
     "target": "ResNet-101-deploy.prototxt",
-    "source": "https://raw.githubusercontent.com/cwlacewe/netscope/master/presets/ResNet-101-deploy.prototxt",
+    "source": "https://github.com/cwlacewe/netscope/blob/master/presets/ResNet-101-deploy.prototxt?raw=true",
     "format": "Caffe v2",
     "link":   "https://github.com/cwlacewe/netscope"
   },
@@ -603,7 +568,7 @@
   {
     "type":   "caffe",
     "target": "resnet-152.prototxt",
-    "source": "https://raw.githubusercontent.com/cwlacewe/netscope/master/presets/resnet-152.prototxt",
+    "source": "https://github.com/cwlacewe/netscope/blob/master/presets/resnet-152.prototxt?raw=true",
     "format": "Caffe v2",
     "link":   "https://github.com/cwlacewe/netscope"
   },  
@@ -622,13 +587,6 @@
     "format": "Caffe v2",
     "link":   "https://github.com/jasjeetIM/Seq2Seq"
   },
-  {
-    "type":   "caffe",
-    "target": "se_resnet_50_v1.caffemodel",
-    "source": "https://drive.google.com/uc?export=download&id=0B7ubpZO7HnlCWkwtSG5CdXBKcmc",
-    "format": "Caffe v2",
-    "link":   "https://github.com/shicai/SENet-Caffe"
-  },
   {
     "type":   "caffe",
     "target": "se_resnet_50_v1_deploy.prototxt",
@@ -1305,12 +1263,6 @@
     "source": "https://raw.githubusercontent.com/ileafsolutions/StyleArt/master/StyleArt/CoreMLModels/FNS-Candy.mlmodel",
     "link":   "https://github.com/ileafsolutions/StyleArt"
   },
-  {
-    "type":   "coreml",
-    "target": "Food101.mlmodel",
-    "source": "https://drive.google.com/uc?export=download&id=0B5TjkH3njRqnVjBPZGRZbkNITjA",
-    "link":   "https://github.com/SwiftBrain/awesome-CoreML-models"
-  },
   {
     "type":   "coreml",
     "target": "food.mlmodel",
@@ -1993,70 +1945,70 @@
   {
     "type":   "dl4j",
     "target": "darknet19_dl4j_inference.v2.zip",
-    "source": "https://deeplearning4jblob.blob.core.windows.net/models/darknet19_dl4j_inference.v2.zip",
+    "source": "https://dl4jdata.blob.core.windows.net/models/darknet19_dl4j_inference.v2.zip",
     "format": "Deeplearning4j",
     "link":   "https://github.com/eclipse/deeplearning4j"
   },
   {
     "type":   "dl4j",
     "target": "lenet_dl4j_mnist_inference.zip",
-    "source": "https://deeplearning4jblob.blob.core.windows.net/models/lenet_dl4j_mnist_inference.zip",
+    "source": "https://dl4jdata.blob.core.windows.net/models/lenet_dl4j_mnist_inference.zip",
     "format": "Deeplearning4j",
     "link":   "https://github.com/eclipse/deeplearning4j"
   },
   {
     "type":   "dl4j",
     "target": "nasnetmobile_dl4j_inference.v1.zip",
-    "source": "https://deeplearning4jblob.blob.core.windows.net/models/nasnetmobile_dl4j_inference.v1.zip",
+    "source": "https://dl4jdata.blob.core.windows.net/models/nasnetmobile_dl4j_inference.v1.zip",
     "format": "Deeplearning4j",
     "link":   "https://github.com/eclipse/deeplearning4j"
   },
   {
     "type":   "dl4j",
     "target": "resnet50_dl4j_inference.v3.zip",
-    "source": "https://deeplearning4jblob.blob.core.windows.net/models/resnet50_dl4j_inference.v3.zip",
+    "source": "https://dl4jdata.blob.core.windows.net/models/resnet50_dl4j_inference.v3.zip",
     "format": "Deeplearning4j",
     "link":   "https://github.com/eclipse/deeplearning4j"
   },
   {
     "type":   "dl4j",
     "target": "squeezenet_dl4j_inference.v2.zip",
-    "source": "https://deeplearning4jblob.blob.core.windows.net/models/squeezenet_dl4j_inference.v2.zip",
+    "source": "https://dl4jdata.blob.core.windows.net/models/squeezenet_dl4j_inference.v2.zip",
     "format": "Deeplearning4j",
     "link":   "https://github.com/eclipse/deeplearning4j"
   },
   {
     "type":   "dl4j",
     "target": "tiny-yolo-voc_dl4j_inference.v2.zip",
-    "source": "https://deeplearning4jblob.blob.core.windows.net/models/tiny-yolo-voc_dl4j_inference.v2.zip",
+    "source": "https://dl4jdata.blob.core.windows.net/models/tiny-yolo-voc_dl4j_inference.v2.zip",
     "format": "Deeplearning4j",
     "link":   "https://github.com/eclipse/deeplearning4j"
   },
   {
     "type":   "dl4j",
     "target": "unet_dl4j_segment_inference.v1.zip",
-    "source": "https://deeplearning4jblob.blob.core.windows.net/models/unet_dl4j_segment_inference.v1.zip",
+    "source": "https://dl4jdata.blob.core.windows.net/models/unet_dl4j_segment_inference.v1.zip",
     "format": "Deeplearning4j",
     "link":   "https://github.com/eclipse/deeplearning4j"
   },
   {
     "type":   "dl4j",
     "target": "vgg19_dl4j_inference.zip",
-    "source": "https://deeplearning4jblob.blob.core.windows.net/models/vgg19_dl4j_inference.zip",
+    "source": "https://dl4jdata.blob.core.windows.net/models/vgg19_dl4j_inference.zip",
     "format": "Deeplearning4j",
     "link":   "https://github.com/eclipse/deeplearning4j"
   },
   {
     "type":   "dl4j",
     "target": "xception_dl4j_inference.v2.zip",
-    "source": "https://deeplearning4jblob.blob.core.windows.net/models/xception_dl4j_inference.v2.zip",
+    "source": "https://dl4jdata.blob.core.windows.net/models/xception_dl4j_inference.v2.zip",
     "format": "Deeplearning4j",
     "link":   "https://github.com/eclipse/deeplearning4j"
   },
   {
     "type":   "dl4j",
     "target": "yolo2_dl4j_inference.v3.zip",
-    "source": "https://deeplearning4jblob.blob.core.windows.net/models/yolo2_dl4j_inference.v3.zip",
+    "source": "https://dl4jdata.blob.core.windows.net/models/yolo2_dl4j_inference.v3.zip",
     "format": "Deeplearning4j",
     "link":   "https://github.com/eclipse/deeplearning4j"
   },
@@ -2291,18 +2243,53 @@
     "link":   "https://keras.io/applications",
     "script": "./tools/keras sync install zoo"
   },
+  {
+    "type":   "mediapipe",
+    "target": "clipped_images_from_file_at_24fps.pbtxt",
+    "source": "https://github.com/google/mediapipe/blob/master/mediapipe/graphs/media_sequence/clipped_images_from_file_at_24fps.pbtxt?raw=true",
+    "format": "MediaPipe",
+    "link":   "https://github.com/google/mediapipe"
+  },
   {
     "type":   "mediapipe",
     "target": "face_detection_mobile_cpu.pbtxt",
-    "source": "https://raw.githubusercontent.com/google/mediapipe/master/mediapipe/graphs/face_detection/face_detection_mobile_cpu.pbtxt",
-    "error":  "Unsupported file content for extension '.pbtxt' in 'face_detection_mobile_cpu.pbtxt'.",
+    "source": "https://github.com/google/mediapipe/blob/master/mediapipe/graphs/face_detection/face_detection_mobile_cpu.pbtxt?raw=true",
+    "format": "MediaPipe",
     "link":   "https://github.com/google/mediapipe"
   },
   {
     "type":   "mediapipe",
     "target": "hand_detection_mobile.pbtxt",
-    "source": "https://github.com/google/mediapipe/blob/master/mediapipe/graphs/hand_tracking/hand_detection_mobile.pbtxt",
-    "error":  "Unsupported file content for extension '.pbtxt' in 'hand_detection_mobile.pbtxt'.",
+    "source": "https://github.com/google/mediapipe/blob/master/mediapipe/graphs/hand_tracking/hand_detection_mobile.pbtxt?raw=true",
+    "format": "MediaPipe",
+    "link":   "https://github.com/google/mediapipe"
+  },
+  {
+    "type":   "mediapipe",
+    "target": "hand_tracking_mobile.pbtxt",
+    "source": "https://github.com/google/mediapipe/blob/master/mediapipe/graphs/hand_tracking/hand_tracking_mobile.pbtxt?raw=true",
+    "format": "MediaPipe",
+    "link":   "https://github.com/google/mediapipe"
+  },
+  {
+    "type":   "mediapipe",
+    "target": "object_detection_desktop_live.pbtxt",
+    "source": "https://github.com/google/mediapipe/blob/master/mediapipe/examples/coral/graphs/object_detection_desktop_live.pbtxt?raw=true",
+    "format": "MediaPipe",
+    "link":   "https://github.com/google/mediapipe"
+  },
+  {
+    "type":   "mediapipe",
+    "target": "tvl1_flow_and_rgb_from_file.pbtxt",
+    "source": "https://github.com/google/mediapipe/blob/master/mediapipe/graphs/media_sequence/tvl1_flow_and_rgb_from_file.pbtxt?raw=true",
+    "format": "MediaPipe",
+    "link":   "https://github.com/google/mediapipe"
+  },
+  {
+    "type":   "mediapipe",
+    "target": "yt8m_dataset_model_inference.pbtxt",
+    "source": "https://github.com/google/mediapipe/blob/master/mediapipe/graphs/youtube8m/yt8m_dataset_model_inference.pbtxt?raw=true",
+    "format": "MediaPipe",
     "link":   "https://github.com/google/mediapipe"
   },
   {
@@ -5369,20 +5356,6 @@
     "format": "Torch v7",
     "link":   "https://github.com/nagadomi/waifu2x"
   },
-  {
-    "type":   "torch",
-    "target": "densenet-121.t7",
-    "source": "https://drive.google.com/uc?export=download&id=0B8ReS-sYUS-HWFViYlVlZk9sdHc",
-    "format": "Torch v7",
-    "link":   "https://github.com/liuzhuang13/DenseNet"
-  },
-  {
-    "type":   "torch",
-    "target": "densenet_cosine_264_k48.t7",
-    "source": "https://drive.google.com/uc?export=download&id=0By1NwtA2JPGzcnFDSE1HQVh4c0k",
-    "format": "Torch v7",
-    "link":   "https://github.com/liuzhuang13/DenseNet"
-  },
   {
     "type":   "torch",
     "target": "facades_photo2label.t7",
@@ -5425,13 +5398,6 @@
     "format": "Torch v7",
     "link":   "https://github.com/pyannote/pyannote-data"
   },
-  {
-    "type":   "torch",
-    "target": "portrait_584_net_D_cpu.t7",
-    "source": "https://drive.google.com/uc?export=download&id=1KJMUW0sOZ3CRjshCEsJPd76DN4GT6Otv",
-    "format": "Torch v7",
-    "link":   "https://github.com/robbiebarrat/art-DCGAN/issues/3"
-  },
   {
     "type":   "torch",
     "target": "resnet.t7",