Explorar o código

Rename match to target

Lutz Roeder hai 2 anos
pai
achega
4a59e089fa

+ 2 - 2
source/acuity.js

@@ -14,9 +14,9 @@ acuity.ModelFactory = class {
         return null;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         const metadata = await context.metadata('acuity-metadata.json');
-        return new acuity.Model(metadata, match);
+        return new acuity.Model(metadata, target);
     }
 };
 

+ 3 - 3
source/armnn.js

@@ -20,11 +20,11 @@ armnn.ModelFactory = class {
         return undefined;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         await context.require('./armnn-schema');
         armnn.schema = flatbuffers.get('armnn').armnnSerializer;
         let model = null;
-        switch (match) {
+        switch (target) {
             case 'armnn.flatbuffers': {
                 try {
                     const stream = context.stream;
@@ -48,7 +48,7 @@ armnn.ModelFactory = class {
                 break;
             }
             default: {
-                throw new armnn.Error("Unsupported Arm NN '" + match + "'.");
+                throw new armnn.Error("Unsupported Arm NN '" + target + "'.");
             }
         }
         const metadata = await context.metadata('armnn-metadata.json');

+ 3 - 3
source/caffe.js

@@ -25,7 +25,7 @@ caffe.ModelFactory = class {
         return undefined;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         await context.require('./caffe-proto');
         caffe.proto = protobuf.get('caffe').caffe;
         const openModel = async (context, netParameter) => {
@@ -86,7 +86,7 @@ caffe.ModelFactory = class {
             }
             return openModel(context, netParameter);
         };
-        switch (match) {
+        switch (target) {
             case 'caffe.pbtxt.solver': {
                 const stream = context.stream;
                 const reader = protobuf.TextReader.open(stream);
@@ -128,7 +128,7 @@ caffe.ModelFactory = class {
                 return openModel(context, netParameter);
             }
             default: {
-                throw new caffe.Error("Unsupported Caffe format '" + match + "'.");
+                throw new caffe.Error("Unsupported Caffe format '" + target + "'.");
             }
         }
     }

+ 3 - 3
source/caffe2.js

@@ -45,14 +45,14 @@ caffe2.ModelFactory = class {
         return undefined;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         await context.require('./caffe2-proto');
         const metadata = await context.metadata('caffe2-metadata.json');
         const identifier = context.identifier;
         const parts = identifier.split('.');
         const extension = parts.pop().toLowerCase();
         const base = parts.join('.');
-        switch (match) {
+        switch (target) {
             case 'caffe2.pbtxt': {
                 const openText = (predictBuffer, initBuffer, initTextFormat) => {
                     let predict_net = null;
@@ -179,7 +179,7 @@ caffe2.ModelFactory = class {
                 }
             }
             default: {
-                throw new caffe2.Error("Unsupported Caffe2 format '" + match + "'.");
+                throw new caffe2.Error("Unsupported Caffe2 format '" + target + "'.");
             }
         }
     }

+ 3 - 3
source/circle.js

@@ -18,12 +18,12 @@ circle.ModelFactory = class {
         return undefined;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         await context.require('./circle-schema');
         circle.schema = flatbuffers.get('circle').circle;
         let model = null;
         const attachments = new Map();
-        switch (match) {
+        switch (target) {
             case 'circle.flatbuffers.json': {
                 try {
                     const obj = context.open('json');
@@ -57,7 +57,7 @@ circle.ModelFactory = class {
                 break;
             }
             default: {
-                throw new circle.Error("Unsupported Circle format '" + match + "'.");
+                throw new circle.Error("Unsupported Circle format '" + target + "'.");
             }
         }
         const metadata = await context.metadata('circle-metadata.json');

+ 3 - 3
source/cntk.js

@@ -23,9 +23,9 @@ cntk.ModelFactory = class {
         return undefined;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         const metadata = await context.metadata('cntk-metadata.json');
-        switch (match) {
+        switch (target) {
             case 'cntk.v1': {
                 let obj = null;
                 try {
@@ -55,7 +55,7 @@ cntk.ModelFactory = class {
                 return new cntk.Model(metadata, 2, obj);
             }
             default: {
-                throw new cntk.Error("Unsupported CNTK format '" + match + "'.");
+                throw new cntk.Error("Unsupported CNTK format '" + target + "'.");
             }
         }
     }

+ 3 - 3
source/coreml.js

@@ -64,7 +64,7 @@ coreml.ModelFactory = class {
         return undefined;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         await context.require('./coreml-proto');
         const metadata = await context.metadata('coreml-metadata.json');
         const openModel = async (stream, context, path, format) => {
@@ -147,7 +147,7 @@ coreml.ModelFactory = class {
             const obj = reader.read();
             return openManifest(obj, context, path);
         };
-        switch (match) {
+        switch (target) {
             case 'coreml.pb': {
                 return openModel(context.stream, context, context.identifier);
             }
@@ -163,7 +163,7 @@ coreml.ModelFactory = class {
                 return openManifestStream(context, '../../../');
             }
             default: {
-                throw new coreml.Error("Unsupported Core ML format '" + match + "'.");
+                throw new coreml.Error("Unsupported Core ML format '" + target + "'.");
             }
         }
     }

+ 3 - 3
source/darknet.js

@@ -38,7 +38,7 @@ darknet.ModelFactory = class {
         return undefined;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         const metadata = await context.metadata('darknet-metadata.json');
         const openModel = (metadata, cfg, weights) => {
             return new darknet.Model(metadata, cfg, darknet.Weights.open(weights));
@@ -47,7 +47,7 @@ darknet.ModelFactory = class {
         const parts = identifier.split('.');
         parts.pop();
         const basename = parts.join('.');
-        switch (match) {
+        switch (target) {
             case 'darknet.weights': {
                 const stream = await context.request(basename + '.cfg', null);
                 const buffer = stream.read();
@@ -62,7 +62,7 @@ darknet.ModelFactory = class {
                 }
             }
             default: {
-                throw new darknet.Error("Unsupported Darknet format '" + match + "'.");
+                throw new darknet.Error("Unsupported Darknet format '" + target + "'.");
             }
         }
     }

+ 3 - 3
source/dl4j.js

@@ -24,9 +24,9 @@ dl4j.ModelFactory = class {
         return undefined;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         const metadata = await context.metadata('dl4j-metadata.json');
-        switch (match) {
+        switch (target) {
             case 'dl4j.configuration': {
                 const obj = context.open('json');
                 try {
@@ -43,7 +43,7 @@ dl4j.ModelFactory = class {
                 return new dl4j.Model(metadata, obj, context.stream.peek());
             }
             default: {
-                throw new dl4j.Error("Unsupported Deeplearning4j format '" + match + "'.");
+                throw new dl4j.Error("Unsupported Deeplearning4j format '" + target + "'.");
             }
         }
     }

+ 2 - 2
source/dlc.js

@@ -8,10 +8,10 @@ dlc.ModelFactory = class {
         return dlc.Container.open(context);
     }
 
-    async open(context, match) {
+    async open(context, target) {
         await context.require('./dlc-schema');
         dlc.schema = flatbuffers.get('dlc').dlc;
-        const container = match;
+        const container = target;
         let model = null;
         let params = null;
         const metadata_props = container.metadata;

+ 2 - 2
source/hailo.js

@@ -10,9 +10,9 @@ hailo.ModelFactory = class {
         return hailo.Container.open(context);
     }
 
-    async open(context, match) {
+    async open(context, target) {
         const metadata = await context.metadata('hailo-metadata.json');
-        return new hailo.Model(metadata, match);
+        return new hailo.Model(metadata, target);
     }
 };
 

+ 2 - 2
source/hickle.js

@@ -11,8 +11,8 @@ hickle.ModelFactory = class {
         return null;
     }
 
-    async open(context, match) {
-        return new hickle.Model(match);
+    async open(context, target) {
+        return new hickle.Model(target);
     }
 };
 

+ 3 - 3
source/keras.js

@@ -33,12 +33,12 @@ keras.ModelFactory = class {
         return null;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         const openModel = async (format, producer, backend, config, weights) => {
             const metadata = await context.metadata('keras-metadata.json');
             return new keras.Model(metadata, format, producer, backend, config, weights);
         };
-        switch (match) {
+        switch (target) {
             case 'keras.h5': {
                 const find_root_group = (root_group) => {
                     const kerasmodel = root_group.group('model/kerasmodel');
@@ -252,7 +252,7 @@ keras.ModelFactory = class {
                 return openModel(format, '', backend, model_config, weights);
             }
             default: {
-                throw new keras.Error("Unsupported Keras format '" + match + "'.");
+                throw new keras.Error("Unsupported Keras format '" + target + "'.");
             }
         }
     }

+ 2 - 2
source/kmodel.js

@@ -8,8 +8,8 @@ kmodel.ModelFactory = class {
         return kmodel.Reader.open(context.stream);
     }
 
-    async open(context, match) {
-        return new kmodel.Model(match);
+    async open(context, target) {
+        return new kmodel.Model(target);
     }
 };
 

+ 2 - 2
source/lasagne.js

@@ -13,9 +13,9 @@ lasagne.ModelFactory = class {
         return null;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         const metadata = await context.metadata('lasagne-metadata.json');
-        return new lasagne.Model(metadata, match);
+        return new lasagne.Model(metadata, target);
     }
 };
 

+ 3 - 3
source/lightgbm.js

@@ -17,10 +17,10 @@ lightgbm.ModelFactory = class {
         return null;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         let obj;
         let format;
-        switch (match) {
+        switch (target) {
             case 'lightgbm.pickle': {
                 obj = context.open('pkl');
                 format = 'LightGBM Pickle';
@@ -38,7 +38,7 @@ lightgbm.ModelFactory = class {
                 break;
             }
             default: {
-                throw new lightgbm.Error("Unsupported LightGBM format '" + match + "'.");
+                throw new lightgbm.Error("Unsupported LightGBM format '" + target + "'.");
             }
         }
         return new lightgbm.Model(obj, format);

+ 5 - 5
source/megengine.js

@@ -35,12 +35,12 @@ megengine.ModelFactory = class {
         return '';
     }
 
-    async open(context, match) {
+    async open(context, target) {
         const metadata = await context.metadata('megengine-metadata.json');
-        switch (match) {
+        switch (target) {
             case 'megengine.tm': {
                 const obj = context.open('pkl');
-                return new megengine.Model(metadata, obj, match);
+                return new megengine.Model(metadata, obj, target);
             }
             case 'megengine.mge': {
                 await context.require('./megengine-schema');
@@ -58,10 +58,10 @@ megengine.ModelFactory = class {
                     const message = error && error.message ? error.message : error.toString();
                     throw new megengine.Error('File format is not megengine.Model (' + message.replace(/\.$/, '') + ').');
                 }
-                return new megengine.Model(metadata, model, match);
+                return new megengine.Model(metadata, model, target);
             }
             default: {
-                throw new megengine.Error("Unsupported MegEngine format '" + match.replace(/^megengine\./, '') + "'.");
+                throw new megengine.Error("Unsupported MegEngine format '" + target.replace(/^megengine\./, '') + "'.");
             }
         }
     }

+ 3 - 3
source/mxnet.js

@@ -24,7 +24,7 @@ mxnet.ModelFactory = class {
         return undefined;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         const metadata = await context.metadata('mxnet-metadata.json');
         const basename = (base, identifier, extension, suffix, append) => {
             if (!base) {
@@ -180,7 +180,7 @@ mxnet.ModelFactory = class {
             return new mxnet.Model(metadata, manifest, symbol, parameters);
         };
         const identifier = context.identifier;
-        switch (match) {
+        switch (target) {
             case 'mxnet.json': {
                 let symbol = null;
                 try {
@@ -224,7 +224,7 @@ mxnet.ModelFactory = class {
                 return requestSymbol(manifest);
             }
             default: {
-                throw new mxnet.Error("Unsupported MXNet format '" + match + "'.");
+                throw new mxnet.Error("Unsupported MXNet format '" + target + "'.");
             }
         }
     }

+ 3 - 3
source/ncnn.js

@@ -55,7 +55,7 @@ ncnn.ModelFactory = class {
         return undefined;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         const openBinary = (param, bin) => {
             const reader = new ncnn.BinaryParamReader(param);
             return new ncnn.Model(metadata, reader, bin);
@@ -67,7 +67,7 @@ ncnn.ModelFactory = class {
         const metadata = await context.metadata('ncnn-metadata.json');
         const identifier = context.identifier.toLowerCase();
         let bin = null;
-        switch (match) {
+        switch (target) {
             case 'ncnn.model': {
                 if (identifier.endsWith('.param')) {
                     bin = context.identifier.substring(0, context.identifier.length - 6) + '.bin';
@@ -110,7 +110,7 @@ ncnn.ModelFactory = class {
                 }
             }
             default: {
-                throw new ncnn.Error("Unsupported ncnn format '" + match + "'.");
+                throw new ncnn.Error("Unsupported ncnn format '" + target + "'.");
             }
         }
     }

+ 3 - 3
source/nnabla.js

@@ -16,10 +16,10 @@ nnabla.ModelFactory = class {
         return undefined;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         await context.require('./nnabla-proto');
         nnabla.proto = protobuf.get('nnabla').nnabla;
-        switch (match) {
+        switch (target) {
             case 'nnabla.pbtxt': {
                 const stream = context.stream;
                 const reader = protobuf.TextReader.open(stream);
@@ -44,7 +44,7 @@ nnabla.ModelFactory = class {
                 }
             }
             default: {
-                throw new nnabla.Error("Unsupported nnabla format '" + match + "'.");
+                throw new nnabla.Error("Unsupported nnabla format '" + target + "'.");
             }
         }
     }

+ 3 - 3
source/nnef.js

@@ -25,8 +25,8 @@ nnef.ModelFactory = class {
         return null;
     }
 
-    async open(context, match) {
-        switch (match) {
+    async open(context, target) {
+        switch (target) {
             case 'nnef.graph': {
                 const stream = context.stream;
                 const reader = nnef.TextReader.open(stream);
@@ -36,7 +36,7 @@ nnef.ModelFactory = class {
                 throw new nnef.Error('NNEF dat format support not implemented.');
             }
             default: {
-                throw new nnef.Error("Unsupported NNEF format '" + match + "'.");
+                throw new nnef.Error("Unsupported NNEF format '" + target + "'.");
             }
         }
     }

+ 7 - 7
source/numpy.js

@@ -32,10 +32,10 @@ numpy.ModelFactory = class {
         return undefined;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         let format = '';
         const graphs = [];
-        switch (match.name) {
+        switch (target.name) {
             case 'npy': {
                 format = 'NumPy Array';
                 const execution = new python.Execution();
@@ -51,7 +51,7 @@ numpy.ModelFactory = class {
                 format = 'NumPy Zip';
                 const layers = new Map();
                 const execution = new python.Execution();
-                for (const entry of match.value) {
+                for (const entry of target.value) {
                     if (!entry[0].endsWith('.npy')) {
                         throw new numpy.Error("Invalid file name '" + entry.name + "'.");
                     }
@@ -84,7 +84,7 @@ numpy.ModelFactory = class {
                     }
                     return layers.get(name);
                 };
-                const weights = match.value;
+                const weights = target.value;
                 let separator = undefined;
                 if (Array.from(weights.keys()).every((key) => key.indexOf('.') !== -1)) {
                     separator = '.';
@@ -117,14 +117,14 @@ numpy.ModelFactory = class {
                 format = 'NumPy NDArray';
                 const layer = {
                     type: 'numpy.ndarray',
-                    parameters: [ { name: 'value', tensor: { name: '', array: match.value } } ]
+                    parameters: [ { name: 'value', tensor: { name: '', array: target.value } } ]
                 };
                 graphs.push({ layers: [ layer ] });
                 break;
             }
             case 'dnnlib.tflib.network': {
                 format = 'dnnlib';
-                for (const obj of match.value) {
+                for (const obj of target.value) {
                     const layers = new Map();
                     for (const entry of obj.variables) {
                         const name = entry[0];
@@ -148,7 +148,7 @@ numpy.ModelFactory = class {
                 break;
             }
             default: {
-                throw new numpy.Error("Unsupported NumPy format '" + match.name + "'.");
+                throw new numpy.Error("Unsupported NumPy format '" + target.name + "'.");
             }
         }
         return new numpy.Model(format, graphs);

+ 2 - 2
source/om.js

@@ -11,8 +11,8 @@ om.ModelFactory = class {
         return om.Container.open(context);
     }
 
-    async open(context, match) {
-        const container = match;
+    async open(context, target) {
+        const container = target;
         await container.open();
         const metadata = await context.metadata('om-metadata.json');
         return new om.Model(metadata, container);

+ 2 - 2
source/onednn.js

@@ -13,9 +13,9 @@ onednn.ModelFactory = class {
         return null;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         const metadata = await context.metadata('onednn-metadata.json');
-        return new onednn.Model(metadata, match);
+        return new onednn.Model(metadata, target);
     }
 };
 

+ 3 - 3
source/onnx.js

@@ -161,12 +161,12 @@ onnx.ModelFactory = class {
         return undefined;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         const open = async (model, format) => {
             const metadata = await onnx.Metadata.open(context);
             return new onnx.Model(metadata, model, format);
         };
-        switch (match) {
+        switch (target) {
             case 'onnx.pbtxt.ModelProto':
                 await context.require('./onnx-proto');
                 try {
@@ -268,7 +268,7 @@ onnx.ModelFactory = class {
                 throw new onnx.Error('Unsupported Pickle content.');
             }
             default: {
-                throw new onnx.Error("Unsupported ONNX format '" + match + "'.");
+                throw new onnx.Error("Unsupported ONNX format '" + target + "'.");
             }
         }
     }

+ 3 - 3
source/openvino.js

@@ -42,7 +42,7 @@ openvino.ModelFactory = class {
         return undefined;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         const open = async (stream, bin) => {
             const metadata = await context.metadata('openvino-metadata.json');
             let document = null;
@@ -60,7 +60,7 @@ openvino.ModelFactory = class {
             return new openvino.Model(metadata, net, bin);
         };
         const identifier = context.identifier;
-        switch (match) {
+        switch (target) {
             case 'openvino.xml':
                 try {
                     const stream = await context.request(identifier.substring(0, identifier.length - 4) + '.bin', null);
@@ -74,7 +74,7 @@ openvino.ModelFactory = class {
                 return open(stream, context.stream.peek());
             }
             default:
-                throw new openvino.Error("Unsupported OpenVINO format '" + match + "'.");
+                throw new openvino.Error("Unsupported OpenVINO format '" + target + "'.");
         }
     }
 };

+ 8 - 8
source/paddle.js

@@ -38,9 +38,9 @@ paddle.ModelFactory = class {
         return undefined;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         const metadata = await context.metadata('paddle-metadata.json');
-        switch (match) {
+        switch (target) {
             case 'paddle.naive': {
                 await context.require('./paddle-schema');
                 paddle.schema = flatbuffers.get('paddlelite').paddle.lite.fbs.proto;
@@ -54,9 +54,9 @@ paddle.ModelFactory = class {
                 const parts = identifier.split('.');
                 const extension = parts.pop().toLowerCase();
                 const base = parts.join('.');
-                const openProgram = (stream, match) => {
+                const openProgram = (stream, target) => {
                     const program = {};
-                    switch (match) {
+                    switch (target) {
                         case 'paddle.pbtxt': {
                             try {
                                 const reader = protobuf.TextReader.open(stream);
@@ -78,7 +78,7 @@ paddle.ModelFactory = class {
                             break;
                         }
                         default: {
-                            throw new paddle.Error("Unsupported Paddle format '" + match + "'.");
+                            throw new paddle.Error("Unsupported Paddle format '" + target + "'.");
                         }
                     }
                     const formatVersion = (version) => {
@@ -140,7 +140,7 @@ paddle.ModelFactory = class {
                     }
                     return weights;
                 };
-                switch (match) {
+                switch (target) {
                     case 'paddle.pickle': {
                         const container = paddle.Pickle.open(context);
                         return createModel(metadata, container.format, null, container.weights);
@@ -178,7 +178,7 @@ paddle.ModelFactory = class {
                             const container = new paddle.Pickle(obj);
                             return container.weights || new Map();
                         };
-                        const program = openProgram(context.stream, match);
+                        const program = openProgram(context.stream, target);
                         if (extension === 'pdmodel') {
                             try {
                                 const stream = await context.request(base + '.pdiparams', null);
@@ -224,7 +224,7 @@ paddle.ModelFactory = class {
                         return loadEntries(context, program);
                     }
                     default: {
-                        throw new paddle.Error("Unsupported PaddlePaddle format '" + match + "'.");
+                        throw new paddle.Error("Unsupported PaddlePaddle format '" + target + "'.");
                     }
                 }
             }

+ 2 - 2
source/pickle.js

@@ -22,9 +22,9 @@ pickle.ModelFactory = class {
         return null;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         let format = 'Pickle';
-        const obj = match;
+        const obj = target;
         if (obj === null || obj === undefined) {
             context.exception(new pickle.Error("Unsupported Pickle null object in '" + context.identifier + "'."));
         } else if (Array.isArray(obj)) {

+ 9 - 14
source/pytorch.js

@@ -12,14 +12,13 @@ pytorch.ModelFactory = class {
         return pytorch.Container.open(context);
     }
 
-    async open(context, match) {
+    async open(context, target) {
         const metadata = await pytorch.Metadata.open(context);
-        const container = match;
-        container.metadata = metadata;
+        const container = target;
         container.on('resolve', (_, name) => {
             context.exception(new pytorch.Error("Unknown type name '" + name + "'."), false);
         });
-        await container.read();
+        await container.read(metadata);
         return new pytorch.Model(metadata, container);
     }
 };
@@ -31,7 +30,8 @@ pytorch.Model = class {
         this._producer = container.producer || '';
         this._graphs = [];
         for (const entry of container.modules) {
-            this._graphs.push(new pytorch.Graph(metadata, entry[0], entry[1], container));
+            const graph = new pytorch.Graph(metadata, entry[0], entry[1]);
+            this._graphs.push(graph);
         }
     }
 
@@ -808,17 +808,12 @@ pytorch.Container = class {
     }
 
     constructor() {
-        this._metadata = null;
         this._events = [];
     }
 
     async read() {
     }
 
-    set metadata(value) {
-        this._metadata = value;
-    }
-
     on(event, callback) {
         this._events.push([ event, callback ]);
     }
@@ -988,10 +983,10 @@ pytorch.Container.Mobile = class extends pytorch.Container {
         this._context = context;
     }
 
-    async read() {
+    async read(metadata) {
         await this._context.require('./pytorch-schema');
         this._modules = new Map();
-        const execution = new pytorch.jit.Execution(null, this._metadata);
+        const execution = new pytorch.jit.Execution(null, metadata);
         for (const event in this._events) {
             execution.on(event[0], event[1]);
         }
@@ -1065,8 +1060,8 @@ pytorch.Container.Zip = class extends pytorch.Container {
         }
     }
 
-    async read() {
-        const execution = new pytorch.jit.Execution(null, this._metadata);
+    async read(metadata) {
+        const execution = new pytorch.jit.Execution(null, metadata);
         for (const event in this._events) {
             execution.on(event[0], event[1]);
         }

+ 2 - 2
source/rknn.js

@@ -10,11 +10,11 @@ rknn.ModelFactory = class {
         return rknn.Container.open(context);
     }
 
-    async open(context, match) {
+    async open(context, target) {
         await context.require('./rknn-schema');
         rknn.schema = flatbuffers.get('rknn').rknn;
         const metadata = await context.metadata('rknn-metadata.json');
-        const container = match;
+        const container = target;
         const type = container.type;
         switch (type) {
             case 'json': {

+ 2 - 2
source/safetensors.js

@@ -18,10 +18,10 @@ safetensors.ModelFactory = class {
         return '';
     }
 
-    async open(context, match) {
+    async open(context, target) {
         const stream = context.stream;
         stream.seek(8);
-        const buffer = stream.read(match.size);
+        const buffer = stream.read(target.size);
         const reader = json.TextReader.open(buffer);
         const obj = reader.read();
         const model = new safetensors.Model(obj, stream.position, stream);

+ 2 - 2
source/server.js

@@ -21,8 +21,8 @@ message.ModelFactory = class {
         return null;
     }
 
-    async open(context, match) {
-        return new message.Model(match);
+    async open(context, target) {
+        return new message.Model(target);
     }
 };
 

+ 6 - 6
source/sklearn.js

@@ -38,21 +38,21 @@ sklearn.ModelFactory = class {
         return null;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         const metadata = await context.metadata('sklearn-metadata.json');
         const obj = context.open('pkl');
-        return new sklearn.Model(metadata, match, obj);
+        return new sklearn.Model(metadata, target, obj);
     }
 };
 
 sklearn.Model = class {
 
-    constructor(metadata, match, obj) {
+    constructor(metadata, target, obj) {
         const formats = new Map([ [ 'sklearn', 'scikit-learn' ], [ 'scipy', 'SciPy' ], [ 'hmmlearn', 'hmmlearn' ] ]);
-        this._format = formats.get(match.split('.').shift());
+        this._format = formats.get(target.split('.').shift());
         this._graphs = [];
         const version = [];
-        switch (match) {
+        switch (target) {
             case 'sklearn':
             case 'scipy':
             case 'hmmlearn': {
@@ -86,7 +86,7 @@ sklearn.Model = class {
                 break;
             }
             default: {
-                throw new sklearn.Error("Unsupported scikit-learn format '" + match + "'.");
+                throw new sklearn.Error("Unsupported scikit-learn format '" + target + "'.");
             }
         }
         if (version.length > 0 && version.every((value) => value === version[0])) {

+ 2 - 2
source/tengine.js

@@ -10,9 +10,9 @@ tengine.ModelFactory = class {
         return tengine.Reader.open(context.stream);
     }
 
-    async open(context, match) {
+    async open(context, target) {
         const metadata = await tengine.Metadata.open(context);
-        return new tengine.Model(metadata, match);
+        return new tengine.Model(metadata, target);
     }
 };
 

+ 2 - 2
source/tensorrt.js

@@ -9,8 +9,8 @@ tensorrt.ModelFactory = class {
         return tensorrt.Engine.open(stream) || tensorrt.Container.open(stream);
     }
 
-    async open(context, match) {
-        return new tensorrt.Model(null, match);
+    async open(context, target) {
+        return new tensorrt.Model(null, target);
     }
 };
 

+ 3 - 3
source/tf.js

@@ -222,7 +222,7 @@ tf.ModelFactory = class {
         return undefined;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         await context.require('./tf-proto');
         tf.proto = protobuf.get('tf');
         const openModel = async (saved_model, format, producer, bundle) => {
@@ -650,7 +650,7 @@ tf.ModelFactory = class {
             saved_model.meta_graphs.push(meta_graph);
             return openSavedModel(saved_model, format, null);
         };
-        switch (match) {
+        switch (target) {
             case 'tf.bundle':
                 return openBundle(context);
             case 'tf.data':
@@ -680,7 +680,7 @@ tf.ModelFactory = class {
             case 'tf.pb.mmap':
                 return openMemmapped(context);
             default:
-                throw new tf.Error("Unsupported TensorFlow format '" + match + "'.");
+                throw new tf.Error("Unsupported TensorFlow format '" + target + "'.");
         }
     }
 };

+ 3 - 3
source/tflite.js

@@ -31,12 +31,12 @@ tflite.ModelFactory = class {
         return undefined;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         await context.require('./tflite-schema');
         tflite.schema = flatbuffers.get('tflite').tflite;
         let model = null;
         const attachments = new Map();
-        switch (match) {
+        switch (target) {
             case 'tflite.flatbuffers.json': {
                 try {
                     const obj = context.open('json');
@@ -70,7 +70,7 @@ tflite.ModelFactory = class {
                 break;
             }
             default: {
-                throw new tflite.Error("Unsupported TensorFlow Lite format '" + match + "'.");
+                throw new tflite.Error("Unsupported TensorFlow Lite format '" + target + "'.");
             }
         }
         const metadata = await context.metadata('tflite-metadata.json');

+ 3 - 3
source/tnn.js

@@ -36,9 +36,9 @@ tnn.ModelFactory = class {
         return '';
     }
 
-    async open(context, match) {
+    async open(context, target) {
         const metadata = await context.metadata('tnn-metadata.json');
-        switch (match) {
+        switch (target) {
             case 'tnn.model': {
                 const tnnmodel = context.identifier.substring(0, context.identifier.length - 9) + '.tnnmodel';
                 try {
@@ -56,7 +56,7 @@ tnn.ModelFactory = class {
                 return new tnn.Model(metadata, buffer, context.stream.peek());
             }
             default: {
-                throw new tnn.Error("Unsupported TNN format '" + match + "'.");
+                throw new tnn.Error("Unsupported TNN format '" + target + "'.");
             }
         }
     }

+ 2 - 2
source/torch.js

@@ -7,9 +7,9 @@ torch.ModelFactory = class {
         return torch.T7Reader.open(context);
     }
 
-    async open(context, match) {
+    async open(context, target) {
         const metadata = await context.metadata('torch-metadata.json');
-        const reader = match;
+        const reader = target;
         reader.callback = (name) => {
             if (name && name != 'nn.JointTrainModule' && !name.startsWith('nn.MSDNet_') && !name.startsWith('onmt.')) {
                 context.exception(new torch.Error("Unsupported type '" + name + "'."));

+ 3 - 3
source/uff.js

@@ -27,11 +27,11 @@ uff.ModelFactory = class {
         return undefined;
     }
 
-    async open(context, match) {
+    async open(context, target) {
         await context.require('./uff-proto');
         uff.proto = protobuf.get('uff').uff;
         let meta_graph = null;
-        switch (match) {
+        switch (target) {
             case 'uff.pb': {
                 try {
                     const stream = context.stream;
@@ -54,7 +54,7 @@ uff.ModelFactory = class {
                 break;
             }
             default: {
-                throw new uff.Error("Unsupported UFF format '" + match + "'.");
+                throw new uff.Error("Unsupported UFF format '" + target + "'.");
             }
         }
         const metadata = await context.metadata('uff-metadata.json');

+ 3 - 3
source/view.js

@@ -5165,10 +5165,10 @@ view.ModelFactoryService = class {
                         throw new view.Error("Failed to load module '" + id + "'.");
                     }
                     const modelFactory = new module.ModelFactory();
-                    const match = modelFactory.match(context);
-                    if (match) {
+                    const target = modelFactory.match(context);
+                    if (target) {
                         success = true;
-                        const model = await modelFactory.open(context, match);
+                        const model = await modelFactory.open(context, target);
                         if (!model.identifier) {
                             model.identifier = context.identifier;
                         }