|
|
@@ -70,8 +70,8 @@ darknet.Graph = class {
|
|
|
let nu = 0;
|
|
|
while (lines.length > 0) {
|
|
|
nu++;
|
|
|
- let line = lines.shift();
|
|
|
- line = line.replace(/\s/g, '');
|
|
|
+ const text = lines.shift();
|
|
|
+ const line = text.replace(/\s/g, '');
|
|
|
if (line.length > 0) {
|
|
|
switch (line[0]) {
|
|
|
case '#':
|
|
|
@@ -85,10 +85,13 @@ darknet.Graph = class {
|
|
|
break;
|
|
|
}
|
|
|
default: {
|
|
|
+ if (!section || line.charCodeAt(0) < 0x20 || line.charCodeAt(0) > 0x7E) {
|
|
|
+ throw new darknet.Error("Invalid cfg '" + text.replace(/[^\x20-\x7E]+/g, '').trimStart().trimEnd() + "' at line " + nu.toString() + ".");
|
|
|
+ }
|
|
|
if (section) {
|
|
|
let property = line.split('=');
|
|
|
if (property.length != 2) {
|
|
|
- throw new darknet.Error("Invalid cfg '" + line + "' at line " + nu.toString() + ".");
|
|
|
+ throw new darknet.Error("Invalid cfg '" + text.replace(/[^\x20-\x7E]+/g, '').trimStart().trimEnd() + "' at line " + nu.toString() + ".");
|
|
|
}
|
|
|
let key = property[0].trim();
|
|
|
let value = property[1].trim();
|
|
|
@@ -100,94 +103,353 @@ darknet.Graph = class {
|
|
|
}
|
|
|
}
|
|
|
|
|
|
- for (let section of sections) {
|
|
|
- section.values = {};
|
|
|
- const schema = metadata.getSchema(section.type);
|
|
|
- if (schema && schema.attributes) {
|
|
|
- for (let attribute of schema.attributes) {
|
|
|
- if (attribute.name) {
|
|
|
- if (section.options[attribute.name] !== undefined) {
|
|
|
- switch (attribute.type) {
|
|
|
- case 'int32':
|
|
|
- section.values[attribute.name] = parseInt(section.options[attribute.name], 10);
|
|
|
- break;
|
|
|
- case 'float32':
|
|
|
- section.values[attribute.name] = parseFloat(section.options[attribute.name]);
|
|
|
- break;
|
|
|
- case 'string':
|
|
|
- section.values[attribute.name] = section.options[attribute.name];
|
|
|
- break;
|
|
|
- }
|
|
|
- }
|
|
|
- else if (attribute.default !== undefined) {
|
|
|
- section.values[attribute.name] = attribute.default
|
|
|
- }
|
|
|
- }
|
|
|
- }
|
|
|
+ const option_find_int = (options, key, defaultValue) => {
|
|
|
+ const value = options[key];
|
|
|
+ return value !== undefined ? parseInt(value, 10) : defaultValue;
|
|
|
+ };
|
|
|
+
|
|
|
+ const option_find_str = (options, key, defaultValue) => {
|
|
|
+ const value = options[key];
|
|
|
+ return value !== undefined ? value : defaultValue;
|
|
|
+ };
|
|
|
+
|
|
|
+ let params = {};
|
|
|
+
|
|
|
+ const net = sections.shift();
|
|
|
+ switch (net.type) {
|
|
|
+ case 'net':
|
|
|
+ case 'network': {
|
|
|
+ params.h = option_find_int(net.options, 'height', 0);
|
|
|
+ params.w = option_find_int(net.options, 'width', 0);
|
|
|
+ params.c = option_find_int(net.options, 'channels', 0);
|
|
|
+ params.inputs = option_find_int(net.options, 'inputs', params.h * params.w * params.c);
|
|
|
+ break;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
- if (sections.length === 0) {
|
|
|
- throw new darknet.Error('Config file has no sections.');
|
|
|
- }
|
|
|
-
|
|
|
- let net = sections.shift();
|
|
|
- if (net.type !== 'net' && net.type !== 'network') {
|
|
|
- throw new darknet.Error('First section must be [net] or [network].');
|
|
|
- }
|
|
|
-
|
|
|
- const inputType = new darknet.TensorType('float32', new darknet.TensorShape([ net.values.width, net.values.height, net.values.channels ]));
|
|
|
-
|
|
|
+ const inputType = params.w && params.h && params.c ?
|
|
|
+ new darknet.TensorType('float32', new darknet.TensorShape([ params.w, params.h, params.c ])) :
|
|
|
+ new darknet.TensorType('float32', new darknet.TensorShape([ params.inputs ]));
|
|
|
const inputName = 'input';
|
|
|
- this._inputs.push(new darknet.Parameter(inputName, true, [
|
|
|
- new darknet.Argument(inputName, inputType, null)
|
|
|
- ]));
|
|
|
+ params.arguments = [ new darknet.Argument(inputName, inputType, null) ];
|
|
|
+ this._inputs.push(new darknet.Parameter(inputName, true, params.arguments));
|
|
|
|
|
|
- for (let i = 0; i < sections.length; i++) {
|
|
|
- sections[i]._outputs = [ i.toString() ];
|
|
|
+ if (sections.length === 0) {
|
|
|
+ throw new darknet.Error('Config file has no sections.');
|
|
|
}
|
|
|
|
|
|
- let inputs = [ inputName ];
|
|
|
+ let infer = true;
|
|
|
for (let i = 0; i < sections.length; i++) {
|
|
|
- const layer = sections[i];
|
|
|
- layer._inputs = inputs;
|
|
|
- inputs = [ i.toString() ];
|
|
|
- switch (layer.type) {
|
|
|
+ let section = sections[i];
|
|
|
+ section.layer = {};
|
|
|
+ section.tensors = [];
|
|
|
+ section.inputs = [];
|
|
|
+ section.outputs = [];
|
|
|
+ const options = section.options;
|
|
|
+ let layer = section.layer;
|
|
|
+ section.inputs = section.inputs.concat(params.arguments);
|
|
|
+ section.outputs.push(new darknet.Argument(i.toString(), null, null));
|
|
|
+ switch (section.type) {
|
|
|
case 'shortcut':
|
|
|
case 'sam':
|
|
|
case 'scale_channels': {
|
|
|
- let from = Number.parseInt(layer.options.from, 10);
|
|
|
- from = (from >= 0) ? from : (i + from);
|
|
|
- const shortcut = sections[from];
|
|
|
- if (shortcut) {
|
|
|
- layer._inputs.push(shortcut._outputs[0]);
|
|
|
+ let index = option_find_int(options, 'from', 0);
|
|
|
+ if (index < 0) {
|
|
|
+ index = i + index;
|
|
|
+ }
|
|
|
+ const from = sections[index];
|
|
|
+ if (from) {
|
|
|
+ section.inputs.push(from.outputs[0]);
|
|
|
+ section.from = from;
|
|
|
}
|
|
|
+ delete options.from;
|
|
|
break;
|
|
|
}
|
|
|
case 'route': {
|
|
|
- layer._inputs = [];
|
|
|
- const routes = layer.options.layers.split(',').map((route) => Number.parseInt(route.trim(), 10));
|
|
|
+ section.inputs = [];
|
|
|
+ section.input_sections = [];
|
|
|
+ const routes = options.layers.split(',').map((route) => Number.parseInt(route.trim(), 10));
|
|
|
for (let j = 0; j < routes.length; j++) {
|
|
|
const index = (routes[j] < 0) ? i + routes[j] : routes[j];
|
|
|
const route = sections[index];
|
|
|
if (route) {
|
|
|
- layer._inputs.push(route._outputs[0]);
|
|
|
+ section.inputs.push(route.outputs[0]);
|
|
|
+ section.input_sections.push(route);
|
|
|
}
|
|
|
}
|
|
|
+ delete options.layers;
|
|
|
break;
|
|
|
}
|
|
|
}
|
|
|
+ if (infer) {
|
|
|
+ switch (section.type) {
|
|
|
+ case 'convolutional':
|
|
|
+ case 'deconvolutional': {
|
|
|
+ const w = params.w;
|
|
|
+ const h = params.h;
|
|
|
+ const c = params.c;
|
|
|
+ const size = option_find_int(options, 'size', 1);
|
|
|
+ const n = option_find_int(options, 'filters', 1);
|
|
|
+ const pad = option_find_int(options, 'pad', 0);
|
|
|
+ const padding = pad ? (size >> 1) : option_find_int(options, 'padding', 0);
|
|
|
+ const stride = option_find_int(options, 'stride', 1);
|
|
|
+ const groups = option_find_int(options, 'groups', 1);
|
|
|
+ layer.out_w = Math.floor((w + 2 * padding - size) / stride) + 1;
|
|
|
+ layer.out_h = Math.floor((h + 2 * padding - size) / stride) + 1;
|
|
|
+ layer.out_c = n;
|
|
|
+ layer.outputs = layer.out_h * layer.out_w * layer.out_c;
|
|
|
+ section.tensors.push({ name: 'weights', shape: [ Math.floor(c / groups), n, size, size ]});
|
|
|
+ section.tensors.push({ name: 'biases', shape: [ n ]});
|
|
|
+ section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ layer.out_h, layer.out_w, layer.out_c ]));
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 'connected': {
|
|
|
+ const outputs = option_find_int(options, 'output', 1);
|
|
|
+ section.tensors.push({ name: 'weights', shape: [ params.inputs, outputs ] });
|
|
|
+ section.tensors.push({ name: 'biases', shape: [ outputs ] });
|
|
|
+ section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ outputs ]));
|
|
|
+ layer.out_h = 1;
|
|
|
+ layer.out_w = 1;
|
|
|
+ layer.out_c = outputs;
|
|
|
+ layer.outputs = outputs;
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 'local': {
|
|
|
+ const shape = section.inputs[0].type.shape.dimensions;
|
|
|
+ if (shape[0] !== params.w || shape[1] !== params.h || shape[2] !== params.c) {
|
|
|
+ throw new darknet.Error('Layer before local layer must output image.');
|
|
|
+ }
|
|
|
+ const n = option_find_int(options, 'filters' , 1);
|
|
|
+ const size = option_find_int(options, 'size', 1);
|
|
|
+ const stride = option_find_int(options, 'stride', 1);
|
|
|
+ const pad = option_find_int(options, 'pad', 0);
|
|
|
+ layer.out_h = Math.floor((params.h - (pad ? 1 : size)) / stride) + 1;
|
|
|
+ layer.out_w = Math.floor((params.w - (pad ? 1 : size)) / stride) + 1;
|
|
|
+ layer.out_c = n;
|
|
|
+ layer.outputs = layer.out_w * layer.out_h * layer.out_c;
|
|
|
+ section.tensors.push({ name: 'weights', shape: [ params.c, n, size, size, layer.out_h * layer.out_w ]});
|
|
|
+ section.tensors.push({ name: 'biases', shape: [ layer.out_w * layer.out_h * layer.out_c ]});
|
|
|
+ section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ layer.out_w, layer.out_h, layer.out_c ]));
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 'maxpool': {
|
|
|
+ const shape = section.inputs[0].type.shape.dimensions;
|
|
|
+ if (shape[0] !== params.w || shape[1] !== params.h || shape[2] !== params.c) {
|
|
|
+ throw new darknet.Error('Layer before maxpool layer must output image.');
|
|
|
+ }
|
|
|
+ const stride = option_find_int(options, 'stride', 1);
|
|
|
+ const size = option_find_int(options, 'size', stride);
|
|
|
+ const padding = option_find_int(options, 'padding', size - 1);
|
|
|
+ layer.out_w = Math.floor((params.w + padding - size) / stride) + 1;
|
|
|
+ layer.out_h = Math.floor((params.h + padding - size) / stride) + 1;
|
|
|
+ layer.out_c = params.c;
|
|
|
+ layer.outputs = layer.out_w * layer.out_h * layer.out_c;
|
|
|
+ section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ layer.out_w, layer.out_h, layer.out_c ]));
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 'avgpool': {
|
|
|
+ const shape = section.inputs[0].type.shape.dimensions;
|
|
|
+ if (shape[0] !== params.w || shape[1] !== params.h || shape[2] !== params.c) {
|
|
|
+ throw new darknet.Error('Layer before avgpool layer must output image.');
|
|
|
+ }
|
|
|
+ layer.out_w = 1;
|
|
|
+ layer.out_h = 1;
|
|
|
+ layer.out_c = params.c;
|
|
|
+ layer.outputs = layer.out_c;
|
|
|
+ section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ layer.out_w, layer.out_h, layer.out_c ]));
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 'gru':
|
|
|
+ case 'rnn':
|
|
|
+ case 'lstm':{
|
|
|
+ const output = option_find_int(options, 'output', 1);
|
|
|
+ layer.outputs = output;
|
|
|
+ section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ output ]));
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 'softmax':
|
|
|
+ case 'dropout': {
|
|
|
+ layer.out_w = params.w;
|
|
|
+ layer.out_h = params.h;
|
|
|
+ layer.out_c = params.c;
|
|
|
+ layer.outputs = params.inputs;
|
|
|
+ section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ layer.outputs ]));
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 'upsample': {
|
|
|
+ const stride = option_find_int(options, 'stride', 2);
|
|
|
+ layer.out_w = params.w * stride;
|
|
|
+ layer.out_h = params.h * stride;
|
|
|
+ layer.out_c = params.c;
|
|
|
+ layer.outputs = layer.out_w * layer.out_h * layer.out_c;
|
|
|
+ section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ layer.out_w, layer.out_h, layer.out_c ]));
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 'crop': {
|
|
|
+ const shape = section.inputs[0].type.shape.dimensions;
|
|
|
+ if (shape[0] !== params.w || shape[1] !== params.h || shape[2] !== params.c) {
|
|
|
+ throw new darknet.Error('Layer before crop layer must output image.');
|
|
|
+ }
|
|
|
+ const crop_height = option_find_int(options, 'crop_height', 1);
|
|
|
+ const crop_width = option_find_int(options, 'crop_width', 1);
|
|
|
+ layer.out_w = crop_width;
|
|
|
+ layer.out_h = crop_height;
|
|
|
+ layer.out_c = params.c;
|
|
|
+ layer.outputs = layer.out_w * layer.out_h * layer.out_c;
|
|
|
+ section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ layer.out_w, layer.out_h, layer.out_c ]));
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 'yolo': {
|
|
|
+ const w = params.w;
|
|
|
+ const h = params.h;
|
|
|
+ const classes = option_find_int(options, 'classes', 20);
|
|
|
+ const n = option_find_int(options, 'num', 1);
|
|
|
+ layer.out_h = h;
|
|
|
+ layer.out_w = w;
|
|
|
+ layer.out_c = n * (classes + 4 + 1);
|
|
|
+ layer.outputs = layer.out_h * layer.out_w * layer.out_c;
|
|
|
+ section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ layer.out_w, layer.out_h, layer.out_c ]));
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 'region': {
|
|
|
+ const coords = option_find_int(options, 'coords', 4);
|
|
|
+ const classes = option_find_int(options, 'classes', 20);
|
|
|
+ const num = option_find_int(options, 'num', 1);
|
|
|
+ layer.outputs = params.h * params.w * num * (classes + coords + 1);
|
|
|
+ section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ params.h, params.w, num, (classes + coords + 1) ]));
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 'cost': {
|
|
|
+ layer.outputs = params.inputs;
|
|
|
+ section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ layer.outputs ]));
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 'reorg': {
|
|
|
+ const stride = option_find_int(options, 'stride', 1);
|
|
|
+ const reverse = option_find_int(options, 'reverse', 0);
|
|
|
+ const extra = option_find_int(options, 'extra', 0);
|
|
|
+ if (reverse) {
|
|
|
+ layer.out_w = params.w * stride;
|
|
|
+ layer.out_h = params.h * stride;
|
|
|
+ layer.out_c = Math.floor(params.c / (stride * stride));
|
|
|
+ }
|
|
|
+ else {
|
|
|
+ layer.out_w = Math.floor(params.w / stride);
|
|
|
+ layer.out_h = Math.floor(params.h / stride);
|
|
|
+ layer.out_c = params.c * (stride * stride);
|
|
|
+ }
|
|
|
+ layer.outputs = layer.out_h * layer.out_w * layer.out_c;
|
|
|
+ if (extra) {
|
|
|
+ layer.out_w = 0;
|
|
|
+ layer.out_h = 0;
|
|
|
+ layer.out_c = 0;
|
|
|
+ layer.outputs = (params.h * params.w * params.c) + extra;
|
|
|
+ }
|
|
|
+ section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ layer.outputs ]));
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 'scale_channels': {
|
|
|
+ infer = false;
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 'route': {
|
|
|
+ let layers = section.input_sections.map((route_section) => route_section.layer);
|
|
|
+ layer.outputs = 0;
|
|
|
+ for (let input_layer of layers) {
|
|
|
+ layer.outputs += input_layer.outputs;
|
|
|
+ }
|
|
|
+ const first = layers.shift();
|
|
|
+ layer.out_w = first.out_w;
|
|
|
+ layer.out_h = first.out_h;
|
|
|
+ layer.out_c = first.out_c;
|
|
|
+ while (layers.length > 0) {
|
|
|
+ const next = layers.shift();
|
|
|
+ if (next.out_w === first.out_w && next.out_h === first.out_h) {
|
|
|
+ layer.out_c += next.out_c;
|
|
|
+ }
|
|
|
+ else {
|
|
|
+ layer.out_h = 0;
|
|
|
+ layer.out_w = 0;
|
|
|
+ layer.out_c = 0;
|
|
|
+ }
|
|
|
+ }
|
|
|
+ section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ layer.out_h, layer.out_w, layer.out_c ]));
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 'shortcut': {
|
|
|
+ const from = section.from;
|
|
|
+ layer.w = from.layer.out_w;
|
|
|
+ layer.h = from.layer.out_h;
|
|
|
+ layer.c = from.layer.out_c;
|
|
|
+ layer.out_w = params.w;
|
|
|
+ layer.out_h = params.h;
|
|
|
+ layer.out_c = params.c;
|
|
|
+ layer.outputs = params.w * params.h * params.c;
|
|
|
+ section.outputs[0].type = new darknet.TensorType('float32', new darknet.TensorShape([ params.w, params.h, params.c ]));
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ default: {
|
|
|
+ infer = false;
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ }
|
|
|
+ params.h = layer.out_h;
|
|
|
+ params.w = layer.out_w;
|
|
|
+ params.c = layer.out_c;
|
|
|
+ params.inputs = layer.outputs;
|
|
|
+ }
|
|
|
+ params.arguments = section.outputs;
|
|
|
+
|
|
|
+ const batch_normalize = option_find_int(section.options, 'batch_normalize', 0);
|
|
|
+ if (batch_normalize) {
|
|
|
+ let size = -1;
|
|
|
+ switch (section.type) {
|
|
|
+ case 'convolutional': {
|
|
|
+ size = option_find_int(options, 'filters', 1);
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ case 'crnn':
|
|
|
+ case 'gru':
|
|
|
+ case 'rnn':
|
|
|
+ case 'lstm':
|
|
|
+ case 'connected': {
|
|
|
+ size = option_find_int(options, 'output', 1);
|
|
|
+ break;
|
|
|
+ }
|
|
|
+ }
|
|
|
+ if (size < 0) {
|
|
|
+ throw new darknet.Error("Invalid batch_normalize size for '" + section.type + "'.");
|
|
|
+ }
|
|
|
+ let chain = {};
|
|
|
+ chain.type = 'batch_normalize';
|
|
|
+ chain.tensors = [
|
|
|
+ { name: 'scale', shape: [ size ] },
|
|
|
+ { name: 'mean', shape: [ size ] },
|
|
|
+ { name: 'variance', shape: [ size ] }
|
|
|
+ ];
|
|
|
+ section.chain = section.chain || [];
|
|
|
+ section.chain.push(chain);
|
|
|
+ }
|
|
|
+
|
|
|
+ const defaultActivation = section.type === 'shortcut' ? 'linear' : 'logistic';
|
|
|
+ const activation = option_find_str(section.options, 'activation', defaultActivation);
|
|
|
+ if (activation !== defaultActivation) {
|
|
|
+ let chain = {};
|
|
|
+ chain.type = activation;
|
|
|
+ section.chain = section.chain || [];
|
|
|
+ section.chain.push(chain);
|
|
|
+ }
|
|
|
}
|
|
|
+
|
|
|
for (let i = 0; i < sections.length; i++) {
|
|
|
this._nodes.push(new darknet.Node(metadata, net, sections[i], i.toString()));
|
|
|
}
|
|
|
|
|
|
if (sections.length > 0) {
|
|
|
- const lastLayer = sections[sections.length - 1];
|
|
|
- for (let i = 0; i < lastLayer._outputs.length; i++) {
|
|
|
- this._outputs.push(new darknet.Parameter('output' + (i > 1 ? i.toString() : ''), true, [
|
|
|
- new darknet.Argument(lastLayer._outputs[i], null, null)
|
|
|
- ]));
|
|
|
+ const last = sections[sections.length - 1];
|
|
|
+ for (let i = 0; i < last.outputs.length; i++) {
|
|
|
+ const outputName = 'output' + (i > 1 ? i.toString() : '');
|
|
|
+ this._outputs.push(new darknet.Parameter(outputName, true, [ last.outputs[i] ]));
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
@@ -245,6 +507,13 @@ darknet.Argument = class {
|
|
|
return this._type;
|
|
|
}
|
|
|
|
|
|
+ set type(value) {
|
|
|
+ if (this._type) {
|
|
|
+ throw new darknet.Error('Invalid argument type set operation.');
|
|
|
+ }
|
|
|
+ this._type = value;
|
|
|
+ }
|
|
|
+
|
|
|
get initializer() {
|
|
|
return this._initializer;
|
|
|
}
|
|
|
@@ -252,72 +521,37 @@ darknet.Argument = class {
|
|
|
|
|
|
darknet.Node = class {
|
|
|
|
|
|
- constructor(metadata, net, layer, name) {
|
|
|
+ constructor(metadata, net, section, name) {
|
|
|
this._name = name;
|
|
|
this._metadata = metadata;
|
|
|
- this._operator = layer.type;
|
|
|
+ this._operator = section.type;
|
|
|
this._attributes = [];
|
|
|
this._inputs = [];
|
|
|
this._outputs = [];
|
|
|
this._chain = [];
|
|
|
- if (layer._inputs && layer._inputs.length > 0) {
|
|
|
- this._inputs.push(new darknet.Parameter(layer._inputs.length <= 1 ? 'input' : 'inputs', true, layer._inputs.map((input) => {
|
|
|
- return new darknet.Argument(input, null, null);
|
|
|
- })));
|
|
|
+ if (section.inputs && section.inputs.length > 0) {
|
|
|
+ this._inputs.push(new darknet.Parameter(section.inputs.length <= 1 ? 'input' : 'inputs', true, section.inputs));
|
|
|
}
|
|
|
- if (layer._outputs && layer._outputs.length > 0) {
|
|
|
- this._outputs.push(new darknet.Parameter(layer._outputs.length <= 1 ? 'output' : 'outputs', true, layer._outputs.map((output) => {
|
|
|
- return new darknet.Argument(output, null, null);
|
|
|
- })));
|
|
|
+ if (section.tensors && section.tensors.length > 0) {
|
|
|
+ for (let tensor of section.tensors) {
|
|
|
+ const type = new darknet.TensorType('float', new darknet.TensorShape(tensor.shape));
|
|
|
+ this._inputs.push(new darknet.Parameter(tensor.name, true, [
|
|
|
+ new darknet.Argument('', null, new darknet.Tensor('', type) )
|
|
|
+ ]));
|
|
|
+ }
|
|
|
}
|
|
|
- switch (layer.type) {
|
|
|
- case 'convolutional':
|
|
|
- case 'deconvolutional':
|
|
|
- this._initializer('biases', [ layer.values.filters ]);
|
|
|
- this._initializer('weights', [ net.values.channels, layer.values.size, layer.values.size, layer.values.filters ]);
|
|
|
- this._batch_normalize(metadata, net, layer, layer.values.filters);
|
|
|
- this._activation(metadata, net, layer, 'logistic');
|
|
|
- break;
|
|
|
- case 'connected':
|
|
|
- this._initializer('biases', [ layer.values.output ]);
|
|
|
- this._initializer('weights');
|
|
|
- this._batch_normalize(metadata, net, layer, layer.values.output);
|
|
|
- this._activation(metadata, net, layer);
|
|
|
- break;
|
|
|
- case 'crnn':
|
|
|
- this._batch_normalize(metadata, net, layer);
|
|
|
- this._activation(metadata, net, layer);
|
|
|
- break;
|
|
|
- case 'rnn':
|
|
|
- this._batch_normalize(metadata, net, layer, layer.values.output);
|
|
|
- this._activation(metadata, net, layer);
|
|
|
- break;
|
|
|
- case 'gru':
|
|
|
- this._batch_normalize(metadata, net, layer);
|
|
|
- break;
|
|
|
- case 'lstm':
|
|
|
- this._batch_normalize(metadata, net, layer);
|
|
|
- break;
|
|
|
- case 'shortcut':
|
|
|
- this._activation(metadata, net, layer);
|
|
|
- break;
|
|
|
- case 'batch_normalize':
|
|
|
- this._initializer('scale', [ layer.values.size ]);
|
|
|
- this._initializer('mean', [ layer.values.size ]);
|
|
|
- this._initializer('variance', [ layer.values.size ]);
|
|
|
- break;
|
|
|
+ if (section.outputs && section.outputs.length > 0) {
|
|
|
+ this._outputs.push(new darknet.Parameter(section.outputs.length <= 1 ? 'output' : 'outputs', true, section.outputs));
|
|
|
}
|
|
|
-
|
|
|
- switch (layer.type) {
|
|
|
- case 'shortcut':
|
|
|
- delete layer.options.from;
|
|
|
- break;
|
|
|
- case 'route':
|
|
|
- delete layer.options.layers;
|
|
|
- break;
|
|
|
+ if (section.chain) {
|
|
|
+ for (let chain of section.chain) {
|
|
|
+ this._chain.push(new darknet.Node(metadata, net, chain, ''));
|
|
|
+ }
|
|
|
}
|
|
|
- for (let key of Object.keys(layer.options)) {
|
|
|
- this._attributes.push(new darknet.Attribute(metadata, this._operator, key, layer.options[key]));
|
|
|
+ if (section.options) {
|
|
|
+ for (let key of Object.keys(section.options)) {
|
|
|
+ this._attributes.push(new darknet.Attribute(metadata, this._operator, key, section.options[key]));
|
|
|
+ }
|
|
|
}
|
|
|
}
|
|
|
|
|
|
@@ -390,31 +624,6 @@ darknet.Node = class {
|
|
|
get chain() {
|
|
|
return this._chain;
|
|
|
}
|
|
|
-
|
|
|
- _initializer(name, shape) {
|
|
|
- const id = this._name.toString() + '_' + name;
|
|
|
- this._inputs.push(new darknet.Parameter(name, true, [
|
|
|
- new darknet.Argument(id, null, new darknet.Tensor(id, shape))
|
|
|
- ]));
|
|
|
- }
|
|
|
-
|
|
|
- _batch_normalize(metadata, net, layer, size) {
|
|
|
- if (layer.values.batch_normalize === 1) {
|
|
|
- const batch_normalize_layer = { type: 'batch_normalize', options: {}, values: { size: size || 0 }, _inputs: [], _outputs: [] };
|
|
|
- this._chain.push(new darknet.Node(metadata, net, batch_normalize_layer, ''));
|
|
|
- }
|
|
|
- delete layer.options.batch_normalize;
|
|
|
- }
|
|
|
-
|
|
|
- _activation(metadata, net, layer) {
|
|
|
- const attributeSchema = metadata.getAttributeSchema(layer.type, 'activation');
|
|
|
- if (attributeSchema) {
|
|
|
- if (layer.options.activation !== attributeSchema.default) {
|
|
|
- this._chain.push(new darknet.Node(metadata, net, { type: layer.options.activation, options: {}, values: {}, _inputs: [], _outputs: [] }, ''));
|
|
|
- }
|
|
|
- delete layer.options.activation;
|
|
|
- }
|
|
|
- }
|
|
|
};
|
|
|
|
|
|
darknet.Attribute = class {
|
|
|
@@ -465,10 +674,13 @@ darknet.Attribute = class {
|
|
|
|
|
|
darknet.Tensor = class {
|
|
|
|
|
|
- constructor(id, shape) {
|
|
|
- shape = shape || null;
|
|
|
+ constructor(id, type) {
|
|
|
this._id = id;
|
|
|
- this._type = new darknet.TensorType('?', new darknet.TensorShape(shape));
|
|
|
+ this._type = type;
|
|
|
+ }
|
|
|
+
|
|
|
+ get kind() {
|
|
|
+ return 'Tensor';
|
|
|
}
|
|
|
|
|
|
get name() {
|
|
|
@@ -515,6 +727,9 @@ darknet.TensorType = class {
|
|
|
darknet.TensorShape = class {
|
|
|
|
|
|
constructor(dimensions) {
|
|
|
+ if (dimensions.some((dimension) => !dimension)) {
|
|
|
+ throw new darknet.Error('Invalid tensor shape.');
|
|
|
+ }
|
|
|
this._dimensions = dimensions;
|
|
|
}
|
|
|
|