darknet.js 55 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175
  1. /* jshint esversion: 6 */
  2. var darknet = darknet || {};
  3. darknet.ModelFactory = class {
  4. match(context) {
  5. const extension = context.identifier.split('.').pop().toLowerCase();
  6. if (extension == 'cfg' || extension == 'model') {
  7. const contains = (buffer, length, text) => {
  8. length = Math.min(buffer.length - text.length, length - text.length);
  9. const match = Array.from(text).map((c) => c.charCodeAt(0));
  10. for (let i = 0; i < length; i++) {
  11. if (match.every((c, index) => buffer[i + index] === c)) {
  12. return true;
  13. }
  14. }
  15. return false;
  16. };
  17. if (contains(context.buffer, 1024, '[net]')) {
  18. return true;
  19. }
  20. }
  21. return false;
  22. }
  23. open(context, host) {
  24. return darknet.Metadata.open(host).then((metadata) => {
  25. const identifier = context.identifier;
  26. const parts = identifier.split('.');
  27. parts.pop();
  28. const basename = parts.join('.');
  29. return context.request(basename + '.weights', null).then((weights) => {
  30. return this._openModel(metadata, identifier, context.text, weights);
  31. }).catch(() => {
  32. return this._openModel(metadata, identifier, context.text, null);
  33. });
  34. });
  35. }
  36. _openModel( metadata, identifier, cfg, weights) {
  37. try {
  38. return new darknet.Model(metadata, cfg, weights ? new darknet.Weights(weights) : null);
  39. }
  40. catch (error) {
  41. const message = error && error.message ? error.message : error.toString();
  42. throw new darknet.Error(message.replace(/\.$/, '') + " in '" + identifier + "'.");
  43. }
  44. }
  45. };
  46. darknet.Model = class {
  47. constructor(metadata, cfg, weights) {
  48. this._graphs = [ new darknet.Graph(metadata, cfg, weights) ];
  49. }
  50. get format() {
  51. return 'Darknet';
  52. }
  53. get graphs() {
  54. return this._graphs;
  55. }
  56. };
darknet.Graph = class {

    // Builds the graph from Darknet cfg text plus an optional weights stream.
    // cfg is the raw .cfg file contents; weights is a darknet.Weights reader
    // (or null) that is consumed positionally as layers are constructed.
    constructor(metadata, cfg, weights) {
        this._inputs = [];
        this._outputs = [];
        this._nodes = [];
        // read_cfg: split the cfg into '[type]' sections, each carrying its
        // starting line number and a key=value options map.
        const sections = [];
        let section = null;
        const lines = cfg.split('\n');
        let lineNumber = 0;
        while (lines.length > 0) {
            lineNumber++;
            const text = lines.shift();
            const line = text.replace(/\s/g, ''); // drop ALL whitespace, not just leading/trailing
            if (line.length > 0) {
                switch (line[0]) {
                    case '#': // comment line
                    case ';': // comment line
                        break;
                    case '[': {
                        // Section header, e.g. '[convolutional]'; a missing ']' is tolerated.
                        section = {};
                        section.line = lineNumber;
                        section.type = line[line.length - 1] === ']' ? line.substring(1, line.length - 1) : line.substring(1);
                        section.options = {};
                        sections.push(section);
                        break;
                    }
                    default: {
                        // NOTE(review): line[0] is a one-character string, so the numeric
                        // comparisons against 0x20/0x7E are always false; this looks like it
                        // was meant to be line.charCodeAt(0) -- confirm before relying on it.
                        if (!section || line[0] < 0x20 || line[0] > 0x7E) {
                            throw new darknet.Error("Invalid cfg '" + text.replace(/[^\x20-\x7E]+/g, '').trim() + "' at line " + lineNumber.toString() + ".");
                        }
                        if (section) {
                            // 'key=value' option belonging to the current section.
                            const index = line.indexOf('=');
                            if (index < 0) {
                                throw new darknet.Error("Invalid cfg '" + text.replace(/[^\x20-\x7E]+/g, '').trim() + "' at line " + lineNumber.toString() + ".");
                            }
                            const key = line.substring(0, index);
                            const value = line.substring(index + 1);
                            section.options[key] = value;
                        }
                        break;
                    }
                }
            }
        }
  102. const option_find_int = (options, key, defaultValue) => {
  103. let value = options[key];
  104. if (typeof value === 'string' && value.startsWith('$')) {
  105. const key = value.substring(1);
  106. value = globals.has(key) ? globals.get(key) : value;
  107. }
  108. if (value !== undefined) {
  109. const number = parseInt(value, 10);
  110. if (!Number.isInteger(number)) {
  111. throw new darknet.Error("Invalid int option '" + JSON.stringify(options[key]) + "'.");
  112. }
  113. return number;
  114. }
  115. return defaultValue;
  116. };
  117. const option_find_str = (options, key, defaultValue) => {
  118. const value = options[key];
  119. return value !== undefined ? value : defaultValue;
  120. };
  121. const make_shape = (dimensions, source) => {
  122. if (dimensions.some((dimension) => dimension === 0 || dimension === undefined || isNaN(dimension))) {
  123. throw new darknet.Error("Invalid tensor shape '" + JSON.stringify(dimensions) + "' in '" + source + "'.");
  124. }
  125. return new darknet.TensorShape(dimensions);
  126. };
  127. const load_weights = (name, shape, visible) => {
  128. const data = weights ? weights.bytes(4 * shape.reduce((a, b) => a * b)) : null;
  129. const type = new darknet.TensorType('float32', make_shape(shape, 'load_weights'));
  130. const initializer = new darknet.Tensor(type, data);
  131. const argument = new darknet.Argument('', null, initializer);
  132. return new darknet.Parameter(name, visible === false ? false : true, [ argument ]);
  133. };
  134. const load_batch_normalize_weights = (layer, prefix, size) => {
  135. layer.weights.push(load_weights(prefix + 'scale', [ size ], prefix === ''));
  136. layer.weights.push(load_weights(prefix + 'mean', [ size ], prefix === ''));
  137. layer.weights.push(load_weights(prefix + 'variance', [ size ], prefix === ''));
  138. };
  139. const make_convolutional_layer = (layer, prefix, w, h, c, n, groups, size, stride_x, stride_y, padding, batch_normalize) => {
  140. layer.out_w = Math.floor((w + 2 * padding - size) / stride_x) + 1;
  141. layer.out_h = Math.floor((h + 2 * padding - size) / stride_y) + 1;
  142. layer.out_c = n;
  143. layer.out = layer.out_w * layer.out_h * layer.out_c;
  144. layer.weights.push(load_weights(prefix + 'biases', [ n ], prefix === ''));
  145. if (batch_normalize) {
  146. load_batch_normalize_weights(layer, prefix, n);
  147. }
  148. layer.weights.push(load_weights(prefix + 'weights', [ Math.floor(c / groups), n, size, size ], prefix === ''));
  149. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ layer.out_w, layer.out_h, layer.out_c ], 'make_convolutional_layer'));
  150. };
  151. const make_connected_layer = (layer, prefix, inputs, outputs, batch_normalize) => {
  152. layer.out_h = 1;
  153. layer.out_w = 1;
  154. layer.out_c = outputs;
  155. layer.out = outputs;
  156. layer.weights.push(load_weights(prefix + 'biases', [ outputs ], prefix === ''));
  157. if (batch_normalize) {
  158. load_batch_normalize_weights(layer, prefix, outputs);
  159. }
  160. layer.weights.push(load_weights(prefix + 'weights', [ inputs, outputs ], prefix === ''));
  161. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ outputs ], 'make_connected_layer'));
  162. };
  163. const params = {};
  164. const globals = new Map();
  165. const net = sections.shift();
  166. switch (net.type) {
  167. case 'net':
  168. case 'network': {
  169. params.h = option_find_int(net.options, 'height', 0);
  170. params.w = option_find_int(net.options, 'width', 0);
  171. params.c = option_find_int(net.options, 'channels', 0);
  172. params.inputs = option_find_int(net.options, 'inputs', params.h * params.w * params.c);
  173. for (const key of Object.keys(net.options)) {
  174. globals.set(key, net.options[key]);
  175. }
  176. break;
  177. }
  178. }
  179. const inputType = params.w && params.h && params.c ?
  180. new darknet.TensorType('float32', make_shape([ params.w, params.h, params.c ], 'params-if')) :
  181. new darknet.TensorType('float32', make_shape([ params.inputs ], 'params-else'));
  182. const inputName = 'input';
  183. params.arguments = [ new darknet.Argument(inputName, inputType, null) ];
  184. this._inputs.push(new darknet.Parameter(inputName, true, params.arguments));
  185. if (sections.length === 0) {
  186. throw new darknet.Error('Config file has no sections.');
  187. }
  188. let infer = true;
  189. for (let i = 0; i < sections.length; i++) {
  190. const section = sections[i];
  191. section.name = i.toString();
  192. section.chain = [];
  193. section.layer = {};
  194. const options = section.options;
  195. const layer = section.layer;
  196. layer.inputs = [].concat(params.arguments);
  197. layer.outputs = [ new darknet.Argument(i.toString(), null, null) ];
  198. layer.weights = [];
  199. switch (section.type) {
  200. case 'shortcut': {
  201. const from = options.from ? options.from.split(',').map((item) => Number.parseInt(item.trim(), 10)) : [];
  202. for (let index of from) {
  203. index = (index < 0) ? i + index : index;
  204. const item = sections[index] ? sections[index].layer : null;
  205. if (item) {
  206. layer.inputs.push(item.outputs[0]);
  207. }
  208. }
  209. delete options.from;
  210. break;
  211. }
  212. case 'sam':
  213. case 'scale_channels': {
  214. let index = option_find_int(options, 'from', 0);
  215. index = (index < 0) ? i + index : index;
  216. const item = index < sections.length ? sections[index].layer : null;
  217. if (item) {
  218. layer.from = item;
  219. layer.inputs.push(item.outputs[0]);
  220. }
  221. delete options.from;
  222. break;
  223. }
  224. case 'route': {
  225. layer.inputs = [];
  226. layer.layers = [];
  227. const routes = options.layers ? options.layers.split(',').map((route) => Number.parseInt(route.trim(), 10)) : [];
  228. for (let j = 0; j < routes.length; j++) {
  229. const index = (routes[j] < 0) ? i + routes[j] : routes[j];
  230. const item = index < sections.length ? sections[index].layer : null;
  231. if (item) {
  232. layer.inputs.push(item.outputs[0]);
  233. layer.layers.push(item);
  234. }
  235. }
  236. delete options.layers;
  237. break;
  238. }
  239. }
  240. if (infer) {
  241. switch (section.type) {
  242. case 'conv':
  243. case 'convolutional':
  244. case 'deconvolutional': {
  245. const shape = layer.inputs[0].type.shape.dimensions;
  246. if (shape[0] !== params.w || shape[1] !== params.h || shape[2] !== params.c) {
  247. throw new darknet.Error('Layer before convolutional layer must output image.');
  248. }
  249. const size = option_find_int(options, 'size', 1);
  250. const n = option_find_int(options, 'filters', 1);
  251. const pad = option_find_int(options, 'pad', 0);
  252. const padding = pad ? (size >> 1) : option_find_int(options, 'padding', 0);
  253. let stride_x = option_find_int(options, 'stride_x', -1);
  254. let stride_y = option_find_int(options, 'stride_y', -1);
  255. if (stride_x < 1 || stride_y < 1) {
  256. const stride = option_find_int(options, 'stride', 1);
  257. stride_x = stride_x < 1 ? stride : stride_x;
  258. stride_y = stride_y < 1 ? stride : stride_y;
  259. }
  260. const groups = option_find_int(options, 'groups', 1);
  261. const batch_normalize = option_find_int(options, 'batch_normalize', 0);
  262. const activation = option_find_str(options, 'activation', 'logistic');
  263. make_convolutional_layer(layer, '', params.w, params.h, params.c, n, groups, size, stride_x, stride_y, padding, batch_normalize);
  264. if (activation !== 'logistic') {
  265. section.chain.push({ type: activation });
  266. }
  267. break;
  268. }
  269. case 'connected': {
  270. const outputs = option_find_int(options, 'output', 1);
  271. const batch_normalize = option_find_int(options, 'batch_normalize', 0);
  272. const activation = option_find_str(options, 'activation', 'logistic');
  273. make_connected_layer(layer, '', params.inputs, outputs, batch_normalize);
  274. if (activation !== 'logistic') {
  275. section.chain.push({ type: activation });
  276. }
  277. break;
  278. }
  279. case 'local': {
  280. const shape = layer.inputs[0].type.shape.dimensions;
  281. if (shape[0] !== params.w || shape[1] !== params.h || shape[2] !== params.c) {
  282. throw new darknet.Error('Layer before avgpool layer must output image.');
  283. }
  284. const n = option_find_int(options, 'filters' , 1);
  285. const size = option_find_int(options, 'size', 1);
  286. const stride = option_find_int(options, 'stride', 1);
  287. const pad = option_find_int(options, 'pad', 0);
  288. const activation = option_find_str(options, 'activation', 'logistic');
  289. layer.out_h = Math.floor((params.h - (pad ? 1 : size)) / stride) + 1;
  290. layer.out_w = Math.floor((params.w - (pad ? 1 : size)) / stride) + 1;
  291. layer.out_c = n;
  292. layer.out = layer.out_w * layer.out_h * layer.out_c;
  293. layer.weights.push(load_weights('weights', [ params.c, n, size, size, layer.out_h * layer.out_w ]));
  294. layer.weights.push(load_weights('biases',[ layer.out_w * layer.out_h * layer.out_c ]));
  295. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ layer.out_w, layer.out_h, layer.out_c ], 'local'));
  296. if (activation !== 'logistic') {
  297. section.chain.push({ type: activation });
  298. }
  299. break;
  300. }
  301. case 'batchnorm': {
  302. layer.out_h = params.h;
  303. layer.out_w = params.w;
  304. layer.out_c = params.c;
  305. layer.out = layer.in;
  306. load_batch_normalize_weights(weights, section, '', layer.out);
  307. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ layer.ouputs ], 'batchnorm'));
  308. break;
  309. }
  310. case 'activation': {
  311. layer.out_h = params.h;
  312. layer.out_w = params.w;
  313. layer.out_c = params.c;
  314. layer.out = layer.in;
  315. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ layer.ouputs ], 'activation'));
  316. break;
  317. }
  318. case 'max':
  319. case 'maxpool': {
  320. const shape = layer.inputs[0].type.shape.dimensions;
  321. if (shape[0] !== params.w || shape[1] !== params.h || shape[2] !== params.c) {
  322. throw new darknet.Error('Layer before maxpool layer must output image.');
  323. }
  324. const antialiasing = option_find_int(options, 'antialiasing', 0);
  325. const stride = option_find_int(options, 'stride', 1);
  326. const blur_stride_x = option_find_int(options, 'stride_x', stride);
  327. const blur_stride_y = option_find_int(options, 'stride_y', stride);
  328. const stride_x = antialiasing ? 1 : blur_stride_x;
  329. const stride_y = antialiasing ? 1 : blur_stride_y;
  330. const size = option_find_int(options, 'size', stride);
  331. const padding = option_find_int(options, 'padding', size - 1);
  332. const out_channels = option_find_int(options, 'out_channels', 1);
  333. const maxpool_depth = option_find_int(options, 'maxpool_depth', 0);
  334. if (maxpool_depth) {
  335. layer.out_c = out_channels;
  336. layer.out_w = params.w;
  337. layer.out_h = params.h;
  338. }
  339. else {
  340. layer.out_w = Math.floor((params.w + padding - size) / stride_x) + 1;
  341. layer.out_h = Math.floor((params.h + padding - size) / stride_y) + 1;
  342. layer.out_c = params.c;
  343. }
  344. if (antialiasing) {
  345. const blur_size = antialiasing === 2 ? 2 : 3;
  346. const blur_pad = antialiasing === 2 ? 0 : Math.floor(blur_size / 3);
  347. layer.input_layer = { weights: [], outputs: layer.outputs };
  348. make_convolutional_layer(layer.input_layer, '', layer.out_h, layer.out_w, layer.out_c, layer.out_c, layer.out_c, blur_size, blur_stride_x, blur_stride_y, blur_pad, 0);
  349. layer.out_w = layer.input_layer.out_w;
  350. layer.out_h = layer.input_layer.out_h;
  351. layer.out_c = layer.input_layer.out_c;
  352. }
  353. else {
  354. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ layer.out_w, layer.out_h, layer.out_c ], 'maxpool'));
  355. }
  356. layer.out = layer.out_w * layer.out_h * layer.out_c;
  357. break;
  358. }
  359. case 'avgpool': {
  360. const shape = layer.inputs[0].type.shape.dimensions;
  361. if (shape[0] !== params.w || shape[1] !== params.h || shape[2] !== params.c) {
  362. throw new darknet.Error('Layer before avgpool layer must output image.');
  363. }
  364. layer.out_w = 1;
  365. layer.out_h = 1;
  366. layer.out_c = params.c;
  367. layer.out = layer.out_c;
  368. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ layer.out_w, layer.out_h, layer.out_c ], 'avgpool'));
  369. break;
  370. }
  371. case 'crnn': {
  372. const size = option_find_int(options, 'size', 3);
  373. const stride = option_find_int(options, 'stride', 1);
  374. const output_filters = option_find_int(options, 'output', 1);
  375. const hidden_filters = option_find_int(options, 'hidden', 1);
  376. const groups = option_find_int(options, 'groups', 1);
  377. const pad = option_find_int(options, 'pad', 0);
  378. const padding = pad ? (size >> 1) : option_find_int(options, 'padding', 0);
  379. const batch_normalize = option_find_int(options, 'batch_normalize', 0);
  380. layer.input_layer = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  381. make_convolutional_layer(layer.input_layer, 'input_', params.h, params.w, params.c, hidden_filters, groups, size, stride, stride, padding, batch_normalize);
  382. layer.self_layer = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  383. make_convolutional_layer(layer.self_layer, 'self_', params.h, params.w, hidden_filters, hidden_filters, groups, size, stride, stride, padding, batch_normalize);
  384. layer.output_layer = { weights: [], outputs: layer.outputs };
  385. make_convolutional_layer(layer.output_layer, 'output_', params.h, params.w, hidden_filters, output_filters, groups, size, stride, stride, padding, batch_normalize);
  386. layer.weights = layer.weights.concat(layer.input_layer.weights);
  387. layer.weights = layer.weights.concat(layer.self_layer.weights);
  388. layer.weights = layer.weights.concat(layer.output_layer.weights);
  389. layer.out_h = layer.output_layer.out_h;
  390. layer.out_w = layer.output_layer.out_w;
  391. layer.out_c = output_filters;
  392. layer.out = layer.output_layer.out;
  393. break;
  394. }
  395. case 'rnn': {
  396. const outputs = option_find_int(options, 'output', 1);
  397. const hidden = option_find_int(options, 'hidden', 1);
  398. const batch_normalize = option_find_int(options, 'batch_normalize', 0);
  399. const inputs = params.inputs;
  400. layer.input_layer = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  401. make_connected_layer(layer.input_layer, 'input_', inputs, hidden, batch_normalize);
  402. layer.self_layer = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  403. make_connected_layer(layer.self_layer, 'self_', hidden, hidden, batch_normalize);
  404. layer.output_layer = { weights: [], outputs: layer.outputs };
  405. make_connected_layer(layer.output_layer, 'output_', hidden, outputs, batch_normalize);
  406. layer.weights = layer.weights.concat(layer.input_layer.weights);
  407. layer.weights = layer.weights.concat(layer.self_layer.weights);
  408. layer.weights = layer.weights.concat(layer.output_layer.weights);
  409. layer.out_w = 1;
  410. layer.out_h = 1;
  411. layer.out_c = outputs;
  412. layer.out = outputs;
  413. break;
  414. }
  415. case 'gru': {
  416. const inputs = params.inputs;
  417. const outputs = option_find_int(options, 'output', 1);
  418. const batch_normalize = option_find_int(options, 'batch_normalize', 0);
  419. layer.input_z_layer = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  420. make_connected_layer(layer.input_z_layer, 'input_z', inputs, outputs, batch_normalize);
  421. layer.state_z_layer = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  422. make_connected_layer(layer.state_z_layer, 'state_z', outputs, outputs, batch_normalize);
  423. layer.input_r_layer = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  424. make_connected_layer(layer.input_r_layer, 'input_r', inputs, outputs, batch_normalize);
  425. layer.state_r_layer = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  426. make_connected_layer(layer.state_r_layer, 'state_r', outputs, outputs, batch_normalize);
  427. layer.input_h_layer = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  428. make_connected_layer(layer.input_h_layer, 'input_h', inputs, outputs, batch_normalize);
  429. layer.state_h_layer = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  430. make_connected_layer(layer.state_h_layer, 'state_h', outputs, outputs, batch_normalize);
  431. layer.weights = layer.weights.concat(layer.input_z_layer.weights);
  432. layer.weights = layer.weights.concat(layer.state_z_layer.weights);
  433. layer.weights = layer.weights.concat(layer.input_r_layer.weights);
  434. layer.weights = layer.weights.concat(layer.state_r_layer.weights);
  435. layer.weights = layer.weights.concat(layer.input_h_layer.weights);
  436. layer.weights = layer.weights.concat(layer.state_h_layer.weights);
  437. layer.out = outputs;
  438. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ outputs ], 'gru'));
  439. break;
  440. }
  441. case 'lstm': {
  442. const inputs = params.inputs;
  443. const outputs = option_find_int(options, 'output', 1);
  444. const batch_normalize = option_find_int(options, 'batch_normalize', 0);
  445. layer.uf = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  446. make_connected_layer(layer.uf, 'uf_', inputs, outputs, batch_normalize);
  447. layer.ui = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  448. make_connected_layer(layer.ui, 'ui_', inputs, outputs, batch_normalize);
  449. layer.ug = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  450. make_connected_layer(layer.ug, 'ug_', inputs, outputs, batch_normalize);
  451. layer.uo = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  452. make_connected_layer(layer.uo, 'uo_', inputs, outputs, batch_normalize);
  453. layer.wf = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  454. make_connected_layer(layer.wf, 'wf_', outputs, outputs, batch_normalize);
  455. layer.wi = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  456. make_connected_layer(layer.wi, 'wi_', outputs, outputs, batch_normalize);
  457. layer.wg = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  458. make_connected_layer(layer.wg, 'wg_', outputs, outputs, batch_normalize);
  459. layer.wo = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  460. make_connected_layer(layer.wo, 'wo_', outputs, outputs, batch_normalize);
  461. layer.weights = layer.weights.concat(layer.uf.weights);
  462. layer.weights = layer.weights.concat(layer.ui.weights);
  463. layer.weights = layer.weights.concat(layer.ug.weights);
  464. layer.weights = layer.weights.concat(layer.uo.weights);
  465. layer.weights = layer.weights.concat(layer.wf.weights);
  466. layer.weights = layer.weights.concat(layer.wi.weights);
  467. layer.weights = layer.weights.concat(layer.wg.weights);
  468. layer.weights = layer.weights.concat(layer.wo.weights);
  469. layer.out_w = 1;
  470. layer.out_h = 1;
  471. layer.out_c = outputs;
  472. layer.out = outputs;
  473. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ outputs ], 'lstm'));
  474. weights = null;
  475. break;
  476. }
  477. case 'conv_lstm': {
  478. const size = option_find_int(options, "size", 3);
  479. const stride = option_find_int(options, "stride", 1);
  480. const output_filters = option_find_int(options, "output", 1);
  481. const groups = option_find_int(options, "groups", 1);
  482. const pad = option_find_int(options, "pad", 0);
  483. const padding = pad ? (size >> 1) : option_find_int(options, 'padding', 0);
  484. const batch_normalize = option_find_int(options, 'batch_normalize', 0);
  485. const bottleneck = option_find_int(options, "bottleneck", 0);
  486. const peephole = option_find_int(options, "peephole", 0);
  487. layer.uf = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  488. make_convolutional_layer(layer.uf, 'uf_', params.h, params.w, params.c, output_filters, groups, size, stride, stride, padding, batch_normalize);
  489. layer.ui = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  490. make_convolutional_layer(layer.ui, 'ui_', params.h, params.w, params.c, output_filters, groups, size, stride, stride, padding, batch_normalize);
  491. layer.ug = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  492. make_convolutional_layer(layer.ug, 'ug_', params.h, params.w, params.c, output_filters, groups, size, stride, stride, padding, batch_normalize);
  493. layer.uo = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  494. make_convolutional_layer(layer.uo, 'uo_', params.h, params.w, params.c, output_filters, groups, size, stride, stride, padding, batch_normalize);
  495. layer.weights = layer.weights.concat(layer.uf.weights);
  496. layer.weights = layer.weights.concat(layer.ui.weights);
  497. layer.weights = layer.weights.concat(layer.ug.weights);
  498. layer.weights = layer.weights.concat(layer.uo.weights);
  499. if (bottleneck) {
  500. layer.wf = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  501. make_convolutional_layer(layer.wf, 'wf_', params.h, params.w, output_filters * 2, output_filters, groups, size, stride, stride, padding, batch_normalize);
  502. layer.weights = layer.weights.concat(layer.wf.weights);
  503. }
  504. else {
  505. layer.wf = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  506. make_convolutional_layer(layer.wf, 'wf_', params.h, params.w, output_filters, output_filters, groups, size, stride, stride, padding, batch_normalize);
  507. layer.wi = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  508. make_convolutional_layer(layer.wi, 'wi_', params.h, params.w, output_filters, output_filters, groups, size, stride, stride, padding, batch_normalize);
  509. layer.wg = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  510. make_convolutional_layer(layer.wg, 'wg_', params.h, params.w, output_filters, output_filters, groups, size, stride, stride, padding, batch_normalize);
  511. layer.wo = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  512. make_convolutional_layer(layer.wo, 'wo_', params.h, params.w, output_filters, output_filters, groups, size, stride, stride, padding, batch_normalize);
  513. layer.weights = layer.weights.concat(layer.wf.weights);
  514. layer.weights = layer.weights.concat(layer.wi.weights);
  515. layer.weights = layer.weights.concat(layer.wg.weights);
  516. layer.weights = layer.weights.concat(layer.wo.weights);
  517. }
  518. if (peephole) {
  519. layer.vf = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  520. make_convolutional_layer(layer.vf, 'vf_', params.h, params.w, output_filters, output_filters, groups, size, stride, stride, padding, batch_normalize);
  521. layer.vi = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  522. make_convolutional_layer(layer.vi, 'vi_', params.h, params.w, output_filters, output_filters, groups, size, stride, stride, padding, batch_normalize);
  523. layer.vo = { weights: [], outputs: [ new darknet.Argument('', null, null) ] };
  524. make_convolutional_layer(layer.wo, 'vo_', params.h, params.w, output_filters, output_filters, groups, size, stride, stride, padding, batch_normalize);
  525. layer.weights = layer.weights.concat(layer.vf.weights);
  526. layer.weights = layer.weights.concat(layer.vi.weights);
  527. layer.weights = layer.weights.concat(layer.vo.weights);
  528. }
  529. layer.out_h = layer.uo.out_h;
  530. layer.out_w = layer.uo.out_w;
  531. layer.out_c = output_filters;
  532. layer.out = layer.out_h * layer.out_w * layer.out_c;
  533. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ layer.out_w, layer.out_h, layer.out_c ], 'conv_lstm'));
  534. break;
  535. }
  536. case 'softmax': {
  537. layer.out_w = params.w;
  538. layer.out_h = params.h;
  539. layer.out_c = params.c;
  540. layer.out = params.inputs;
  541. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ layer.out ], 'softmax'));
  542. break;
  543. }
  544. case 'dropout': {
  545. layer.out_w = params.w;
  546. layer.out_h = params.h;
  547. layer.out_c = params.c;
  548. layer.out = params.inputs;
  549. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ layer.out_w, layer.out_h, layer.out_c ], 'dropout'));
  550. break;
  551. }
  552. case 'upsample': {
  553. const stride = option_find_int(options, 'stride', 2);
  554. layer.out_w = params.w * stride;
  555. layer.out_h = params.h * stride;
  556. layer.out_c = params.c;
  557. layer.out = layer.out_w * layer.out_h * layer.out_c;
  558. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ layer.out_w, layer.out_h, layer.out_c ], 'upsample'));
  559. break;
  560. }
  561. case 'crop': {
  562. const shape = layer.inputs[0].type.shape.dimensions;
  563. if (shape[0] !== params.w || shape[1] !== params.h || shape[2] !== params.c) {
  564. throw new darknet.Error('Layer before crop layer must output image.');
  565. }
  566. const crop_height = option_find_int(options, 'crop_height', 1);
  567. const crop_width = option_find_int(options, 'crop_width', 1);
  568. layer.out_w = crop_width;
  569. layer.out_h = crop_height;
  570. layer.out_c = params.c;
  571. layer.out = layer.out_w * layer.out_h * layer.out_c;
  572. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ layer.out_w, layer.out_h, layer.out_c ], 'crop'));
  573. break;
  574. }
  575. case 'yolo': {
  576. const classes = option_find_int(options, 'classes', 20);
  577. const n = option_find_int(options, 'num', 1);
  578. layer.out_h = params.h;
  579. layer.out_w = params.w;
  580. layer.out_c = n * (classes + 4 + 1);
  581. layer.out = layer.out_h * layer.out_w * layer.out_c;
  582. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ layer.out_w, layer.out_h, layer.out_c ], 'yolo'));
  583. break;
  584. }
  585. case 'Gaussian_yolo': {
  586. const classes = option_find_int(options, 'classes', 20);
  587. const n = option_find_int(options, 'num', 1);
  588. layer.out_h = params.h;
  589. layer.out_w = params.w;
  590. layer.out_c = n * (classes + 8 + 1);
  591. layer.out = layer.out_h * layer.out_w * layer.out_c;
  592. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ layer.out_w, layer.out_h, layer.out_c ], 'Gaussian_yolo'));
  593. break;
  594. }
  595. case 'region': {
  596. const coords = option_find_int(options, 'coords', 4);
  597. const classes = option_find_int(options, 'classes', 20);
  598. const num = option_find_int(options, 'num', 1);
  599. layer.out = params.h * params.w * num * (classes + coords + 1);
  600. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ params.h, params.w, num, (classes + coords + 1) ], 'region'));
  601. break;
  602. }
  603. case 'cost': {
  604. layer.out = params.inputs;
  605. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ layer.out ], 'cost'));
  606. break;
  607. }
  608. case 'reorg': {
  609. const stride = option_find_int(options, 'stride', 1);
  610. const reverse = option_find_int(options, 'reverse', 0);
  611. const extra = option_find_int(options, 'extra', 0);
  612. if (reverse) {
  613. layer.out_w = params.w * stride;
  614. layer.out_h = params.h * stride;
  615. layer.out_c = Math.floor(params.c / (stride * stride));
  616. }
  617. else {
  618. layer.out_w = Math.floor(params.w / stride);
  619. layer.out_h = Math.floor(params.h / stride);
  620. layer.out_c = params.c * (stride * stride);
  621. }
  622. layer.out = layer.out_h * layer.out_w * layer.out_c;
  623. if (extra) {
  624. layer.out_w = 0;
  625. layer.out_h = 0;
  626. layer.out_c = 0;
  627. layer.out = (params.h * params.w * params.c) + extra;
  628. }
  629. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ layer.out ], 'reorg'));
  630. break;
  631. }
  632. case 'route': {
  633. const layers = [].concat(layer.layers);
  634. const groups = option_find_int(options, 'groups', 1);
  635. layer.out = 0;
  636. for (const next of layers) {
  637. layer.out += next.outputs / groups;
  638. }
  639. if (layers.length > 0) {
  640. const first = layers.shift();
  641. layer.out_w = first.out_w;
  642. layer.out_h = first.out_h;
  643. layer.out_c = first.out_c / groups;
  644. while (layers.length > 0) {
  645. const next = layers.shift();
  646. if (next.out_w === first.out_w && next.out_h === first.out_h) {
  647. layer.out_c += next.out_c;
  648. continue;
  649. }
  650. infer = false;
  651. break;
  652. }
  653. if (infer) {
  654. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ layer.out_w, layer.out_h, layer.out_c ], 'route'));
  655. }
  656. }
  657. else {
  658. infer = false;
  659. }
  660. if (!infer) {
  661. layer.out_h = 0;
  662. layer.out_w = 0;
  663. layer.out_c = 0;
  664. }
  665. break;
  666. }
  667. case 'sam':
  668. case 'scale_channels': {
  669. const activation = option_find_str(options, 'activation', 'linear');
  670. layer.out_w = layer.from.out_w;
  671. layer.out_h = layer.from.out_h;
  672. layer.out_c = layer.from.out_c;
  673. layer.out = layer.out_w * layer.out_h * layer.out_c;
  674. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ layer.out_w, layer.out_h, layer.out_c ], 'shortcut|scale_channels|sam'));
  675. if (activation !== 'linear') {
  676. section.chain.push({ type: activation });
  677. }
  678. break;
  679. }
  680. case 'shortcut': {
  681. const activation = option_find_str(options, 'activation', 'linear');
  682. layer.out_w = params.w;
  683. layer.out_h = params.h;
  684. layer.out_c = params.c;
  685. layer.out = params.w * params.h * params.c;
  686. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ params.w, params.h, params.c ], 'shortcut|scale_channels|sam'));
  687. if (activation !== 'linear') {
  688. section.chain.push({ type: activation });
  689. }
  690. break;
  691. }
  692. case 'detection': {
  693. layer.out_w = params.w;
  694. layer.out_h = params.h;
  695. layer.out_c = params.c;
  696. layer.out = params.inputs;
  697. layer.outputs[0].type = new darknet.TensorType('float32', make_shape([ layer.out ], 'detection'));
  698. break;
  699. }
  700. default: {
  701. infer = false;
  702. break;
  703. }
  704. }
  705. params.h = layer.out_h;
  706. params.w = layer.out_w;
  707. params.c = layer.out_c;
  708. params.inputs = layer.out;
  709. params.last = section;
  710. }
  711. params.arguments = layer.outputs;
  712. }
  713. for (let i = 0; i < sections.length; i++) {
  714. this._nodes.push(new darknet.Node(metadata, net, sections[i]));
  715. }
  716. if (weights) {
  717. weights.validate();
  718. }
  719. }
  720. get inputs() {
  721. return this._inputs;
  722. }
  723. get outputs() {
  724. return this._outputs;
  725. }
  726. get nodes() {
  727. return this._nodes;
  728. }
  729. };
  730. darknet.Parameter = class {
  731. constructor(name, visible, args) {
  732. this._name = name;
  733. this._visible = visible;
  734. this._arguments = args;
  735. }
  736. get name() {
  737. return this._name;
  738. }
  739. get visible() {
  740. return this._visible;
  741. }
  742. get arguments() {
  743. return this._arguments;
  744. }
  745. };
  746. darknet.Argument = class {
  747. constructor(name, type, initializer) {
  748. if (typeof name !== 'string') {
  749. throw new darknet.Error("Invalid argument identifier '" + JSON.stringify(name) + "'.");
  750. }
  751. this._name = name;
  752. this._type = type;
  753. this._initializer = initializer;
  754. }
  755. get name() {
  756. return this._name;
  757. }
  758. get type() {
  759. if (this._initializer) {
  760. return this._initializer.type;
  761. }
  762. return this._type;
  763. }
  764. set type(value) {
  765. if (this._type) {
  766. throw new darknet.Error('Invalid argument type set operation.');
  767. }
  768. this._type = value;
  769. }
  770. get initializer() {
  771. return this._initializer;
  772. }
  773. };
  774. darknet.Node = class {
  775. constructor(metadata, net, section) {
  776. this._name = section.name || '';
  777. this._location = section.line !== undefined ? section.line.toString() : undefined;
  778. this._metadata = metadata;
  779. this._type = section.type;
  780. this._attributes = [];
  781. this._inputs = [];
  782. this._outputs = [];
  783. this._chain = [];
  784. const layer = section.layer;
  785. if (layer && layer.inputs && layer.inputs.length > 0) {
  786. this._inputs.push(new darknet.Parameter(layer.inputs.length <= 1 ? 'input' : 'inputs', true, layer.inputs));
  787. }
  788. if (layer && layer.weights && layer.weights.length > 0) {
  789. this._inputs = this._inputs.concat(layer.weights);
  790. }
  791. if (layer && layer.outputs && layer.outputs.length > 0) {
  792. this._outputs.push(new darknet.Parameter(layer.outputs.length <= 1 ? 'output' : 'outputs', true, layer.outputs));
  793. }
  794. if (section.chain) {
  795. for (const chain of section.chain) {
  796. this._chain.push(new darknet.Node(metadata, net, chain, ''));
  797. }
  798. }
  799. const options = section.options;
  800. if (options) {
  801. for (const key of Object.keys(options)) {
  802. this._attributes.push(new darknet.Attribute(metadata.attribute(this._type, key), key, options[key]));
  803. }
  804. }
  805. }
  806. get name() {
  807. return this._name;
  808. }
  809. get location() {
  810. return this._location;
  811. }
  812. get type() {
  813. return this._type;
  814. }
  815. get metadata() {
  816. return this._metadata.type(this._type);
  817. }
  818. get attributes() {
  819. return this._attributes;
  820. }
  821. get inputs() {
  822. return this._inputs;
  823. }
  824. get outputs() {
  825. return this._outputs;
  826. }
  827. get chain() {
  828. return this._chain;
  829. }
  830. };
  831. darknet.Attribute = class {
  832. constructor(schema, name, value) {
  833. this._name = name;
  834. this._value = value;
  835. if (schema) {
  836. this._type = schema.type || '';
  837. switch (this._type) {
  838. case 'int32': {
  839. const number = parseInt(this._value, 10);
  840. if (Number.isInteger(number)) {
  841. this._value = number;
  842. }
  843. break;
  844. }
  845. case 'float32': {
  846. const number = parseFloat(this._value);
  847. if (!isNaN(number)) {
  848. this._value = number;
  849. }
  850. break;
  851. }
  852. case 'int32[]': {
  853. const numbers = this._value.split(',').map((item) => parseInt(item.trim(), 10));
  854. if (numbers.every((number) => Number.isInteger(number))) {
  855. this._value = numbers;
  856. }
  857. break;
  858. }
  859. }
  860. if (Object.prototype.hasOwnProperty.call(schema, 'visible') && !schema.visible) {
  861. this._visible = false;
  862. }
  863. else if (Object.prototype.hasOwnProperty.call(schema, 'default')) {
  864. if (this._value == schema.default) {
  865. this._visible = false;
  866. }
  867. }
  868. }
  869. }
  870. get name() {
  871. return this._name;
  872. }
  873. get type() {
  874. return this._type;
  875. }
  876. get value() {
  877. return this._value;
  878. }
  879. get visible() {
  880. return this._visible == false ? false : true;
  881. }
  882. };
  883. darknet.Tensor = class {
  884. constructor(type, data) {
  885. this._type = type;
  886. this._data = data;
  887. }
  888. get kind() {
  889. return 'Tensor';
  890. }
  891. get name() {
  892. return '';
  893. }
  894. get type() {
  895. return this._type;
  896. }
  897. get state() {
  898. return this._context().state;
  899. }
  900. get value() {
  901. const context = this._context();
  902. if (context.state) {
  903. return null;
  904. }
  905. context.limit = Number.MAX_SAFE_INTEGER;
  906. return this._decode(context, 0);
  907. }
  908. toString() {
  909. const context = this._context();
  910. if (context.state) {
  911. return '';
  912. }
  913. context.limit = 10000;
  914. const value = this._decode(context, 0);
  915. return JSON.stringify(value, null, 4);
  916. }
  917. _context() {
  918. const context = {};
  919. if (!this._data) {
  920. context.state = 'Tensor data is empty.';
  921. return context;
  922. }
  923. context.state = null;
  924. context.position = 0;
  925. context.count = 0;
  926. context.dataView = new DataView(this._data.buffer, this._data.byteOffset, this._data.byteLength);
  927. context.dimensions = this.type.shape.dimensions;
  928. return context;
  929. }
  930. _decode(context, dimension) {
  931. const results = [];
  932. const size = context.dimensions[dimension];
  933. if (dimension == context.dimensions.length - 1) {
  934. for (let i = 0; i < size; i++) {
  935. if (context.count > context.limit) {
  936. results.push('...');
  937. return results;
  938. }
  939. results.push(context.dataView.getFloat32(context.position, true));
  940. context.position += 4;
  941. context.count++;
  942. }
  943. }
  944. else {
  945. for (let j = 0; j < size; j++) {
  946. if (context.count > context.limit) {
  947. results.push('...');
  948. return results;
  949. }
  950. results.push(this._decode(context, dimension + 1));
  951. }
  952. }
  953. return results;
  954. }
  955. };
  956. darknet.TensorType = class {
  957. constructor(dataType, shape) {
  958. this._dataType = dataType;
  959. this._shape = shape;
  960. }
  961. get dataType() {
  962. return this._dataType;
  963. }
  964. get shape() {
  965. return this._shape;
  966. }
  967. toString() {
  968. return (this._dataType || '?') + this._shape.toString();
  969. }
  970. };
  971. darknet.TensorShape = class {
  972. constructor(dimensions) {
  973. if (dimensions.some((dimension) => dimension === 0 || dimension === undefined || isNaN(dimension))) {
  974. throw new darknet.Error("Invalid tensor shape '" + JSON.stringify(dimensions) + "'.");
  975. }
  976. this._dimensions = dimensions;
  977. }
  978. get dimensions() {
  979. return this._dimensions;
  980. }
  981. toString() {
  982. if (this._dimensions) {
  983. if (this._dimensions.length == 0) {
  984. return '';
  985. }
  986. return '[' + this._dimensions.map((dimension) => dimension.toString()).join(',') + ']';
  987. }
  988. return '';
  989. }
  990. };
  991. darknet.Weights = class {
  992. constructor(buffer) {
  993. this._buffer = buffer;
  994. this._dataView = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength);
  995. this._position = 0;
  996. const major = this.int32();
  997. const minor = this.int32();
  998. const revision = this.int32();
  999. this._seen = ((major * 10 + minor) >= 2) ? this.int64() : this.int32();
  1000. const transpose = (major > 1000) || (minor > 1000);
  1001. if (transpose) {
  1002. throw new darknet.Error("Unsupported transpose weights file version '" + [ major, minor, revision ].join('.') + "'.");
  1003. }
  1004. }
  1005. int32() {
  1006. const position = this._position;
  1007. this.skip(4);
  1008. return this._dataView.getInt32(position, true);
  1009. }
  1010. int64() {
  1011. const position = this._position;
  1012. this.skip(8);
  1013. return this._dataView.getInt64(position, true);
  1014. }
  1015. bytes(length) {
  1016. const position = this._position;
  1017. this.skip(length);
  1018. return this._buffer.subarray(position, this._position);
  1019. }
  1020. skip(offset) {
  1021. this._position += offset;
  1022. if (this._position > this._buffer.length) {
  1023. throw new darknet.Error('Expected ' + (this._position - this._buffer.length) + ' more bytes. The file might be corrupted. Unexpected end of file.');
  1024. }
  1025. }
  1026. validate() {
  1027. if (this._position !== this._buffer.length) {
  1028. throw new darknet.Error('Invalid weights size.');
  1029. }
  1030. }
  1031. };
  1032. darknet.Metadata = class {
  1033. static open(host) {
  1034. if (darknet.Metadata._metadata) {
  1035. return Promise.resolve(darknet.Metadata._metadata);
  1036. }
  1037. return host.request(null, 'darknet-metadata.json', 'utf-8').then((data) => {
  1038. darknet.Metadata._metadata = new darknet.Metadata(data);
  1039. return darknet.Metadata._metadata;
  1040. }).catch(() => {
  1041. darknet.Metadata._metadata = new darknet.Metadata(null);
  1042. return darknet.Metadata._metadata;
  1043. });
  1044. }
  1045. constructor(data) {
  1046. this._map = new Map();
  1047. this._attributeMap = new Map();
  1048. if (data) {
  1049. const items = JSON.parse(data);
  1050. if (items) {
  1051. for (const item of items) {
  1052. if (item && item.name && item.schema) {
  1053. if (this._map.has(item.name)) {
  1054. throw new darknet.Error("Duplicate metadata key '" + item.name + "'.");
  1055. }
  1056. item.schema.name = item.name;
  1057. this._map.set(item.name, item.schema);
  1058. }
  1059. }
  1060. }
  1061. }
  1062. }
  1063. type(name) {
  1064. return this._map.get(name) || null;
  1065. }
  1066. attribute(type, name) {
  1067. const key = type + ':' + name;
  1068. if (!this._attributeMap.has(key)) {
  1069. this._attributeMap.set(key, null);
  1070. const schema = this.type(type);
  1071. if (schema && schema.attributes) {
  1072. for (const attribute of schema.attributes) {
  1073. this._attributeMap.set(type + ':' + attribute.name, attribute);
  1074. }
  1075. }
  1076. }
  1077. return this._attributeMap.get(key);
  1078. }
  1079. };
  1080. darknet.Error = class extends Error {
  1081. constructor(message) {
  1082. super(message);
  1083. this.name = 'Error loading Darknet model.';
  1084. }
  1085. };
  1086. if (typeof module !== 'undefined' && typeof module.exports === 'object') {
  1087. module.exports.ModelFactory = darknet.ModelFactory;
  1088. }