// tf.js
  1. // Experimental
  2. import * as base from './base.js';
  3. import * as protobuf from './protobuf.js';
  4. import * as zip from './zip.js';
  5. const tf = {};
  6. tf.ModelFactory = class {
  7. async match(context) {
  8. const identifier = context.identifier;
  9. const extension = identifier.lastIndexOf('.') > 0 ? identifier.split('.').pop().toLowerCase() : '';
  10. if (extension === 'pbtxt' || extension === 'prototxt' || extension === 'pt' || extension === 'txt') {
  11. if (identifier.endsWith('predict_net.pbtxt') || identifier.endsWith('predict_net.prototxt') ||
  12. identifier.endsWith('init_net.pbtxt') || identifier.endsWith('init_net.prototxt')) {
  13. return null;
  14. }
  15. const tags = await context.tags('pbtxt');
  16. if (['input_stream', 'output_stream', 'input_side_packet', 'output_side_packet'].some((key) => tags.has(key) || tags.has(`node.${key}`))) {
  17. return null;
  18. }
  19. if (tags.has('saved_model_schema_version') || tags.has('meta_graphs')) {
  20. return context.set('tf.pbtxt.SavedModel');
  21. }
  22. if (tags.has('graph_def')) {
  23. return context.set('tf.pbtxt.MetaGraphDef');
  24. }
  25. if (tags.has('node')) {
  26. return context.set('tf.pbtxt.GraphDef');
  27. }
  28. }
  29. if (extension === 'pb' || extension === 'pbtxt' || extension === 'prototxt' || extension === 'graphdef' || extension === 'meta') {
  30. if (identifier.endsWith('predict_net.pb') || identifier.endsWith('init_net.pb')) {
  31. return null;
  32. }
  33. if (identifier === 'tfhub_module.pb') {
  34. const stream = context.stream;
  35. const signature = [0x08, 0x03];
  36. if (signature.length === stream.length && stream.peek(signature.length).every((value, index) => value === signature[index])) {
  37. return null;
  38. }
  39. }
  40. const tags = await context.tags('pb');
  41. if (tags.size > 0) {
  42. if (Array.from(tags).every(([key, value]) => key < 8 && value !== 5)) {
  43. const match = (tags, schema) => {
  44. for (const [key, inner] of schema) {
  45. const value = tags[key];
  46. if (value === undefined) {
  47. continue;
  48. }
  49. if (inner === false) {
  50. return false;
  51. }
  52. if (Array.isArray(inner)) {
  53. if (typeof value !== 'object' || !match(value, inner)) {
  54. return false;
  55. }
  56. } else if (inner !== value) {
  57. if (inner === 2 && !Array.isArray(value) && Object(value) === (value) && Object.keys(value).length === 0) {
  58. return true;
  59. }
  60. return false;
  61. }
  62. }
  63. return true;
  64. };
  65. const signatureGraphDef = [
  66. [1 /* node */, [
  67. [1 /* name */, 2],
  68. [2 /* op */, 2],
  69. [3 /* input */, 2],
  70. [4 /* device */,2],
  71. [5 /* attr */, [
  72. [1,2],
  73. [2,[]]
  74. ]],
  75. [6 /* experimental_debug_info */, []]
  76. ]],
  77. [2 /* library */, []],
  78. [3 /* version */, 0],
  79. [4 /* versions */, [[1,0],[2,0]]]
  80. ];
  81. const signatureMetaGraphDef = [
  82. [1 /* meta_info_def */, [[1,2],[2,[]],[3,[]],/* [4,2], */[6,2],[7,0],[8,[]]]],
  83. [2 /* graph_def */, signatureGraphDef],
  84. [3 /* saver_def */, [[1,2],[2,2],[3,2],[4,0],[5,0],[6,5],[7,0]]],
  85. [4 /* collection_def */,[]],
  86. [5 /* signature_def */, []],
  87. [6 /* asset_file_def */, []],
  88. [7 /* object_graph_def */, []]
  89. ];
  90. const signatureSavedModel = [[1,0],[2,signatureMetaGraphDef]];
  91. // optimization_guide.proto.PageTopicsOverrideList
  92. if (identifier === 'override_list.pb' && tags.size === 1 && tags.get(1) === 2) {
  93. return null;
  94. }
  95. if (tags.size === 1 && tags.get(1) === 2) {
  96. const tags = await context.tags('pb+');
  97. // mediapipe.BoxDetectorIndex
  98. if (match(tags, [[1,[[1,[[1,[[1,5],[2,5],[3,5],[4,5],[6,0],[7,5],[8,5],[10,5],[11,0],[12,0]]],[2,5],[3,[]]]],[2,false],[3,false],[4,false],[5,false]]],[2,false],[3,false]])) {
  99. return null;
  100. }
  101. // third_party.tensorflow.python.keras.protobuf.SavedMetadata
  102. if (match(tags, [[1,[[1,[[1,0],[2,0]]],[2,0],[3,2],[4,2],[5,2]]]])) {
  103. return null;
  104. }
  105. }
  106. if ((!tags.has(1) || tags.get(1) === 0) && tags.get(2) === 2) {
  107. const tags = await context.tags('pb+');
  108. if (match(tags, signatureSavedModel)) {
  109. return context.set('tf.pb.SavedModel');
  110. }
  111. }
  112. if ((!tags.has(1) || tags.get(1) === 2) &&
  113. (!tags.has(2) || tags.get(2) === 2) &&
  114. (!tags.has(3) || tags.get(3) === 2) &&
  115. (!tags.has(4) || tags.get(4) === 2)) {
  116. const tags = await context.tags('pb+');
  117. if (match(tags, signatureMetaGraphDef)) {
  118. return context.set('tf.pb.MetaGraphDef');
  119. }
  120. }
  121. if (tags.get(1) !== 2) {
  122. const tags = await context.tags('pb+');
  123. if (match(tags, signatureGraphDef)) {
  124. return context.set('tf.pb.GraphDef');
  125. }
  126. }
  127. // tensorflow.FingerprintDef
  128. if (identifier === 'fingerprint.pb' &&
  129. tags.get(1) === 0 && tags.get(2) === 0 &&
  130. tags.get(3) === 0 && tags.get(5) === 0 && tags.get(6) === 2) {
  131. return context.set('tf.pb.FingerprintDef');
  132. }
  133. const decode = (buffer, value) => {
  134. try {
  135. const reader = protobuf.BinaryReader.open(buffer);
  136. const length = reader.length;
  137. while (reader.position < length) {
  138. const tag = reader.uint32();
  139. const number = tag >>> 3;
  140. const type = tag & 7;
  141. if (value === number) {
  142. return type === 2 ? reader.bytes() : null;
  143. }
  144. reader.skipType(type);
  145. }
  146. } catch {
  147. // continue regardless of error
  148. }
  149. return null;
  150. };
  151. const stream = context.stream;
  152. const buffer = stream.peek();
  153. const nodeBuffer = decode(buffer, 1);
  154. if (nodeBuffer) {
  155. const nameBuffer = decode(nodeBuffer, 1);
  156. if (nameBuffer) {
  157. const decoder = new TextDecoder('utf-8');
  158. const name = decoder.decode(nameBuffer);
  159. if (Array.from(name).filter((c) => c <= ' ').length < 256) {
  160. return context.set('tf.pb.GraphDef');
  161. }
  162. }
  163. }
  164. }
  165. } else {
  166. const tags = await context.tags('pbtxt');
  167. if (['input_stream', 'output_stream', 'input_side_packet', 'output_side_packet'].some((key) => tags.has(key) || tags.has(`node.${key}`))) {
  168. return null;
  169. }
  170. if (tags.has('node')) {
  171. return context.set('tf.pbtxt.GraphDef');
  172. }
  173. if (tags.has('graph_def')) {
  174. return context.set('tf.pbtxt.MetaGraphDef');
  175. }
  176. if (tags.has('saved_model_schema_version') || tags.has('meta_graphs')) {
  177. return context.set('tf.pbtxt.SavedModel');
  178. }
  179. }
  180. }
  181. if (extension === 'json') {
  182. for (const type of ['json', 'json.gz']) {
  183. /* eslint-disable no-await-in-loop */
  184. const obj = await context.peek(type);
  185. /* eslint-enable no-await-in-loop */
  186. if (obj && obj.modelTopology && (obj.format === 'graph-model' || Array.isArray(obj.modelTopology.node))) {
  187. return context.set(`tf.${type}`);
  188. }
  189. }
  190. }
  191. if (extension === 'index' || extension === 'ckpt') {
  192. const stream = context.stream;
  193. if (stream.length > 8) {
  194. stream.seek(-8);
  195. const buffer = stream.read(8);
  196. stream.seek(0);
  197. const signature = [0x57, 0xfb, 0x80, 0x8b, 0x24, 0x75, 0x47, 0xdb];
  198. if (buffer.every((value, index) => value === signature[index])) {
  199. return context.set('tf.bundle');
  200. }
  201. }
  202. }
  203. if (/.data-[0-9][0-9][0-9][0-9][0-9]-of-[0-9][0-9][0-9][0-9][0-9]$/.exec(identifier)) {
  204. return context.set('tf.data');
  205. }
  206. if (/^events.out.tfevents./.exec(identifier)) {
  207. const stream = context.stream;
  208. if (tf.EventFileReader.open(stream)) {
  209. return context.set('tf.events');
  210. }
  211. }
  212. if (extension === 'pbmm') {
  213. const stream = context.stream;
  214. if (stream.length > 8) {
  215. stream.seek(-8);
  216. const buffer = stream.read(8);
  217. stream.seek(0);
  218. const reader = base.BinaryReader.open(buffer);
  219. const offset = reader.uint64().toNumber();
  220. if (offset < stream.length) {
  221. return context.set('tf.pb.mmap');
  222. }
  223. }
  224. }
  225. if (/^.*group\d+-shard\d+of\d+(\.bin)?$/.test(identifier)) {
  226. return context.set('tf.tfjs.weights');
  227. }
  228. return null;
  229. }
  230. filter(context, match) {
  231. if (context.type === 'tf.bundle' && match.type === 'tf.data') {
  232. return false;
  233. }
  234. if ((context.type === 'tf.json' || context.type === 'tf.json.gz') && match.type === 'tf.tfjs.weights') {
  235. return false;
  236. }
  237. return true;
  238. }
  239. async open(context) {
  240. tf.proto = await context.require('./tf-proto');
  241. const openModel = async (saved_model, format, producer, bundle) => {
  242. const metadata = await context.metadata('tf-metadata.json');
  243. return new tf.Model(metadata, saved_model, format, producer, bundle);
  244. };
  245. const openSavedModel = async (context, saved_model, format, producer) => {
  246. if (format === '') {
  247. format = 'TensorFlow Saved Model';
  248. if (saved_model && saved_model.saved_model_schema_version) {
  249. format = `${format} v${saved_model.saved_model_schema_version}`;
  250. }
  251. }
  252. if (saved_model.meta_graphs.length === 1 &&
  253. saved_model.meta_graphs[0].object_graph_def &&
  254. saved_model.meta_graphs[0].object_graph_def.nodes &&
  255. saved_model.meta_graphs[0].object_graph_def.nodes.length > 0) {
  256. const identifier = 'variables/variables.index';
  257. try {
  258. const content = await context.fetch(identifier);
  259. const stream = content.stream;
  260. const bundle = await tf.TensorBundle.open(stream, identifier, context);
  261. return openModel(saved_model, format, producer, bundle);
  262. } catch {
  263. return openModel(saved_model, format, producer, null);
  264. }
  265. }
  266. if (saved_model && Array.isArray(saved_model.meta_graphs) && saved_model.meta_graphs.length > 0 &&
  267. saved_model.meta_graphs[0].meta_info_def &&
  268. Object.prototype.hasOwnProperty.call(saved_model.meta_graphs[0].meta_info_def, 'tensorflow_version')) {
  269. producer = `TensorFlow v${saved_model.meta_graphs[0].meta_info_def.tensorflow_version}`;
  270. }
  271. return openModel(saved_model, format, producer, null);
  272. };
  273. const openBundle = async (context, stream, identifier) => {
  274. stream = stream || context.stream;
  275. identifier = identifier || context.identifier;
  276. try {
  277. const bundle = await tf.TensorBundle.open(stream, identifier, context);
  278. return openModel(null, `TensorFlow Tensor Bundle v${bundle.format}`, null, bundle);
  279. } catch (error) {
  280. context.error(error, false);
  281. throw error;
  282. }
  283. };
  284. const openData = async (context) => {
  285. const identifier = context.identifier;
  286. const base = identifier.split('.');
  287. base.pop();
  288. const file = `${base.join('.')}.index`;
  289. try {
  290. const content = await context.fetch(file);
  291. const stream = content.stream;
  292. return openBundle(context, stream, file);
  293. } catch {
  294. const file = `${base.join('.')}.ckpt`;
  295. const content = await context.fetch(file);
  296. const stream = content.stream;
  297. return openBundle(context, stream, file);
  298. }
  299. };
// Read a TensorBoard event file and fold any embedded GraphDef/MetaGraphDef
// records into a tensorflow.SavedModel before opening it.
const openEventFile = async (context) => {
    let format = 'TensorFlow Event File';
    let producer = null;
    const stream = context.stream;
    const eventFileReader = tf.EventFileReader.open(stream);
    const saved_model = new tf.proto.tensorflow.SavedModel();
    // NOTE(review): run_metadata and summaries are collected but never read
    // in this function - presumably reserved for later use; confirm.
    const run_metadata = [];
    const summaries = [];
    for (;;) {
        const event = eventFileReader.read();
        if (!event) {
            break; // end of event stream
        }
        switch (event.what) {
            case 'file_version': {
                const formats = new Map([
                    ['brain.Event:1', 'TensorFlow Event File v1'],
                    ['brain.Event:2', 'TensorFlow Event File v2']
                ]);
                if (!formats.has(event.file_version)) {
                    throw new tf.Error(`Unsupported event file version '${event.file_version}'.`);
                }
                format = formats.get(event.file_version);
                break;
            }
            case 'graph_def': {
                // Serialized GraphDef: wrap in a fresh MetaGraphDef tagged
                // with the event wall time.
                const buffer = event.graph_def;
                const reader = protobuf.BinaryReader.open(buffer);
                const graph_def = tf.proto.tensorflow.GraphDef.decode(reader);
                const meta_graph_def = new tf.proto.tensorflow.MetaGraphDef();
                meta_graph_def.meta_info_def = new tf.proto.tensorflow.MetaGraphDef.MetaInfoDef();
                meta_graph_def.meta_info_def.any_info = event.wall_time.toString();
                meta_graph_def.graph_def = graph_def;
                saved_model.meta_graphs.push(meta_graph_def);
                break;
            }
            case 'meta_graph_def': {
                // Serialized MetaGraphDef: append as-is.
                const buffer = event.meta_graph_def;
                const reader = protobuf.BinaryReader.open(buffer);
                const meta_graph_def = tf.proto.tensorflow.MetaGraphDef.decode(reader);
                saved_model.meta_graphs.push(meta_graph_def);
                break;
            }
            case 'summary': {
                for (const value of event.summary.value) {
                    summaries.push(value);
                }
                break;
            }
            case 'tagged_run_metadata': {
                const entry = event.tagged_run_metadata;
                const buffer = entry.run_metadata;
                const reader = protobuf.BinaryReader.open(buffer);
                const metadata = tf.proto.tensorflow.RunMetadata.decode(reader);
                run_metadata.push(metadata);
                break;
            }
            default: {
                throw new tf.Error(`Unsupported event type '${event.what}'.`);
            }
        }
    }
    // If every op name uses a PyTorch namespace, the file came from the
    // PyTorch TensorBoard writer: attach torch operator schemas to nodes.
    // NOTE(review): assumes each meta_graph has graph_def.node - confirm for
    // 'meta_graph_def' events that might omit graph_def.
    if (saved_model.meta_graphs.every((meta_graph) => meta_graph.graph_def.node.every((node) => node.op.startsWith('aten::') || node.op.startsWith('prim::') || node.op.startsWith('quantized::') || node.op === 'IO Node'))) {
        producer = 'PyTorch';
        // Best-effort schema lookup; failures leave nodes unannotated.
        const openPyTorchMetadata = async (context, saved_model) => {
            try {
                const pytorch = await context.require('./pytorch');
                const python = await context.require('./python');
                const metadata = await pytorch.Metadata.open(context);
                const execution = new python.Execution();
                metadata.register(execution);
                const torch = execution.__import__('torch');
                for (const graph of saved_model.meta_graphs) {
                    for (const node of graph.graph_def.node) {
                        const schemas = torch._C._jit_get_schemas_for_operator(node.op);
                        if (Array.isArray(schemas) && schemas.length > 0) {
                            node.__metadata__ = schemas;
                            node.__torch__ = torch;
                        }
                    }
                }
            } catch {
                // continue regardless of error
            }
            return saved_model;
        };
        const updated_saved_model = await openPyTorchMetadata(context, saved_model);
        return await openModel(updated_saved_model, format, producer, null);
    }
    return await openSavedModel(context, saved_model, format, producer);
};
// Open a TensorFlow.js graph-model: parse model.json ('json' or gzipped
// 'json.gz'), rebuild a GraphDef from modelTopology, then splice weight
// shard bytes back into the Const nodes before opening as a SavedModel.
const openJson = async (context, type) => {
    const obj = await context.peek(type);
    if (!obj || !obj.modelTopology || (obj.format !== 'graph-model' && !Array.isArray(obj.modelTopology.node))) {
        throw new tf.Error('File format is not TensorFlow.js graph-model.');
    }
    const format = `TensorFlow.js ${obj.format || 'graph-model'}`;
    const producer = obj.convertedBy || obj.generatedBy || '';
    const meta_graph = new tf.proto.tensorflow.MetaGraphDef();
    meta_graph.graph_def = tf.proto.tensorflow.GraphDef.decodeJson(obj.modelTopology);
    const saved_model = new tf.proto.tensorflow.SavedModel();
    saved_model.meta_graphs.push(meta_graph);
    // Const nodes are the targets that receive weight data below.
    const nodes = new Map();
    for (const node of meta_graph.graph_def.node) {
        node.input = node.input || [];
        if (node.op === 'Const') {
            nodes.set(node.name, node);
        }
    }
    // Map of shard path -> fetch promise (replaced by raw bytes below).
    const shards = new Map();
    const manifests = Array.isArray(obj.weightsManifest) ? obj.weightsManifest : [];
    for (const manifest of manifests) {
        for (const path of manifest.paths) {
            if (!shards.has(path)) {
                shards.set(path, context.fetch(path));
            }
        }
    }
    // Walk each manifest, slicing weights out of the concatenated shard
    // buffer and writing them into the matching Const node tensors.
    const openShards = (shards) => {
        const dtype_size_map = new Map([
            ['float16', 2], ['float32', 4], ['float64', 8],
            ['int8', 1], ['int16', 2], ['int32', 4], ['int64', 8],
            ['uint8', 1], ['uint16', 2], ['uint32', 4], ['uint64', 8],
            ['bool', 1]
        ]);
        for (const manifest of manifests) {
            // Concatenate this manifest's shards into one contiguous buffer;
            // stays null if any shard is missing.
            let buffer = null;
            if (Array.isArray(manifest.paths) && manifest.paths.length > 0 && manifest.paths.every((path) => shards.has(path))) {
                const list = manifest.paths.map((path) => shards.get(path));
                const size = list.reduce((a, b) => a + b.length, 0);
                buffer = new Uint8Array(size);
                let offset = 0;
                for (const item of list) {
                    buffer.set(item, offset);
                    offset += item.length;
                }
            }
            let offset = 0;
            for (const weight of manifest.weights) {
                // Quantized weights are stored using the quantization dtype.
                const dtype = weight.quantization && weight.quantization.dtype ? weight.quantization.dtype : weight.dtype;
                const size = weight.shape.reduce((a, b) => a * b, 1);
                switch (dtype) {
                    case 'string': {
                        // Strings are length-prefixed; read 'size' of them.
                        const data = [];
                        if (buffer && size > 0) {
                            const reader = new tf.BinaryReader(buffer.subarray(offset));
                            for (let i = 0; i < size; i++) {
                                data[i] = reader.string();
                            }
                            offset += reader.position;
                        }
                        if (nodes.has(weight.name)) {
                            const node = nodes.get(weight.name);
                            node.attr.value.tensor.dtype = tf.Utility.dataTypeKey(dtype);
                            node.attr.value.tensor.string_val = data;
                        }
                        break;
                    }
                    default: {
                        if (!dtype_size_map.has(dtype)) {
                            throw new tf.Error(`Unsupported weight data type size '${dtype}'.`);
                        }
                        // Fixed-size dtypes: slice itemsize * element count.
                        const itemsize = dtype_size_map.get(dtype);
                        const length = itemsize * size;
                        const tensor_content = buffer ? buffer.slice(offset, offset + length) : null;
                        offset += length;
                        if (nodes.has(weight.name)) {
                            const node = nodes.get(weight.name);
                            node.attr.value.tensor.dtype = tf.Utility.dataTypeKey(dtype);
                            node.attr.value.tensor.tensor_content = tensor_content;
                        }
                        break;
                    }
                }
            }
        }
        return openSavedModel(context, saved_model, format, producer);
    };
    try {
        // Resolve all shard fetches and swap promises for raw bytes.
        const contexts = await Promise.all(shards.values());
        for (const key of shards.keys()) {
            const context = contexts.shift();
            const buffer = context.stream.peek();
            shards.set(key, buffer);
        }
        if (type === 'json.gz') {
            // Shards of a gzipped model may themselves be gzip archives.
            try {
                for (const key of shards.keys()) {
                    const stream = shards.get(key);
                    const archive = zip.Archive.open(stream, 'gzip');
                    if (archive && archive.entries.size === 1) {
                        const stream = archive.entries.values().next().value;
                        const buffer = stream.peek();
                        shards.set(key, buffer);
                    }
                }
            } catch {
                // continue regardless of error
            }
        }
        return openShards(shards);
    } catch {
        // Weights unavailable: open the topology without tensor data.
        shards.clear();
        return openShards(shards);
    }
};
  506. const openJsonWeights = async (context) => {
  507. const content = await context.fetch('model.json');
  508. return await openJson(content, 'json');
  509. };
  510. const openTextGraphDef = async (context) => {
  511. try {
  512. const reader = await context.read('protobuf.text');
  513. const graph_def = tf.proto.tensorflow.GraphDef.decodeText(reader);
  514. const meta_graph = new tf.proto.tensorflow.MetaGraphDef();
  515. meta_graph.graph_def = graph_def;
  516. const saved_model = new tf.proto.tensorflow.SavedModel();
  517. saved_model.meta_graphs.push(meta_graph);
  518. const format = 'TensorFlow Graph';
  519. return openSavedModel(context, saved_model, format, null);
  520. } catch (error) {
  521. const message = error && error.message ? error.message : error.toString();
  522. throw new tf.Error(`File text format is not tensorflow.GraphDef (${message.replace(/\.$/, '')}).`);
  523. }
  524. };
  525. const openTextMetaGraphDef = async (context) => {
  526. try {
  527. const reader = await context.read('protobuf.text');
  528. const meta_graph = tf.proto.tensorflow.MetaGraphDef.decodeText(reader);
  529. const saved_model = new tf.proto.tensorflow.SavedModel();
  530. saved_model.meta_graphs.push(meta_graph);
  531. const format = 'TensorFlow MetaGraph';
  532. return openSavedModel(context, saved_model, format, null);
  533. } catch (error) {
  534. throw new tf.Error(`File text format is not tensorflow.MetaGraphDef (${error.message}).`);
  535. }
  536. };
  537. const openTextSavedModel = async (context) => {
  538. try {
  539. const reader = await context.read('protobuf.text');
  540. return tf.proto.tensorflow.SavedModel.decodeText(reader);
  541. } catch (error) {
  542. throw new tf.Error(`File text format is not tensorflow.SavedModel (${error.message}).`);
  543. }
  544. };
  545. const openBinaryGraphDef = async (context) => {
  546. let saved_model = null;
  547. const format = 'TensorFlow Graph';
  548. try {
  549. const reader = await context.read('protobuf.binary');
  550. const graph_def = tf.proto.tensorflow.GraphDef.decode(reader);
  551. const meta_graph = new tf.proto.tensorflow.MetaGraphDef();
  552. meta_graph.graph_def = graph_def;
  553. saved_model = new tf.proto.tensorflow.SavedModel();
  554. saved_model.meta_graphs.push(meta_graph);
  555. } catch (error) {
  556. const message = error && error.message ? error.message : error.toString();
  557. throw new tf.Error(`File format is not tensorflow.GraphDef (${message.replace(/\.$/, '')}).`);
  558. }
  559. return openSavedModel(context, saved_model, format, null);
  560. };
  561. const openBinaryMetaGraphDef = async (context) => {
  562. let saved_model = null;
  563. const format = 'TensorFlow MetaGraph';
  564. try {
  565. const reader = await context.read('protobuf.binary');
  566. const meta_graph = tf.proto.tensorflow.MetaGraphDef.decode(reader);
  567. saved_model = new tf.proto.tensorflow.SavedModel();
  568. saved_model.meta_graphs.push(meta_graph);
  569. } catch (error) {
  570. const message = error && error.message ? error.message : error.toString();
  571. throw new tf.Error(`File format is not tensorflow.MetaGraphDef (${message.replace(/\.$/, '')}).`);
  572. }
  573. return openSavedModel(context, saved_model, format, null);
  574. };
  575. const openBinarySavedModel = async (context) => {
  576. try {
  577. const reader = await context.read('protobuf.binary');
  578. return tf.proto.tensorflow.SavedModel.decode(reader);
  579. } catch (error) {
  580. const message = error && error.message ? error.message : error.toString();
  581. throw new tf.Error(`File format is not tensorflow.SavedModel (${message.replace(/\.$/, '')}).`);
  582. }
  583. };
  584. const openFingerprint = async (context) => {
  585. let format = '';
  586. let saved_model = null;
  587. try {
  588. const identifier = 'saved_model.pb';
  589. const content = await context.fetch(identifier);
  590. saved_model = await openBinarySavedModel(content);
  591. } catch {
  592. format = 'TensorFlow Fingerprint';
  593. saved_model = new tf.proto.tensorflow.SavedModel();
  594. }
  595. const reader = await context.read('protobuf.binary');
  596. saved_model.fingerprint = tf.proto.tensorflow.FingerprintDef.decode(reader);
  597. return await openSavedModel(context, saved_model, format, null);
  598. };
// Opens a TensorFlow memmapped-file-system package. Layout (per
// tensorflow/core/util/memmapped_file_system): tensor/graph regions, then a
// serialized MemmappedFileSystemDirectory, then a trailing uint64 giving the
// directory's offset.
const openMemmapped = async (context) => {
    const stream = context.stream;
    // The last 8 bytes of the file are a little-endian uint64 directory offset.
    const readDirectoryOffset = (stream) => {
        stream.seek(-8);
        stream = stream.stream(8);
        const reader = base.BinaryReader.open(stream);
        return reader.uint64().toNumber();
    };
    // Decodes the directory protobuf located between 'offset' and the
    // trailing 8-byte offset field.
    const readDirectory = (stream, offset) => {
        const end = stream.position - 8;
        stream.seek(offset);
        stream = stream.stream(end - offset);
        const reader = protobuf.BinaryReader.open(stream);
        return tf.proto.tensorflow.MemmappedFileSystemDirectory.decode(reader);
    };
    const offset = readDirectoryOffset(stream);
    const directory = readDirectory(stream, offset);
    // Collect each region as { offset, length }; bigint fields are narrowed
    // to Number for stream arithmetic.
    const elements = new Map();
    for (const element of directory.element) {
        const name = element.name;
        if (elements.has(name)) {
            throw new tf.Error(`Memory mapped file directory contains duplicate '${name}'.`);
        }
        elements.set(name, {
            offset: typeof element.offset === 'bigint' ? Number(element.offset) : element.offset,
            length: typeof element.length === 'bigint' ? Number(element.length) : element.length
        });
    }
    // Regions with length 0 extend to the next region start (or to the
    // directory itself, whose offset is appended as a sentinel).
    const offsets = Array.from(elements).map(([, value]) => value.offset);
    offsets.push(offset);
    for (const value of elements.values()) {
        if (value.length === 0) {
            // Math.min on an empty filter result yields Infinity, which the
            // Number.isInteger guard rejects.
            const min = Math.min.apply(null, offsets.filter((offset) => offset > value.offset));
            if (Number.isInteger(min)) {
                value.length = min - value.offset;
            }
        }
    }
    // Materialize every region's bytes.
    for (const [, value] of elements) {
        const offset = value.offset;
        const length = value.length;
        stream.seek(offset);
        value.buffer = stream.read(length);
    }
    // The root GraphDef lives under the well-known package key.
    if (!elements.has('memmapped_package://.')) {
        throw new tf.Error('Memory mapped file directory does not contain tensorflow.GraphDef root.');
    }
    const element = elements.get('memmapped_package://.');
    const buffer = element.buffer;
    const reader = protobuf.BinaryReader.open(buffer);
    const graph_def = tf.proto.tensorflow.GraphDef.decode(reader);
    const format = 'TensorFlow GraphDef Memmapped';
    // Wrap the bare GraphDef in a SavedModel/MetaGraphDef shell so the shared
    // openSavedModel path can handle it.
    const meta_graph = new tf.proto.tensorflow.MetaGraphDef();
    meta_graph.graph_def = graph_def;
    const saved_model = new tf.proto.tensorflow.SavedModel();
    saved_model.meta_graphs.push(meta_graph);
    return openSavedModel(context, saved_model, format, null);
};
  657. switch (context.type) {
  658. case 'tf.bundle':
  659. return await openBundle(context);
  660. case 'tf.data':
  661. return await openData(context);
  662. case 'tf.events':
  663. return await openEventFile(context);
  664. case 'tf.json':
  665. return await openJson(context, 'json');
  666. case 'tf.json.gz':
  667. return await openJson(context, 'json.gz');
  668. case 'tf.tfjs.weights':
  669. return await openJsonWeights(context);
  670. case 'tf.pbtxt.GraphDef':
  671. return await openTextGraphDef(context);
  672. case 'tf.pbtxt.MetaGraphDef':
  673. return await openTextMetaGraphDef(context);
  674. case 'tf.pbtxt.SavedModel':
  675. return await openSavedModel(context, await openTextSavedModel(context), '', null);
  676. case 'tf.pb.GraphDef':
  677. return await openBinaryGraphDef(context);
  678. case 'tf.pb.MetaGraphDef':
  679. return await openBinaryMetaGraphDef(context);
  680. case 'tf.pb.SavedModel':
  681. return await openSavedModel(context, await openBinarySavedModel(context), '', null);
  682. case 'tf.pb.FingerprintDef':
  683. return await openFingerprint(context);
  684. case 'tf.pb.mmap':
  685. return await openMemmapped(context);
  686. default:
  687. throw new tf.Error(`Unsupported TensorFlow format '${context.type}'.`);
  688. }
  689. }
  690. };
  691. tf.Model = class {
  692. constructor(metadata, model, format, producer, bundle) {
  693. this.format = format;
  694. this.producer = producer || '';
  695. this.modules = [];
  696. if (model) {
  697. for (let i = 0; i < model.meta_graphs.length; i++) {
  698. const meta_graph = model.meta_graphs[i];
  699. let name = '';
  700. if (meta_graph.meta_info_def && meta_graph.meta_info_def.any_info) {
  701. name = meta_graph.meta_info_def.any_info.toString();
  702. } else if (model.meta_graphs.length > 1) {
  703. name = i.toString();
  704. }
  705. const graph = new tf.Graph(metadata, meta_graph, name, bundle);
  706. this.modules.push(graph);
  707. }
  708. } else {
  709. const graph = new tf.Graph(metadata, null, '', bundle);
  710. this.modules.push(graph);
  711. }
  712. }
  713. };
tf.Graph = class {
    // Builds a graph view either from a MetaGraphDef (nodes, signatures,
    // version metadata) or, when no graph_def exists, from checkpoint bundle
    // tensors grouped into synthetic nodes by name prefix.
    constructor(metadata, meta_graph, name, bundle) {
        this.name = name;
        this.nodes = [];
        this.inputs = [];
        this.outputs = [];
        this.functions = [];
        this.signatures = [];
        this.version = null;
        this.metadata = [];
        this.groups = false;
        if (meta_graph && meta_graph.graph_def) {
            const graph = meta_graph.graph_def;
            // Version preference: GraphDef.versions.producer, then the
            // deprecated GraphDef.version, then the recorded TF version string.
            if (graph.versions) {
                this.version = `v${graph.versions.producer}`;
            } else if (graph.version) {
                this.version = graph.version;
            } else if (meta_graph.meta_info_def && meta_graph.meta_info_def.tensorflow_version) {
                this.version = meta_graph.meta_info_def.tensorflow_version;
            }
            if (meta_graph.meta_info_def && Array.isArray(meta_graph.meta_info_def.tags) && meta_graph.meta_info_def.tags.length > 0) {
                this.metadata.push(new tf.Argument('tags', meta_graph.meta_info_def.tags.join(', ')));
            }
            const output_arg_map = new Map();
            // Shadow 'metadata' with a graph-local wrapper that also resolves
            // functions defined in graph.library.
            metadata = new tf.GraphMetadata(metadata, graph.library);
            this.functions = metadata.functions;
            const context = new tf.Context();
            // Extracts a usable tensor name from a TensorInfo, following
            // coo_sparse and composite_tensor indirections.
            const resolveTensorInfoName = (tensor) => {
                if (tensor) {
                    if (tensor.name) {
                        return tensor.name;
                    }
                    if (tensor.coo_sparse && tensor.coo_sparse.values_tensor_name) {
                        return tensor.coo_sparse.values_tensor_name;
                    }
                    if (tensor.composite_tensor && Array.isArray(tensor.composite_tensor.components) && tensor.composite_tensor.components.length > 0) {
                        return resolveTensorInfoName(tensor.composite_tensor.components[0]);
                    }
                }
                return '';
            };
            // Turn each SignatureDef into a tf.Signature; the ':0' output
            // suffix is stripped so names match graph node names.
            for (const [key, signature_def] of Object.entries(meta_graph.signature_def)) {
                const inputs = [];
                for (const [key, tensor] of Object.entries(signature_def.inputs)) {
                    const type = new tf.TensorType(tensor.dtype, tensor.tensor_shape);
                    const name = resolveTensorInfoName(tensor).replace(/:0$/, '');
                    const value = context.value(name, type);
                    const argument = new tf.Argument(key, [value]);
                    inputs.push(argument);
                }
                const outputs = [];
                for (const [key, tensor] of Object.entries(signature_def.outputs)) {
                    const type = new tf.TensorType(tensor.dtype, tensor.tensor_shape);
                    const name = resolveTensorInfoName(tensor).replace(/:0$/, '');
                    const value = context.value(name, type);
                    const argument = new tf.Argument(key, [value]);
                    outputs.push(argument);
                    output_arg_map.set(name, key);
                }
                const signature = new tf.Signature(key, inputs, outputs);
                this.signatures.push(signature);
            }
            const nodes = graph.node || [];
            context.graph(metadata, nodes, output_arg_map);
            this.nodes = context.nodes;
            this.inputs = context.inputs;
            this.outputs = context.outputs;
        } else if (bundle) {
            // Checkpoint-only view: group tensors by their name prefix so each
            // group becomes one synthetic node.
            const nodes = new Map();
            for (const tensor of bundle.tensors) {
                const parts = tensor.name.split('/');
                if (bundle.format === 2) {
                    // Skip bookkeeping tensors (object graph, optimizer slots,
                    // metrics, moving averages) in V2 checkpoints.
                    if (tensor.name === '_CHECKPOINTABLE_OBJECT_GRAPH' ||
                        tensor.name.startsWith('optimizer/') ||
                        tensor.name.startsWith('keras_api/metrics/') ||
                        tensor.name.endsWith('/ExponentialMovingAverage') ||
                        tensor.name.indexOf('.OPTIMIZER_SLOT') !== -1) {
                        continue;
                    }
                    // Drop the trackable-object '.ATTRIBUTES/VARIABLE_VALUE' tail.
                    if (tensor.name.endsWith('/.ATTRIBUTES/VARIABLE_VALUE')) {
                        parts.pop();
                        parts.pop();
                    }
                }
                const tensorName = parts.pop();
                const name = parts.join('/');
                if (!nodes.has(name)) {
                    nodes.set(name, []);
                }
                nodes.get(name).push({ name: tensorName, value: tensor });
            }
            const namespaces = new Set();
            this.nodes = Array.from(nodes).map(([name, value]) => {
                const node = { op: 'Node', name };
                return new tf.Node(metadata, node, namespaces, new tf.Context(), value);
            });
        }
    }
};
  813. tf.Signature = class {
  814. constructor(name, inputs, outputs) {
  815. this.name = name;
  816. this.inputs = inputs;
  817. this.outputs = outputs;
  818. }
  819. };
  820. tf.Argument = class {
  821. constructor(name, value, type = null, visible = true) {
  822. this.name = name;
  823. this.value = value;
  824. this.type = type;
  825. this.visible = visible;
  826. }
  827. };
  828. tf.Value = class {
  829. constructor(name, type, initializer = null) {
  830. if (typeof name !== 'string') {
  831. throw new tf.Error(`Invalid value identifier '${JSON.stringify(name)}'.`);
  832. }
  833. this.name = name;
  834. this.type = !type && initializer ? initializer.type : type;
  835. this.initializer = initializer;
  836. }
  837. };
  838. tf.Function = class {
  839. constructor(metadata, name, func) {
  840. this.type = 'function';
  841. this.name = name;
  842. this.version = null;
  843. this.tags = null;
  844. this.nodes = [];
  845. this.inputs = [];
  846. this.outputs = [];
  847. this.description = func ? null : 'Function definition not found.';
  848. this.groups = false;
  849. const context = new tf.Context();
  850. const input_arg = func && func.signature ? func.signature.input_arg : [];
  851. const output_arg = func && func.signature ? func.signature.output_arg : [];
  852. const ret = func && func.ret ? func.ret : {};
  853. const nodes = func && func.node_def ? func.node_def : [];
  854. if (input_arg) {
  855. for (const input of input_arg) {
  856. const value = context.value(input.name, new tf.TensorType(input.type, null), null);
  857. const argument = new tf.Argument(input.name, [value]);
  858. this.inputs.push(argument);
  859. }
  860. }
  861. const output_arg_map = new Map();
  862. if (output_arg) {
  863. const ret_map = new Map();
  864. for (const key of Object.keys(ret)) {
  865. const value = func.ret[key];
  866. const split = value.split(':', 2);
  867. ret_map.set(key, split[0]);
  868. }
  869. for (const output of output_arg) {
  870. const name = ret_map.get(output.name);
  871. const type = new tf.TensorType(output.type, null);
  872. const value = context.value(name, type, null);
  873. const argument = new tf.Argument(output.name, [value]);
  874. this.outputs.push(argument);
  875. output_arg_map.set(name, output.name);
  876. }
  877. }
  878. context.graph(metadata, nodes, output_arg_map);
  879. this.nodes = context.nodes;
  880. }
  881. };
tf.Node = class {
    // View over one graph node. Two construction modes:
    //  - with 'tensors': a synthetic checkpoint node whose inputs are the
    //    bundle tensors themselves;
    //  - otherwise: a NodeDef whose attributes and typed input/output slots
    //    are resolved against op metadata.
    constructor(metadata, node, namespaces, context, tensors) {
        this.type = node.metadata || metadata.type(node.op) || { name: node.op };
        this.name = node.name;
        this.attributes = [];
        this.inputs = [];
        this.outputs = [];
        this.group = '';
        // Assign the node to a namespace group: its own name if that is a
        // known namespace, else its parent path component.
        if (node.name) {
            if (namespaces.has(node.name)) {
                this.group = node.name;
            } else {
                const index = node.name.lastIndexOf('/');
                if (index !== -1) {
                    const namespace = node.name.substring(0, index);
                    if (namespaces.has(namespace)) {
                        this.group = namespace;
                    }
                }
            }
        }
        if (tensors) {
            // Checkpoint mode: each grouped tensor becomes one input argument
            // whose value carries the tensor as initializer.
            for (const tensor of tensors) {
                const value = context.value(tensor.value.name, null, tensor.value);
                const argument = new tf.Argument(tensor.name, [value]);
                this.inputs.push(argument);
            }
        } else {
            if (node.device !== undefined) {
                this.device = node.device;
            }
            if (node.attr) {
                // Convert each AttrValue to a tf.Argument; 'obj.value' names
                // the populated oneof field.
                this.attributes = Object.entries(node.attr).map(([name, obj]) => {
                    const schema = obj && obj.metadata ? obj.metadata : metadata.attribute(node.op, name);
                    let value = null;
                    let type = schema && typeof schema.type === 'string' ? schema.type : null;
                    let visible = metadata.visible(node.op, name);
                    switch (obj.value) {
                        case undefined:
                            type = '';
                            value = null;
                            break;
                        case 'type':
                            type = 'type';
                            value = tf.Utility.dataType(obj.type);
                            break;
                        case 'i':
                            value = obj.i;
                            break;
                        case 'f':
                            value = obj.f;
                            break;
                        case 'b':
                            value = obj.b;
                            break;
                        case 'shape':
                            type = 'shape';
                            value = new tf.TensorShape(obj.shape);
                            break;
                        case 's':
                            value = tf.Utility.decodeText(obj.s);
                            break;
                        case 'tensor': {
                            type = 'tensor';
                            value = new tf.Tensor(obj.tensor);
                            break;
                        }
                        case 'func': {
                            type = 'function';
                            value = metadata.type(obj.func.name);
                            // type = 'object';
                            // value = new tf.Node(metadata, { op: obj.func.name, attr: obj.func.attr }, null, new tf.Context());
                            break;
                        }
                        case 'placeholder': {
                            type = 'placeholder';
                            value = obj;
                            break;
                        }
                        case 'list': {
                            // List attrs: pick the first populated repeated field.
                            const list = obj.list;
                            if (list.s && list.s.length > 0) {
                                value = list.s.map((s) => tf.Utility.decodeText(s));
                            } else if (list.i && list.i.length > 0) {
                                value = list.i;
                            } else if (list.f && list.f.length > 0) {
                                value = list.f;
                            } else if (list.type && list.type.length > 0) {
                                type = 'type[]';
                                value = list.type.map((type) => tf.Utility.dataType(type));
                            } else if (list.shape && list.shape.length > 0) {
                                type = 'shape[]';
                                value = list.shape.map((shape) => new tf.TensorShape(shape));
                            } else if (list.func && list.func.length > 0) {
                                type = 'function[]';
                                value = list.func.map((func) => new tf.Node(metadata, { op: func.name, attr: func.attr }));
                            } else {
                                value = [];
                            }
                            break;
                        }
                        default: {
                            // NOTE(review): 'value' is still null on this path, so the
                            // message always reports 'null'; 'obj' was probably intended
                            // here — confirm before changing.
                            throw new tf.Error(`Unsupported attribute value type '${JSON.stringify(value).substring(0, 32)}'.`);
                        }
                    }
                    if (schema) {
                        if (schema.visible === false) {
                            visible = false;
                        } else if (schema.default !== undefined) {
                            // Hide attributes whose value equals the schema default.
                            const equals = (value, defaultValue) => {
                                // Object-shaped defaults wrap a { type, value } pair.
                                if (!Array.isArray(defaultValue) && defaultValue === Object(defaultValue)) {
                                    switch (defaultValue.type) {
                                        case 'type':
                                            defaultValue = tf.Utility.dataType(defaultValue.value);
                                            break;
                                        case 'shape':
                                        case 'tensor':
                                            defaultValue = defaultValue.value;
                                            break;
                                        default:
                                            throw new tf.Error(JSON.stringify(defaultValue));
                                    }
                                }
                                if (typeof value === 'boolean' || typeof value === 'number' || typeof value === 'string') {
                                    return value === defaultValue;
                                }
                                if (typeof value === 'bigint') {
                                    return Number(value) === defaultValue;
                                }
                                return false;
                            };
                            const defaultValue = schema.default;
                            if (Array.isArray(value) && Array.isArray(defaultValue)) {
                                if (value.length === defaultValue.length && value.every((item, index) => equals(item, defaultValue[index]))) {
                                    visible = false;
                                }
                            } else if (equals(value, defaultValue)) {
                                visible = false;
                            }
                        }
                    }
                    // Internal bookkeeping attributes are always hidden.
                    if (name === '_class' || name === '_output_shapes' || visible === false) {
                        visible = false;
                    }
                    return new tf.Argument(name, value, type, visible);
                });
            }
            // Distribute data inputs (control inputs '^name' excluded) over the
            // op schema's declared input slots; numberAttr/typeListAttr give the
            // per-slot arity.
            let inputIndex = 0;
            const inputs = (node.input || []).filter((input) => !input.name.startsWith('^'));
            if (this.type && this.type.inputs) {
                for (const input of this.type.inputs) {
                    let count = 1;
                    if (input.numberAttr) {
                        const inputNumber = node.attr[input.numberAttr];
                        if (inputNumber && inputNumber.i) {
                            count = Number(inputNumber.i);
                        }
                    } else if (input.typeListAttr) {
                        const inputTypeListAttr = node.attr[input.typeListAttr];
                        if (inputTypeListAttr && inputTypeListAttr.list && inputTypeListAttr.list.type) {
                            count = inputTypeListAttr.list.type.length;
                        }
                    }
                    const values = inputs.slice(inputIndex, inputIndex + count).map((input) => context.value(input.name, null, null));
                    const argument = new tf.Argument(input.name, values);
                    this.inputs.push(argument);
                    inputIndex += count;
                }
            }
            // Leftover inputs beyond the schema get positional (or label) names.
            this.inputs.push(...inputs.slice(inputIndex).map((input, index) => {
                const name = input.label ? input.label : (inputIndex + index).toString();
                return new tf.Argument(name, [context.value(input.name)]);
            }));
            // Same slot distribution for outputs.
            let outputIndex = 0;
            const outputs = node.output || [];
            if (this.type && this.type.outputs) {
                for (const output of this.type.outputs) {
                    let count = 1;
                    if (output.numberAttr) {
                        const outputNumber = node.attr[output.numberAttr];
                        if (outputNumber && outputNumber.i) {
                            count = Number(outputNumber.i);
                        }
                    } else if (output.typeListAttr) {
                        const outputTypeListAttr = node.attr[output.typeListAttr];
                        if (outputTypeListAttr && outputTypeListAttr.list && outputTypeListAttr.list.type) {
                            count = outputTypeListAttr.list.type.length;
                        }
                    }
                    const values = outputs.slice(outputIndex, outputIndex + count).map((output) => {
                        // '-' is the placeholder identifier for unnamed outputs.
                        return context.value(output.name ? output.name : '-', null, null);
                    });
                    const name = output.name ? output.name : `output${this.outputs.length === 0 ? '' : this.outputs.length}`;
                    const argument = new tf.Argument(name, values);
                    this.outputs.push(argument);
                    outputIndex += count;
                }
            }
            this.outputs.push(...outputs.slice(outputIndex).map((output, index) => {
                const name = (outputIndex + index).toString();
                const value = context.value(output.name ? output.name : '-', null, null);
                return new tf.Argument(name, [value]);
            }));
            const controlDependencies = node.controlDependencies || [];
            this.controlDependencies = controlDependencies.map((input) => context.value(input.name));
        }
    }
};
tf.Tensor = class {
    // Wraps a tensorflow.TensorProto. 'encoding' is '<' when values are a
    // little-endian byte buffer and '|' when they are a plain array; the
    // 'values' getter normalizes strings and splat constants.
    constructor(tensor, name, category = null) {
        this.name = name;
        this.category = category;
        if (tensor) {
            this.type = new tf.TensorType(tensor.dtype, tensor.tensor_shape || tensor.tensorShape);
            this._tensor = tensor;
            if (Object.prototype.hasOwnProperty.call(tensor, 'tensor_content')) {
                // Raw packed bytes take precedence over the typed *_val fields.
                this._values = tensor.tensor_content;
                this.encoding = '<';
            } else {
                const DataType = tf.proto.tensorflow.DataType;
                switch (tensor.dtype) {
                    case DataType.DT_INVALID: {
                        break;
                    }
                    case DataType.DT_BFLOAT16: {
                        // bfloat16 is stored in half_val; widen each entry to a
                        // float32 bit pattern by shifting into the high 16 bits.
                        const values = tensor.half_val || [];
                        this._values = new Uint8Array(values.length << 2);
                        const view = new DataView(this._values.buffer, this._values.byteOffset, this._values.byteLength);
                        for (let i = 0; i < values.length; i++) {
                            view.setUint32(i << 2, values[i] << 16, true);
                        }
                        this.encoding = '<';
                        break;
                    }
                    case DataType.DT_HALF: {
                        // float16 values are stored as uint16 bit patterns.
                        const values = tensor.half_val || [];
                        this._values = new Uint8Array(values.length << 1);
                        const view = new DataView(this._values.buffer, this._values.byteOffset, this._values.byteLength);
                        for (let i = 0; i < values.length; i++) {
                            view.setUint16(i << 1, values[i], true);
                        }
                        this.encoding = '<';
                        break;
                    }
                    case DataType.DT_FLOAT: {
                        this._values = tensor.float_val || null;
                        this.encoding = '|';
                        break;
                    }
                    case DataType.DT_DOUBLE: {
                        this._values = tensor.double_val || null;
                        this.encoding = '|';
                        break;
                    }
                    case DataType.DT_UINT8:
                    case DataType.DT_UINT16:
                    case DataType.DT_INT8:
                    case DataType.DT_INT16:
                    case DataType.DT_INT32: {
                        // All narrow integer types share the int_val field.
                        this._values = tensor.int_val || null;
                        this.encoding = '|';
                        break;
                    }
                    case DataType.DT_UINT32: {
                        this._values = tensor.uint32_val || null;
                        this.encoding = '|';
                        break;
                    }
                    case DataType.DT_INT64: {
                        this._values = tensor.int64_val || null;
                        this.encoding = '|';
                        break;
                    }
                    case DataType.DT_UINT64: {
                        this._values = tensor.uint64_val || null;
                        this.encoding = '|';
                        break;
                    }
                    case DataType.DT_BOOL: {
                        this._values = tensor.bool_val || null;
                        this.encoding = '|';
                        break;
                    }
                    case DataType.DT_STRING: {
                        this._values = tensor.string_val || null;
                        this.encoding = '|';
                        break;
                    }
                    case DataType.DT_COMPLEX64: {
                        // Interleaved (real, imag) pairs.
                        // NOTE(review): the '|| null' fallback would make the
                        // following '.length' access throw when scomplex_val is
                        // absent — confirm whether '|| []' was intended.
                        const values = tensor.scomplex_val || null;
                        this._values = new Array(values.length >> 1);
                        for (let i = 0; i < values.length; i += 2) {
                            this._values[i >> 1] = new base.Complex(values[i], values[i + 1]);
                        }
                        this.encoding = '|';
                        break;
                    }
                    case DataType.DT_COMPLEX128: {
                        // Interleaved (real, imag) pairs; same '|| null' hazard as above.
                        const values = tensor.dcomplex_val || null;
                        this._values = new Array(values.length >> 1);
                        for (let i = 0; i < values.length; i += 2) {
                            this._values[i >> 1] = new base.Complex(values[i], values[i + 1]);
                        }
                        this.encoding = '|';
                        break;
                    }
                    case DataType.DT_FLOAT8_E5M2:
                    case DataType.DT_FLOAT8_E4M3FN:
                    case DataType.DT_FLOAT8_E4M3FNUZ:
                    case DataType.DT_FLOAT8_E4M3B11FNUZ:
                    case DataType.DT_FLOAT8_E5M2FNUZ: {
                        // 8-bit float variants share the raw float8_val bytes.
                        this._values = tensor.float8_val || null;
                        this.encoding = '<';
                        break;
                    }
                    default: {
                        throw new tf.Error(`Unsupported tensor data type '${tensor.dtype}'.`);
                    }
                }
            }
        } else {
            // No proto: unknown type placeholder.
            this.type = new tf.TensorType('?', null);
            this._tensor = null;
        }
    }
    get values() {
        let values = this._values;
        if (this.encoding === '|' && Array.isArray(values)) {
            if (this.type.dataType === 'string') {
                values = values.map((value) => tf.Utility.decodeText(value));
            }
            // A single stored element with a larger declared shape is a splat
            // constant: expand it to the full element count.
            const shape = (this._tensor.tensor_shape || this._tensor.tensorShape).dim.map((dim) => dim.size);
            const size = shape.reduce((a, b) => a * Number(b), 1);
            if (values.length === 1 && size > 1) {
                values = new Array(size).fill(values[0]);
            }
        }
        return values;
    }
};
  1222. tf.TensorType = class {
  1223. constructor(dtype, shape) {
  1224. this.dataType = dtype ? tf.Utility.dataType(dtype) : '?';
  1225. this.shape = new tf.TensorShape(shape);
  1226. }
  1227. equals(obj) {
  1228. return obj && this.dataType === obj.dataType && this.shape.equals(obj.shape);
  1229. }
  1230. toString() {
  1231. return this.dataType + this.shape.toString();
  1232. }
  1233. };
  1234. tf.TensorShape = class {
  1235. constructor(shape) {
  1236. this.dimensions = null;
  1237. if (shape) {
  1238. if (shape.unknown_rank) {
  1239. this.dimensions = null;
  1240. } else if (Array.isArray(shape.dim)) {
  1241. if (shape.dim.length === 0) {
  1242. this.dimensions = [];
  1243. } else if (shape.dim.length === 1 && !shape.dim[0].size) {
  1244. this.dimensions = [0];
  1245. } else {
  1246. this.dimensions = shape.dim.map((dim) => {
  1247. const size = dim.size && dim.size.toNumber ? dim.size.toNumber() : dim.size;
  1248. return size && size !== -1 ? size : '?';
  1249. });
  1250. }
  1251. }
  1252. }
  1253. }
  1254. equals(obj) {
  1255. return (this.dimensions === null && obj.dimensions === null) || (Array.isArray(this.dimensions) && Array.isArray(obj.dimensions) && this.dimensions.length === obj.dimensions.length && this.dimensions.every((value, index) => obj.dimensions[index] === value));
  1256. }
  1257. toString() {
  1258. if (this.dimensions === null) {
  1259. return '[?]';
  1260. }
  1261. if (this.dimensions.length === 0) {
  1262. return '';
  1263. }
  1264. return `[${this.dimensions.map((dim) => (dim && dim !== -1) ? dim.toString() : '?').join(',')}]`;
  1265. }
  1266. };
tf.TensorBundle = class {
    // Loads a TensorFlow checkpoint: format 1 (SavedTensorSlices in a single
    // table file) or format 2 ('.index' table plus '.data-NNNNN-of-NNNNN'
    // shard files fetched alongside it).
    static async open(stream, identifier, context) {
        const format = identifier.toLowerCase().endsWith('.index') ? 2 : 1;
        const table = new tf.TensorBundle.Table(stream);
        // The empty key holds the header record in both formats.
        if (!table.entries.has('')) {
            throw new tf.Error('Bundle header not available.');
        }
        if (format === 1) {
            return new tf.TensorBundle(format, table.entries, []);
        }
        const buffer = table.entries.get('');
        const reader = protobuf.BinaryReader.open(buffer);
        const header = tf.proto.tensorflow.BundleHeaderProto.decode(reader);
        const numShards = header.num_shards;
        const promises = [];
        for (let i = 0; i < numShards; i++) {
            // Shard file names use zero-padded 5-digit indices.
            const shardIndex = (`0000${i}`).slice(-5);
            const shardCount = (`0000${numShards}`).slice(-5);
            const filename = identifier.split('.');
            filename.pop();
            const basename = filename.join('.');
            const name = `${basename}.data-${shardIndex}-of-${shardCount}`;
            promises.push(context.fetch(name));
        }
        try {
            const contexts = await Promise.all(promises);
            const streams = contexts.map((context) => context.stream);
            return new tf.TensorBundle(format, table.entries, streams);
        } catch (error) {
            // Shards unavailable: report non-fatally and expose tensors without data.
            context.error(error, false);
            return new tf.TensorBundle(format, table.entries, null);
        }
    }
    constructor(format, entries, streams) {
        this.format = format;
        this.tensors = [];
        switch (format) {
            case 1: {
                // Format 1: every table entry is a SavedTensorSlices message;
                // slices of the same tensor are concatenated by value field.
                const buffer = entries.get('');
                const reader = protobuf.BinaryReader.open(buffer);
                const header = tf.proto.tensorflow.SavedTensorSlices.decode(reader);
                const data = new Map();
                for (const [name, buffer] of entries) {
                    if (name !== '' && name !== 'global_step') {
                        const reader = protobuf.BinaryReader.open(buffer);
                        const slices = tf.proto.tensorflow.SavedTensorSlices.decode(reader);
                        const name = slices.data.name;
                        const tensor = slices.data.data;
                        if (data.has(name)) {
                            const item = data.get(name);
                            // A null item marks a tensor whose slices could not
                            // be merged into a single value field.
                            if (item !== null) {
                                if (tensor[item.key] && tensor[item.key].length > 0) {
                                    item.value = item.value.concat(tensor[item.key]);
                                } else {
                                    data.set(name, null);
                                }
                            }
                        } else if (tensor.tensor_content && tensor.tensor_content.length > 0) {
                            data.set(name, { key: 'tensor_content', value: tensor.tensor_content });
                        } else {
                            // Pick the single populated *_val field, if unambiguous.
                            const keys = Object.keys(tensor).filter((key) => key.endsWith('_val') && tensor[key] && tensor[key].length > 0);
                            data.set(name, keys.length === 1 ? { key: keys[0], value: tensor[keys[0]] } : null);
                        }
                    }
                }
                // Build TensorProtos from the header metadata plus merged data.
                for (const meta of header.meta.tensor) {
                    if (meta.name !== 'global_step') {
                        const tensor = new tf.proto.tensorflow.TensorProto();
                        tensor.dtype = meta.type;
                        tensor.tensor_shape = meta.shape;
                        const item = data.get(meta.name);
                        if (item) {
                            tensor[item.key] = item.value;
                        }
                        this.tensors.push(new tf.Tensor(tensor, meta.name, null));
                    }
                }
                break;
            }
            case 2: {
                // Format 2: each entry is a BundleEntryProto pointing into a shard.
                entries.forEach((buffer, name) => {
                    if (name !== '') {
                        const reader = protobuf.BinaryReader.open(buffer);
                        const entry = tf.proto.tensorflow.BundleEntryProto.decode(reader);
                        const tensor = new tf.proto.tensorflow.TensorProto();
                        tensor.dtype = entry.dtype;
                        tensor.tensor_shape = entry.shape;
                        const offset = typeof entry.offset === 'bigint' ? Number(entry.offset) : entry.offset;
                        const size = typeof entry.size === 'bigint' ? Number(entry.size) : entry.size;
                        if (streams) {
                            const stream = streams[entry.shard_id];
                            stream.seek(offset);
                            tensor.tensor_content = stream.peek(size);
                        }
                        this.tensors.push(new tf.Tensor(tensor, name, null));
                    }
                });
                break;
            }
            default: {
                throw new tf.Error(`Unsupported Tensor Bundle format '${format}'.`);
            }
        }
    }
};
tf.TensorBundle.Table = class {
    // Reads an LevelDB-style SSTable as used by TensorFlow checkpoints and
    // flattens all data-block key/value pairs into this.entries.
    constructor(stream) {
        // https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/lib/io/table.cc
        this.entries = new Map();
        if (stream.length <= 54) {
            throw new tf.Error('Invalid index file size.');
        }
        // Footer occupies the last 48 bytes: two varint block handles padded
        // out, then an 8-byte magic signature.
        stream.seek(-48);
        const buffer = stream.peek(48);
        const reader = new tf.BinaryReader(buffer);
        reader.seek(-8);
        const signature = [0x57, 0xfb, 0x80, 0x8b, 0x24, 0x75, 0x47, 0xdb];
        if (!reader.read(8).every((value, index) => value === signature[index])) {
            throw new tf.Error('Invalid table signature.');
        }
        reader.seek(-48); // kEncodedLength
        reader.varint64(); // metaindex offset
        reader.varint64(); // metaindex size
        const indexOffset = reader.varint64();
        const indexSize = reader.varint64();
        // Each index entry's value is a varint (offset, size) handle for one
        // data block; decode every data block and merge its entries.
        const indexBlock = new tf.TensorBundle.Table.Block(stream, indexOffset, indexSize);
        for (const [, value] of indexBlock.entries) {
            const valueReader = new tf.BinaryReader(value);
            const offset = valueReader.varint64();
            const size = valueReader.varint64();
            const block = new tf.TensorBundle.Table.Block(stream, offset, size);
            for (const [name, value] of block.entries) {
                this.entries.set(name, value);
            }
        }
        // Rewind so callers can reuse the stream from the start.
        stream.seek(0);
    }
};
tf.TensorBundle.Table.Block = class {
    // Decodes one table block: optionally snappy-compressed contents followed
    // by a 1-byte compression type and 4-byte crc32 in the stream. Keys are
    // prefix-compressed between restart points.
    constructor(stream, offset, size) {
        // https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/lib/io/block.cc
        this.entries = new Map();
        stream.seek(offset);
        const buffer = stream.read(size); // blockContents
        const [compression] = stream.read(1);
        stream.skip(4); // crc32
        let reader = new tf.BinaryReader(buffer);
        switch (compression) {
            case 0: // kNoCompression
                break;
            case 1: // kSnappyCompression
                reader = new tf.BinaryReader(reader.unsnappy());
                break;
            default:
                throw new tf.Error(`Unsupported block compression '${compression}'.`);
        }
        // Trailer: restart offsets array followed by its int32 count.
        reader.seek(-4);
        const numRestarts = reader.int32();
        reader.seek(-4 - (4 * numRestarts));
        const restartOffsets = [];
        for (let i = 0; i < numRestarts; i++) {
            restartOffsets.push(reader.int32());
        }
        const decoder = new TextDecoder('utf-8');
        for (let i = 0; i < numRestarts; i++) {
            reader.seek(restartOffsets[i]);
            let key = '';
            while (reader.position < reader.length) {
                const sharedSize = reader.varint32(); // index shared size
                const nonSharedSize = reader.varint32(); // index non shared size
                const valueSize = reader.varint32();
                // An all-zero record header terminates the restart run.
                if (sharedSize === 0 && nonSharedSize === 0 && valueSize === 0) {
                    break;
                }
                // Rebuild the key: shared prefix from the previous key plus
                // the non-shared suffix bytes.
                key = key.substring(0, sharedSize);
                key += decoder.decode(reader.read(nonSharedSize));
                const value = reader.read(valueSize);
                this.entries.set(key, value);
            }
        }
    }
};
tf.BinaryReader = class {
    // Thin wrapper over base.BinaryReader adding varint and length-prefixed
    // string decoding used by the table/block readers.
    constructor(buffer) {
        this._reader = base.BinaryReader.open(buffer);
        this._decoder = new TextDecoder('utf-8');
    }
    get length() {
        return this._reader.length;
    }
    get position() {
        return this._reader.position;
    }
    // Negative positions are handled by the underlying reader (seek from end).
    seek(position) {
        this._reader.seek(position);
    }
    read(length) {
        return this._reader.read(length);
    }
    byte() {
        return this._reader.byte();
    }
    int32() {
        return this._reader.int32();
    }
    uint32() {
        return this._reader.uint32();
    }
    // uint32 length prefix followed by UTF-8 bytes.
    string() {
        const size = this.uint32();
        const buffer = this.read(size);
        return this._decoder.decode(buffer);
    }
    varint32() {
        return this.varint64();
    }
  1483. varint64() {
  1484. let result = 0;
  1485. for (let shift = 0; shift <= 63; shift += 7) {
  1486. const byte = this.byte();
  1487. if (byte & 128) {
  1488. result |= (byte & 127) << shift;
  1489. } else {
  1490. result |= byte << shift;
  1491. break;
  1492. }
  1493. }
  1494. return result;
  1495. }
  1496. unsnappy() {
  1497. const data = new Uint8Array(this.varint64());
  1498. const mask = [0, 0xff, 0xffff, 0xffffff, 0xffffffff];
  1499. let position = 0;
  1500. while (this._position < this._length) {
  1501. let length = 0;
  1502. const c = this.byte();
  1503. switch (c & 0x03) {
  1504. case 0: {
  1505. length = (c >>> 2) + 1;
  1506. if (length > 60) {
  1507. const short = length - 60;
  1508. length = (this.uint32() & mask[short]) + 1;
  1509. this._position += short - 4;
  1510. }
  1511. data.set(this.read(length), position);
  1512. break;
  1513. }
  1514. case 1: {
  1515. length = ((c >>> 2) & 0x07) + 4;
  1516. const offset = this.byte() + ((c >>> 5) << 8);
  1517. data.set(data.subarray(position - offset, position - offset + length), position);
  1518. break;
  1519. }
  1520. case 2: {
  1521. length = (c >>> 2) + 1;
  1522. const offset = this.uint16();
  1523. data.set(data.subarray(position - offset, position - offset + length), position);
  1524. break;
  1525. }
  1526. case 3: {
  1527. length = (c >>> 2) + 1;
  1528. const offset = this.uint32();
  1529. data.set(data.subarray(position - offset, position - offset + length), position);
  1530. break;
  1531. }
  1532. default: {
  1533. break;
  1534. }
  1535. }
  1536. position += length;
  1537. }
  1538. return data;
  1539. }
  1540. };
  1541. tf.EventFileReader = class {
  1542. static open(stream) {
  1543. if (stream.length < 16) {
  1544. return null;
  1545. }
  1546. const masked_crc32c = (bytes) => {
  1547. const poly = 0x82f63b78;
  1548. let crc = 0xffffffff;
  1549. for (let n = 0; n < bytes.length; n++) {
  1550. crc ^= bytes[n];
  1551. crc = crc & 1 ? (crc >>> 1) ^ poly : crc >>> 1;
  1552. crc = crc & 1 ? (crc >>> 1) ^ poly : crc >>> 1;
  1553. crc = crc & 1 ? (crc >>> 1) ^ poly : crc >>> 1;
  1554. crc = crc & 1 ? (crc >>> 1) ^ poly : crc >>> 1;
  1555. crc = crc & 1 ? (crc >>> 1) ^ poly : crc >>> 1;
  1556. crc = crc & 1 ? (crc >>> 1) ^ poly : crc >>> 1;
  1557. crc = crc & 1 ? (crc >>> 1) ^ poly : crc >>> 1;
  1558. crc = crc & 1 ? (crc >>> 1) ^ poly : crc >>> 1;
  1559. crc >>>= 0;
  1560. }
  1561. crc ^= 0xffffffff;
  1562. crc >>>= 0;
  1563. crc = ((crc >> 15) | (crc << 17)) + 0xa282ead8;
  1564. crc >>>= 0;
  1565. return crc;
  1566. };
  1567. const buffer = stream.peek(12);
  1568. const reader = new tf.BinaryReader(buffer);
  1569. const length_bytes = reader.read(8);
  1570. const length_crc = reader.uint32();
  1571. if (masked_crc32c(length_bytes) !== length_crc) {
  1572. return null;
  1573. }
  1574. return new tf.EventFileReader(stream);
  1575. }
  1576. constructor(stream) {
  1577. this._stream = stream;
  1578. }
  1579. read() {
  1580. if (this._stream.position < this._stream.length) {
  1581. const uint64 = (stream) => {
  1582. const buffer = stream.read(8);
  1583. const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength);
  1584. const value = view.getBigUint64(0, true);
  1585. return value.toNumber();
  1586. };
  1587. const length = uint64(this._stream);
  1588. this._stream.skip(4); // masked crc of length
  1589. const buffer = this._stream.read(length);
  1590. const reader = protobuf.BinaryReader.open(buffer);
  1591. const event = tf.proto.tensorflow.Event.decode(reader);
  1592. this._stream.skip(4); // masked crc of data
  1593. return event;
  1594. }
  1595. return null;
  1596. }
  1597. };
  1598. tf.GraphMetadata = class {
  1599. constructor(metadata, library) {
  1600. this._metadata = metadata;
  1601. this._functions = new Map();
  1602. this._attributes = new Map();
  1603. this._visibleCache = new Map();
  1604. if (library && Array.isArray(library.function) && library.function.length > 0) {
  1605. for (const func of library.function) {
  1606. const name = func.signature.name;
  1607. if (this._functions.has(func.name)) {
  1608. throw new tf.Error(`Duplicate function name '${func.name}'.`);
  1609. }
  1610. this._functions.set(name, func);
  1611. }
  1612. }
  1613. }
  1614. type(name) {
  1615. if (this._functions.has(name)) {
  1616. const func = this._functions.get(name);
  1617. if (func instanceof tf.Function) {
  1618. return func;
  1619. }
  1620. this._functions.set(name, new tf.Function(this, func.signature.name, func));
  1621. return this._functions.get(name);
  1622. }
  1623. const type = this._metadata.type(name);
  1624. if (!type) {
  1625. this._functions.set(name, new tf.Function(this, name, null));
  1626. return this._functions.get(name);
  1627. }
  1628. return type;
  1629. }
  1630. attribute(type, name) {
  1631. const key = `${type}::${name}`;
  1632. if (!this._attributes.has(key)) {
  1633. const schema = this.type(type);
  1634. if (schema && schema.attributes) {
  1635. for (const attribute of schema.attributes) {
  1636. const key = `${type}::${attribute.name}`;
  1637. this._attributes.set(key, attribute);
  1638. }
  1639. }
  1640. }
  1641. return this._attributes.get(key);
  1642. }
  1643. visible(type, name) {
  1644. if (!this._visibleCache.has(type)) {
  1645. const set = new Set();
  1646. const schema = this.type(type);
  1647. if (schema && schema.inputs) {
  1648. for (const input of schema.inputs) {
  1649. if (input.typeAttr) {
  1650. set.add(input.typeAttr);
  1651. } else if (input.typeListAttr) {
  1652. set.add(input.typeListAttr);
  1653. }
  1654. if (input.numberAttr) {
  1655. set.add(input.numberAttr);
  1656. }
  1657. }
  1658. }
  1659. if (schema && schema.outputs) {
  1660. for (const output of schema.outputs) {
  1661. if (output.typeAttr) {
  1662. set.add(output.typeAttr);
  1663. } else if (output.typeListAttr) {
  1664. set.add(output.typeListAttr);
  1665. }
  1666. if (output.numberAttr) {
  1667. set.add(output.numberAttr);
  1668. }
  1669. }
  1670. }
  1671. this._visibleCache.set(type, set);
  1672. }
  1673. return !this._visibleCache.get(type).has(name);
  1674. }
  1675. get functions() {
  1676. for (const [name, func] of this._functions) {
  1677. if (func instanceof tf.Function === false) {
  1678. this._functions.set(name, new tf.Function(this, func.signature.name, func));
  1679. }
  1680. }
  1681. return Array.from(this._functions.values());
  1682. }
  1683. };
// Shared graph-construction state: deduplicated values, signatures, and the
// final node list. graph() converts a flat GraphDef node list into a wired
// graph, folding constants and normalizing TorchScript-exported graphs.
tf.Context = class {
constructor() {
this._values = new Map();
this.signatures = [];
this.nodes = [];
}
// Return the unique tf.Value for 'name', creating it on first use.
// An empty name with a tensor yields a fresh anonymous value instead.
// Re-registering an existing name with a conflicting type or a tensor is an error.
value(name, type, tensor) {
if (name.length === 0 && tensor) {
return new tf.Value(name, type || null, tensor);
}
if (!this._values.has(name)) {
this._values.set(name, new tf.Value(name, type || null, tensor || null));
} else if ((type && !type.equals(this._values.get(name).type)) || tensor) {
throw new tf.Error(`Duplicate value '${name}'.`);
}
return this._values.get(name);
}
// Wire up 'nodes' (a GraphDef-style array) into inputs/outputs/this.nodes.
// output_arg_map, when present, marks nodes that are function output args.
graph(metadata, nodes, output_arg_map) {
const namespaces = new Set();
// Index nodes by name; from here on 'nodes' is a Map.
nodes = new Map(nodes.map((node) => [node.name, node]));
this.inputs = [];
this.outputs = [];
// Collect namespace prefixes (everything before the last '/') from
// non-Const node names, and reset per-node output slots.
for (const [name, node] of nodes) {
if (node.op !== 'Const') {
const index = name.lastIndexOf('/');
if (index !== -1) {
const namespace = name.substring(0, index);
namespaces.add(namespace);
}
}
node.output = [];
}
// Resolve an input string ('^name' = control edge, 'name:index' = numbered
// output) to [value key, output index, isControl, producer node], creating
// producer output slots up to 'index' as needed.
const node_output = (input) => {
let name = input;
let index = 0;
const control = name.startsWith('^');
if (control) {
name = name.substring(1);
}
const colon = name.lastIndexOf(':');
if (colon !== -1) {
const suffix = name.substring(colon + 1);
const candidate = name.substring(0, colon);
const value = parseInt(suffix, 10);
// Only treat ':N' as an output index when the prefix is a real node
// and the full string is not itself a node name.
if (!isNaN(value) && nodes.has(candidate) && !nodes.has(name)) {
index = value;
name = candidate;
}
}
const from = nodes.get(name);
if (from) {
for (let i = from.output.length; i <= index; i++) {
const key = i === 0 ? from.name : `${from.name}:${i}`;
const value = { name: key, to: [] };
from.output.push(value);
}
}
const key = index === 0 ? name : `${name}:${index}`;
return [key, index, control, from];
};
// Rebuild each node's input list as resolved edges, splitting control
// dependencies out, and record consumers on the producer's output slots.
for (const node of nodes.values()) {
const inputs = node.input;
node.input = [];
node.controlDependencies = [];
for (const input of inputs) {
const [key, index, control, from] = node_output(input);
if (from) {
from.output[index].to.push(node);
}
const value = { name: key, from };
if (control) {
node.controlDependencies.push(value);
} else {
node.input.push(value);
}
}
}
if (output_arg_map) {
for (const [name, node] of nodes) {
if (output_arg_map.has(name)) {
node.output.push({ name, to: [] });
}
}
}
// Fold a single-consumer Const node into an initializer value, or null.
const map_tensor = (name, node, kind) => {
if (node && node.op === 'Const' && node.input.length === 0 && node.output.length === 1 && node.output[0].to.length === 1 && node.controlDependencies.length === 0) {
const value = node.attr.value;
if (value && Object.prototype.hasOwnProperty.call(value, 'tensor')) {
const tensor = new tf.Tensor(value.tensor, name, kind);
return this.value(name, tensor.type, tensor);
}
}
return null;
};
// Fold a DT_RESOURCE Placeholder into a resource-variable value, or null.
const map_resource = (name, node, tensor) => {
if (node && node.op === 'Placeholder' && node.input.length === 0 && node.output.length === 1 && node.controlDependencies.length === 0) {
const dtype = node.attr.dtype.type;
if (dtype === tf.proto.tensorflow.DataType.DT_RESOURCE) {
return this.value(name, null, tensor);
}
}
return null;
};
// Collapse Identity -> Const (and Identity -> Identity -> Const) chains
// into initializers, removing the folded nodes.
for (const node of nodes.values()) {
if (node.op === 'Identity' && node.input.length === 1 && node.output.length === 1 && node.output[0].to.length === 1 && node.controlDependencies.length === 0) {
const initializer = map_tensor(node.name, node.input[0].from, 'Identity Constant');
if (initializer) {
nodes.delete(initializer.name);
nodes.delete(node.input[0].name);
}
const identity = node.input[0].from;
if (identity && identity.op === 'Identity' && identity.input.length === 1 && identity.output.length === 1 && node.output[0].to.length === 1 && node.controlDependencies.length === 0) {
const initializer = map_tensor(node.name, identity.input[0].from, 'Identity Constant');
if (initializer) {
nodes.delete(initializer.name);
// NOTE(review): the next line repeats the delete above (a no-op on a
// Map); it may have been intended as nodes.delete(identity.input[0].name)
// to mirror the single-Identity branch — confirm before changing.
nodes.delete(initializer.name);
nodes.delete(identity.name);
nodes.delete(node.name);
}
}
}
}
// Collapse remaining single-consumer Const nodes into initializers.
for (const node of nodes.values()) {
const initializer = map_tensor(node.name, node, 'Const');
if (initializer) {
nodes.delete(node.name);
nodes.delete(initializer.name);
}
}
// Collapse ReadVariableOp + resource Placeholder pairs into resource
// variable initializers built from the recorded dtype/_output_shapes.
for (const node of nodes.values()) {
if (node.op === 'ReadVariableOp' && node.input.length === 1 && node.output.length === 1 && node.output[0].to.length === 1 && node.controlDependencies.length === 0) {
if (node.attr && node.attr.dtype && node.attr._output_shapes && node.attr._output_shapes.list && node.attr._output_shapes.list.shape) {
const tensor = new tf.proto.tensorflow.TensorProto();
tensor.dtype = node.attr.dtype.type;
[tensor.tensor_shape] = node.attr._output_shapes.list.shape;
const name = node.name;
const initializer = map_resource(name, node.input[0].from, new tf.Tensor(tensor, name, 'Resource Variable'));
if (initializer) {
nodes.delete(initializer.name);
nodes.delete(node.input[0].name);
}
}
}
}
// Promote typed, shaped Placeholder nodes to graph input arguments.
const inputs = new Map();
for (const [name, node] of nodes) {
if (node.op === 'Placeholder' && node.attr && node.attr.dtype && Number.isInteger(node.attr.dtype.type) &&
node.attr._output_shapes && node.attr._output_shapes.list && Array.isArray(node.attr._output_shapes.list.shape) && node.attr._output_shapes.list.shape.length > 0 &&
node.input.length === 0 && node.output.length === 1 && node.controlDependencies.length === 0) {
const type = new tf.TensorType(node.attr.dtype.type, node.attr._output_shapes.list.shape[0]);
const value = this.value(name, type, null);
const argument = new tf.Argument(name, [value]);
inputs.set(name, argument);
nodes.delete(name);
}
}
// Normalize graphs exported from TorchScript: parse prim::* attribute
// strings, lift 'IO Node's into graph inputs/outputs, and turn constant
// inputs into node attributes.
const updateTorchScript = (nodes) => {
for (const node of nodes.values()) {
// prim::Constant encodes its value as a '{ value: ... }' text blob.
if (node.op === 'prim::Constant' && node.input.length === 0 && node.controlDependencies.length === 0 && node.attr && Object.keys(node.attr).length === 1 && node.attr.attr && node.attr.attr.s) {
const value = tf.Utility.decodeText(node.attr.attr.s);
const match = /{\s*value\s*:\s*(.*)\s*}/.exec(value);
if (match) {
node.value = match[1].trim();
}
const empty = /{\s*}/.exec(value);
if (empty) {
node.value = null;
}
}
// prim::GetAttr encodes the attribute name as '{ name: ... }'.
if (node.op === 'prim::GetAttr' && node.input.length === 1 && node.controlDependencies.length === 0 && node.attr && Object.keys(node.attr).length === 1 && node.attr.attr && node.attr.attr.s) {
const value = tf.Utility.decodeText(node.attr.attr.s);
const match = /{\s*name\s*:\s*([A-Za-z0-9_]*)\s*}/.exec(value);
if (match) {
node.value = match[1].trim();
}
}
// 'IO Node' placeholders become graph-level inputs/outputs.
if (node.op === 'IO Node' && node.controlDependencies.length === 0) {
const shape = node.attr && node.attr._output_shapes && node.attr._output_shapes.list && node.attr._output_shapes.list.shape ? node.attr._output_shapes.list.shape[0] : null;
const type = shape ? new tf.TensorType('?', shape) : null;
if (node.input.length === 0 && node.output.length === 1) {
const argument = new tf.Argument(node.name, [this.value(node.output[0].name, type, null)]);
this.inputs.push(argument);
nodes.delete(node.name);
}
if (node.input.length === 1 && node.output.length === 0) {
const argument = new tf.Argument(node.name, [this.value(node.input[0].name, type, null)]);
this.outputs.push(argument);
nodes.delete(node.name);
}
}
// Drop empty '{ }' attribute blobs left over from export.
if (Object.keys(node.attr).length === 2 &&
node.attr.attr && node.attr.attr.s && node.attr._output_shapes) {
const value = tf.Utility.decodeText(node.attr.attr.s);
if (/\s*/.exec(value) || /{\s*}/.exec(value)) {
node.attr = {};
delete node._output_shapes;
}
}
}
// Detach 'input' from 'node'; mark the producer removable once nothing
// consumes any of its outputs.
const remove_input = (input, node) => {
const from = input.from;
if (from) {
for (const output of from.output) {
output.to = output.to.filter((to) => to !== node);
}
if (from.output.every((output) => output.to.length === 0) && from.controlDependencies.length === 0) {
from.remove = true;
}
delete input.from;
}
};
// Fold prim::ListConstruct of constant inputs into a literal list value.
for (const node of nodes.values()) {
if (node.op === 'prim::ListConstruct' && node.input.every((input) => input.from.value !== undefined) && node.controlDependencies.length === 0) {
node.value = node.input.map((input) => input.from.value);
for (const input of node.input) {
remove_input(input, node);
}
node.input = [];
}
}
for (const node of nodes.values()) {
// Inputs fed by folded prim::* producers become labels/constants/lists
// on the edge itself; 'remove' collects edges to drop afterwards.
const remove = new Set();
for (let i = 0; i < node.input.length; i++) {
const input = node.input[i];
const from = input.from;
if (from) {
if (from.op === 'prim::GetAttr' && from.input.length === 1 && from.output.length === 1 && from.controlDependencies.length === 0 && from.value !== undefined) {
remove_input(input, node);
input.label = from.value;
const tensor = new tf.Tensor(null, input.name, from.op);
this.value(input.name, null, tensor);
}
if (from.op === 'prim::Constant' && from.input.length === 0 && from.controlDependencies.length === 0 && from.value !== undefined) {
input.constant = from.value;
remove_input(input, node);
remove.add(input.name);
}
if (from.op === 'prim::ListConstruct' && from.output.length === 1 && from.controlDependencies.length === 0 && from.value !== undefined) {
input.list = from.value;
remove_input(input, node);
remove.add(input.name);
}
}
}
if (node.__metadata__) {
const torch = node.__torch__;
// True when every node input is compatible with the schema's
// positional argument types (constants parsed as the declared type).
const match = (node, schema) => {
const args = schema.arguments || [];
const inputs = node.input || [];
if (inputs.length > args.length) {
return false;
}
for (let i = 0; i < inputs.length; i++) {
const input = inputs[i];
const arg = args[i];
let type = arg.real_type;
type = type instanceof torch.OptionalType ? type.getElementType() : type;
switch (type.str()) {
case 'Tensor': {
if ((input.constant === undefined && input.list === undefined) || input.constant === null) {
continue;
}
break;
}
case 'int':
case 'SymInt': {
if (input.constant !== undefined &&
Number.isInteger(parseInt(input.constant, 10))) {
continue;
}
break;
}
case 'float': {
if (input.constant !== undefined && !isNaN(parseFloat(input.constant))) {
continue;
}
break;
}
case 'int[]':
case 'int[2]':
case 'SymInt[]':
case 'SymInt[2]': {
if (Array.isArray(input.list)) {
const list = input.list.map((item) => parseInt(item, 10));
if (list.every((value) => Number.isInteger(value))) {
continue;
}
}
break;
}
case 'bool': {
if (input.constant === 'false' ||
input.constant === 'true' ||
input.constant === '0' ||
input.constant === '1') {
continue;
}
break;
}
case 'Scalar': {
if (input.constant !== undefined &&
Number.isInteger(parseInt(input.constant, 10))) {
continue;
}
break;
}
default: {
break;
}
}
// Reaching here means this input failed its type check.
return false;
}
return true;
};
const schema = node.__metadata__.find((schema) => match(node, schema));
if (schema) {
// Convert matched constant/list inputs into typed AttrValue
// payloads tagged with the schema argument metadata.
const args = schema.arguments;
const inputs = node.input || [];
for (let i = 0; i < inputs.length; i++) {
const input = inputs[i];
delete input.metadata;
const arg = args[i];
let type = arg.real_type;
type = type instanceof torch.OptionalType ? type.getElementType() : type;
switch (type.str()) {
case 'Tensor': {
input.metadata = arg;
break;
}
case 'int':
case 'SymInt': {
const value = parseInt(input.constant, 10);
input.attr = new tf.proto.tensorflow.AttrValue();
input.attr.i = value;
input.attr.metadata = arg;
break;
}
case 'float': {
const value = parseFloat(input.constant, 10);
input.attr = new tf.proto.tensorflow.AttrValue();
input.attr.f = value;
input.attr.metadata = arg;
break;
}
case 'int[]':
case 'int[2]':
case 'SymInt[]':
case 'SymInt[2]': {
const list = input.list.map((item) => parseInt(item, 10));
input.attr = new tf.proto.tensorflow.AttrValue();
input.attr.list = new tf.proto.tensorflow.ListValue();
input.attr.list.i = list;
input.attr.metadata = arg;
break;
}
case 'bool': {
input.attr = new tf.proto.tensorflow.AttrValue();
input.attr.b = input.constant === 'true' || input.constant === '1';
input.attr.metadata = arg;
break;
}
case 'Scalar': {
const value = parseInt(input.constant, 10);
input.attr = new tf.proto.tensorflow.AttrValue();
input.attr.i = value;
input.attr.metadata = arg;
break;
}
default: {
break;
}
}
}
node.metadata = { ...schema };
node.metadata.name = node.op;
}
}
// Move edge-carried attr/constant/list payloads onto node.attr and
// drop the edges marked for removal. (The callback has side effects
// by design: it both mutates node.attr and filters node.input.)
node.input = node.input.filter((input, index) => {
if (input.attr) {
const name = input.attr.metadata ? input.attr.metadata.name : index.toString();
node.attr[name] = input.attr;
} else if (input.constant !== undefined && input.constant !== null) {
const attr = new tf.proto.tensorflow.AttrValue();
attr.s = input.constant;
node.attr[index.toString()] = attr;
} else if (input.list !== undefined) {
const attr = new tf.proto.tensorflow.AttrValue();
attr.list = new tf.proto.tensorflow.ListValue();
attr.list.s = input.list;
node.attr[index.toString()] = attr;
}
return !remove.has(input.name);
});
}
// Finally delete the fully-folded prim::* producers.
for (const node of nodes.values()) {
if (node.op === 'prim::GetAttr' && node.remove) {
nodes.delete(node.name);
}
if (node.op === 'prim::Constant' && node.remove) {
nodes.delete(node.name);
}
if (node.op === 'prim::ListConstruct' && node.remove) {
nodes.delete(node.name);
}
}
};
updateTorchScript(nodes);
for (const input of inputs.values()) {
this.inputs.push(input);
}
// Everything left becomes a graph node.
for (const node of nodes.values()) {
this.nodes.push(new tf.Node(metadata, node, namespaces, this));
}
}
};
  2099. tf.Utility = class {
  2100. static decodeText(value) {
  2101. if (typeof value === 'string') {
  2102. return value;
  2103. }
  2104. if (value.length === 0) {
  2105. return '';
  2106. }
  2107. tf.Utility._utf8Decoder = tf.Utility._utf8Decoder || new TextDecoder('utf-8');
  2108. return tf.Utility._utf8Decoder.decode(value);
  2109. }
  2110. static dataType(type) {
  2111. if (!tf.Utility._dataTypes) {
  2112. const DataType = tf.proto.tensorflow.DataType;
  2113. const dataTypes = new Map(Object.entries(DataType).map(([name, value]) => {
  2114. const key = name.startsWith('DT_') ? name.substring(3) : name;
  2115. return [value, key.toLowerCase()];
  2116. }));
  2117. dataTypes.set(DataType.DT_HALF, 'float16');
  2118. dataTypes.set(DataType.DT_FLOAT, 'float32');
  2119. dataTypes.set(DataType.DT_DOUBLE, 'float64');
  2120. dataTypes.set(DataType.DT_BOOL, 'boolean');
  2121. dataTypes.set(DataType.DT_COMPLEX64, 'complex<float32>');
  2122. dataTypes.set(DataType.DT_COMPLEX128, 'complex<float64>');
  2123. dataTypes.set(DataType.DT_FLOAT8_E5M2, 'float8e5m2');
  2124. dataTypes.set(DataType.DT_FLOAT8_E4M3FN, 'float8e4m3fn');
  2125. dataTypes.set(DataType.DT_FLOAT8_E4M3FNUZ, 'float8e4m3fnuz');
  2126. dataTypes.set(DataType.DT_FLOAT8_E4M3B11FNUZ, 'float8e4m3b11fnuz');
  2127. dataTypes.set(DataType.DT_FLOAT8_E5M2FNUZ, 'float8e5m2fnuz');
  2128. tf.Utility._dataTypes = dataTypes;
  2129. }
  2130. return tf.Utility._dataTypes.has(type) ? tf.Utility._dataTypes.get(type) : '?';
  2131. }
  2132. static dataTypeKey(type) {
  2133. if (!tf.Utility._dataTypeKeys) {
  2134. tf.Utility.dataType(0);
  2135. tf.Utility._dataTypeKeys = new Map(Array.from(tf.Utility._dataTypes).map(([key, value]) => [value, key]));
  2136. }
  2137. return tf.Utility._dataTypeKeys.get(type);
  2138. }
  2139. };
  2140. tf.Error = class extends Error {
  2141. constructor(message) {
  2142. super(message);
  2143. this.name = 'Error loading TensorFlow model.';
  2144. }
  2145. };
  2146. export const ModelFactory = tf.ModelFactory;