tf.js 112 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695
  1. // Experimental
  2. var tf = tf || {};
  3. var base = base || require('./base');
  4. var gzip = gzip || require('./gzip');
  5. var json = json || require('./json');
  6. var protobuf = protobuf || require('./protobuf');
  7. tf.ModelFactory = class {
  8. match(context) {
  9. const identifier = context.identifier;
  10. const extension = identifier.split('.').pop().toLowerCase();
  11. if (extension === 'pbtxt' || extension === 'prototxt' || extension === 'pt' || extension === 'txt') {
  12. if (identifier.endsWith('predict_net.pbtxt') || identifier.endsWith('predict_net.prototxt') ||
  13. identifier.endsWith('init_net.pbtxt') || identifier.endsWith('init_net.prototxt')) {
  14. return undefined;
  15. }
  16. const tags = context.tags('pbtxt');
  17. if (['input_stream', 'output_stream', 'input_side_packet', 'output_side_packet'].some((key) => tags.has(key) || tags.has('node.' + key))) {
  18. return undefined;
  19. }
  20. if (tags.has('saved_model_schema_version') || tags.has('meta_graphs')) {
  21. return 'tf.pbtxt.SavedModel';
  22. }
  23. if (tags.has('graph_def')) {
  24. return 'tf.pbtxt.MetaGraphDef';
  25. }
  26. if (tags.has('node')) {
  27. return 'tf.pbtxt.GraphDef';
  28. }
  29. }
  30. if (extension === 'pb' || extension === 'pbtxt' || extension === 'prototxt' || extension === 'graphdef' || extension === 'meta') {
  31. if (identifier.endsWith('predict_net.pb') || identifier.endsWith('init_net.pb')) {
  32. return undefined;
  33. }
  34. if (identifier == 'tfhub_module.pb') {
  35. const stream = context.stream;
  36. const signature = [ 0x08, 0x03 ];
  37. if (signature.length === stream.length && stream.peek(signature.length).every((value, index) => value === signature[index])) {
  38. return undefined;
  39. }
  40. }
  41. const tags = context.tags('pb');
  42. if (tags.size > 0) {
  43. if (Array.from(tags).every((pair) => pair[0] < 8 && pair[1] !== 5)) {
  44. const match = (tags, schema) => {
  45. for (const pair of schema) {
  46. const key = pair[0];
  47. const inner = pair[1];
  48. const value = tags[key];
  49. if (value === undefined) {
  50. continue;
  51. }
  52. if (inner === false) {
  53. return false;
  54. }
  55. if (Array.isArray(inner)) {
  56. if (typeof value !== 'object' || !match(value, inner)) {
  57. return false;
  58. }
  59. }
  60. else if (inner !== value) {
  61. if (inner === 2 && !Array.isArray(value) && Object(value) === (value) && Object.keys(value).length === 0) {
  62. return true;
  63. }
  64. return false;
  65. }
  66. }
  67. return true;
  68. };
  69. const signatureGraphDef = [
  70. [1 /* node */, [
  71. [1 /* name */, 2],
  72. [2 /* op */, 2],
  73. [3 /* input */, 2],
  74. [4 /* device */,2],
  75. [5 /* attr */, [
  76. [1,2],
  77. [2,[]]
  78. ]],
  79. [6 /* experimental_debug_info */, []]
  80. ]],
  81. [2 /* library */, []],
  82. [3 /* version */, 0],
  83. [4 /* versions */, [[1,0],[2,0]]]
  84. ];
  85. const signatureMetaGraphDef = [
  86. [1 /* meta_info_def */, [[1,2],[2,[]],[3,[]],[4,2],[6,2],[7,0],[8,[]]]],
  87. [2 /* graph_def */, signatureGraphDef],
  88. [3 /* saver_def */, [[1,2],[2,2],[3,2],[4,0],[5,0],[6,5],[7,0]]],
  89. [4 /* collection_def */,[]],
  90. [5 /* signature_def */, []],
  91. [6 /* asset_file_def */, []],
  92. [7 /* object_graph_def */, []]
  93. ];
  94. const signatureSavedModel = [[1,0],[2,signatureMetaGraphDef]];
  95. if (tags.size === 1 && tags.get(1) === 2) {
  96. const tags = context.tags('pb+');
  97. // mediapipe.BoxDetectorIndex
  98. if (match(tags, [[1,[[1,[[1,[[1,5],[2,5],[3,5],[4,5],[6,0],[7,5],[8,5],[10,5],[11,0],[12,0]]],[2,5],[3,[]]]],[2,false],[3,false],[4,false],[5,false]]],[2,false],[3,false]] )) {
  99. return undefined;
  100. }
  101. // third_party.tensorflow.python.keras.protobuf.SavedMetadata
  102. if (match(tags, [[1,[[1,[[1,0],[2,0]]],[2,0],[3,2],[4,2],[5,2]]]])) {
  103. return 'tf.pb.keras.SavedMetadata';
  104. }
  105. }
  106. if ((!tags.has(1) || tags.get(1) === 0) && tags.get(2) === 2) {
  107. const tags = context.tags('pb+');
  108. if (match(tags, signatureSavedModel)) {
  109. return 'tf.pb.SavedModel';
  110. }
  111. }
  112. if ((!tags.has(1) || tags.get(1) === 2) &&
  113. (!tags.has(2) || tags.get(2) === 2) &&
  114. (!tags.has(3) || tags.get(3) === 2) &&
  115. (!tags.has(4) || tags.get(4) === 2)) {
  116. const tags = context.tags('pb+');
  117. if (match(tags, signatureMetaGraphDef)) {
  118. return 'tf.pb.MetaGraphDef';
  119. }
  120. }
  121. if (tags.get(1) !== 2) {
  122. const tags = context.tags('pb+');
  123. if (match(tags, signatureGraphDef)) {
  124. return 'tf.pb.GraphDef';
  125. }
  126. }
  127. const decode = (buffer, value) => {
  128. const reader = protobuf.BinaryReader.open(buffer);
  129. const length = reader.length;
  130. while (reader.position < length) {
  131. const tag = reader.uint32();
  132. const number = tag >>> 3;
  133. const type = tag & 7;
  134. if (value === number) {
  135. return type === 2 ? reader.bytes() : null;
  136. }
  137. reader.skipType(type);
  138. }
  139. return null;
  140. };
  141. const stream = context.stream;
  142. const buffer = stream.peek();
  143. const nodeBuffer = decode(buffer, 1);
  144. if (nodeBuffer) {
  145. const nameBuffer = decode(nodeBuffer, 1);
  146. if (nameBuffer) {
  147. const decoder = new TextDecoder('utf-8');
  148. const name = decoder.decode(nameBuffer);
  149. if (Array.from(name).filter((c) => c <= ' ').length < 256) {
  150. return 'tf.pb.GraphDef';
  151. }
  152. }
  153. }
  154. }
  155. }
  156. else {
  157. const tags = context.tags('pbtxt');
  158. if (['input_stream', 'output_stream', 'input_side_packet', 'output_side_packet'].some((key) => tags.has(key) || tags.has('node.' + key))) {
  159. return undefined;
  160. }
  161. if (tags.has('node')) {
  162. return 'tf.pbtxt.GraphDef';
  163. }
  164. if (tags.has('graph_def')) {
  165. return 'tf.pbtxt.MetaGraphDef';
  166. }
  167. if (tags.has('saved_model_schema_version') || tags.has('meta_graphs')) {
  168. return 'tf.pbtxt.SavedModel';
  169. }
  170. }
  171. }
  172. if (extension === 'json') {
  173. for (const type of [ 'json', 'json.gz' ]) {
  174. const obj = context.open(type);
  175. if (obj && obj.modelTopology && (obj.format === 'graph-model' || Array.isArray(obj.modelTopology.node))) {
  176. return 'tf.' + type;
  177. }
  178. }
  179. }
  180. if (extension === 'index' || extension === 'ckpt') {
  181. const stream = context.stream;
  182. if (stream.length > 8) {
  183. stream.seek(-8);
  184. const buffer = stream.read(8);
  185. stream.seek(0);
  186. const signature = [ 0x57, 0xfb, 0x80, 0x8b, 0x24, 0x75, 0x47, 0xdb ];
  187. if (buffer.every((value, index) => value === signature[index])) {
  188. return 'tf.bundle';
  189. }
  190. }
  191. }
  192. if (/.data-[0-9][0-9][0-9][0-9][0-9]-of-[0-9][0-9][0-9][0-9][0-9]$/.exec(identifier)) {
  193. return 'tf.data';
  194. }
  195. if (/^events.out.tfevents./.exec(identifier)) {
  196. const stream = context.stream;
  197. if (tf.EventFileReader.open(stream)) {
  198. return 'tf.events';
  199. }
  200. }
  201. if (extension === 'pbmm') {
  202. const stream = context.stream;
  203. if (stream.length > 8) {
  204. stream.seek(-8);
  205. const buffer = stream.read(8);
  206. stream.seek(0);
  207. const reader = new base.BinaryReader(buffer);
  208. const offset = reader.uint64();
  209. if (offset < stream.length) {
  210. return 'tf.pb.mmap';
  211. }
  212. }
  213. }
  214. return undefined;
  215. }
  216. open(context, match) {
  217. return context.require('./tf-proto').then(() => {
  218. tf.proto = protobuf.get('tf');
  219. const openModel = (saved_model, format, producer, bundle) => {
  220. return context.metadata('tf-metadata.json').then((metadata) => {
  221. return new tf.Model(metadata, saved_model, format, producer, bundle);
  222. });
  223. };
  224. const openSavedModel = (saved_model, format, producer) => {
  225. if (saved_model.meta_graphs.length === 1 &&
  226. saved_model.meta_graphs[0].object_graph_def &&
  227. saved_model.meta_graphs[0].object_graph_def.nodes &&
  228. saved_model.meta_graphs[0].object_graph_def.nodes.length > 0) {
  229. const identifier = 'variables/variables.index';
  230. return context.request(identifier, null).then((stream) => {
  231. return tf.TensorBundle.open(stream, identifier, context).then((bundle) => {
  232. return openModel(saved_model, format, producer, bundle);
  233. });
  234. }).catch(() => {
  235. return openModel(saved_model, format, producer, null);
  236. });
  237. }
  238. if (saved_model && saved_model.meta_graphs && saved_model.meta_graphs.length > 0 &&
  239. saved_model.meta_graphs[0].meta_info_def &&
  240. Object.prototype.hasOwnProperty.call(saved_model.meta_graphs[0].meta_info_def, 'tensorflow_version')) {
  241. producer = 'TensorFlow v' + saved_model.meta_graphs[0].meta_info_def.tensorflow_version;
  242. }
  243. return openModel(saved_model, format, producer, null);
  244. };
  245. const openBundle = (context, stream, identifier) => {
  246. stream = stream || context.stream;
  247. identifier = identifier || context.identifier;
  248. return tf.TensorBundle.open(stream, identifier, context).then((bundle) => {
  249. return openModel(null, 'TensorFlow Tensor Bundle v' + bundle.format.toString(), null, bundle);
  250. }).catch((error) => {
  251. context.exception(error, false);
  252. const message = error && error.message ? error.message : error.toString();
  253. throw new tf.Error(message.replace(/\.$/, '') + " in '" + identifier + "'.");
  254. });
  255. };
  256. const openData = (context) => {
  257. const identifier = context.identifier;
  258. const base = identifier.split('.');
  259. base.pop();
  260. const file = base.join('.') + '.index';
  261. return context.request(file, null).then((stream) => {
  262. return openBundle(context, stream, file);
  263. }).catch((/* error */) => {
  264. const file = base.join('.') + '.ckpt';
  265. return context.request(file, null).then((stream) => {
  266. openBundle(context, stream, file);
  267. });
  268. });
  269. };
        // Parse a TensorBoard event file (events.out.tfevents.*) into a synthetic
        // SavedModel: graph_def/meta_graph_def events become meta graphs, while
        // summaries and tagged run metadata are collected (currently unused here).
        const openEventFile = (context) => {
            let format = 'TensorFlow Event File';
            let producer = null;
            const stream = context.stream;
            const eventFileReader = tf.EventFileReader.open(stream);
            const saved_model = new tf.proto.tensorflow.SavedModel();
            const run_metadata = [];
            const summaries = [];
            // Drain the event stream; read() returns a falsy value at end of file.
            for (;;) {
                const event = eventFileReader.read();
                if (!event) {
                    break;
                }
                switch (event.what) {
                    case 'file_version': {
                        const formats = new Map([
                            [ 'brain.Event:1', 'TensorFlow Event File v1' ],
                            [ 'brain.Event:2', 'TensorFlow Event File v2' ]
                        ]);
                        if (!formats.has(event.file_version)) {
                            throw new tf.Error("Unsupported event file version '" + event.file_version + "'.");
                        }
                        format = formats.get(event.file_version);
                        break;
                    }
                    case 'graph_def': {
                        // Wrap the raw GraphDef in a MetaGraphDef labelled with the
                        // event wall time so multiple graphs stay distinguishable.
                        const buffer = event.graph_def;
                        const reader = protobuf.BinaryReader.open(buffer);
                        const graph_def = tf.proto.tensorflow.GraphDef.decode(reader);
                        const meta_graph_def = new tf.proto.tensorflow.MetaGraphDef();
                        meta_graph_def.meta_info_def = new tf.proto.tensorflow.MetaGraphDef.MetaInfoDef();
                        meta_graph_def.meta_info_def.any_info = event.wall_time.toString();
                        meta_graph_def.graph_def = graph_def;
                        saved_model.meta_graphs.push(meta_graph_def);
                        break;
                    }
                    case 'meta_graph_def': {
                        const buffer = event.meta_graph_def;
                        const reader = protobuf.BinaryReader.open(buffer);
                        const meta_graph_def = tf.proto.tensorflow.MetaGraphDef.decode(reader);
                        saved_model.meta_graphs.push(meta_graph_def);
                        break;
                    }
                    case 'summary': {
                        for (const value of event.summary.value) {
                            summaries.push(value);
                        }
                        break;
                    }
                    case 'tagged_run_metadata': {
                        const entry = event.tagged_run_metadata;
                        const buffer = entry.run_metadata;
                        const reader = protobuf.BinaryReader.open(buffer);
                        const metadata = tf.proto.tensorflow.RunMetadata.decode(reader);
                        run_metadata.push(metadata);
                        break;
                    }
                    default: {
                        throw new tf.Error("Unsupported event type '" + event.what + "'.");
                    }
                }
            }
            // When every op is aten::/prim:: (or the placeholder 'IO Node'), the
            // event file was written by PyTorch's TensorBoard exporter.
            if (saved_model.meta_graphs.every((meta_graph) => meta_graph.graph_def.node.every((node) => node.op.startsWith('aten::') || node.op.startsWith('prim::') || node.op === 'IO Node'))) {
                producer = 'PyTorch';
                // Best effort: attach operator schemas from pytorch-metadata.json,
                // keyed by op name; on any failure return the unannotated model.
                const openPyTorchMetadata = (context, saved_model) => {
                    return context.request('pytorch-metadata.json', 'utf-8', null).then((data) => {
                        const metadata = new Map();
                        for (const item of JSON.parse(data)) {
                            const index = item.name.indexOf(':');
                            const key = (index !== -1) ? item.name.substring(0, index) : item.name;
                            const name = key.replace(/^torch\./, 'aten::');
                            if (!metadata.has(name)) {
                                metadata.set(name, []);
                            }
                            metadata.get(name).push(item);
                        }
                        for (const meta_graph of saved_model.meta_graphs) {
                            for (const node of meta_graph.graph_def.node) {
                                node.__metadata__ = Array.from(metadata.get(node.op) || []);
                            }
                        }
                        return saved_model;
                    }).catch(() => {
                        return saved_model;
                    });
                };
                return openPyTorchMetadata(context, saved_model).then((saved_model) => {
                    return openModel(saved_model, format, producer, null);
                });
            }
            return openSavedModel(saved_model, format, producer);
        };
        // Open a TensorFlow.js graph-model JSON file, resolving its weight shard
        // files (per weightsManifest) and splicing the tensor data into the
        // corresponding Const nodes of the decoded graph.
        const openJson = (context, type) => {
            try {
                const obj = context.open(type);
                const format = 'TensorFlow.js ' + (obj.format || 'graph-model');
                const producer = obj.convertedBy || obj.generatedBy || '';
                const meta_graph = new tf.proto.tensorflow.MetaGraphDef();
                meta_graph.graph_def = tf.JsonReader.decodeGraphDef(obj.modelTopology);
                const saved_model = new tf.proto.tensorflow.SavedModel();
                saved_model.meta_graphs.push(meta_graph);
                // Index Const nodes by name; weight manifests reference them below.
                const nodes = new Map();
                for (const node of meta_graph.graph_def.node) {
                    node.input = node.input || [];
                    if (node.op === 'Const') {
                        nodes.set(node.name, node);
                    }
                }
                // Request each referenced shard file once (path -> pending stream).
                const shards = new Map();
                const manifests = Array.isArray(obj.weightsManifest) ? obj.weightsManifest : [];
                for (const manifest of manifests) {
                    for (const path of manifest.paths) {
                        if (!shards.has(path)) {
                            shards.set(path, context.request(path, null));
                        }
                    }
                }
                // Populate node tensors from the resolved shard buffers. Weights are
                // laid out back-to-back in manifest order, so `offset` must advance
                // exactly by each weight's encoded size.
                const openShards = (shards) => {
                    const dtype_size_map = new Map([ [ 'float16', 2 ], [ 'float32', 4 ], [ 'float64', 8 ], [ 'int8', 1 ], [ 'int16', 2 ], [ 'int32', 4 ], [ 'int64', 8 ], [ 'uint8', 1 ], [ 'uint16', 2 ], [ 'uint32', 4 ], [ 'uint64', 8 ], [ 'bool', 1 ] ]);
                    for (const manifest of manifests) {
                        // Concatenate this manifest's shards into one contiguous buffer
                        // (only when every shard was successfully fetched).
                        let buffer = null;
                        if (Array.isArray(manifest.paths) && manifest.paths.length > 0 && manifest.paths.every((path) => shards.has(path))) {
                            const list = manifest.paths.map((path) => shards.get(path));
                            const size = list.reduce((a, b) => a + b.length, 0);
                            buffer = new Uint8Array(size);
                            let offset = 0;
                            for (const item of list) {
                                buffer.set(item, offset);
                                offset += item.length;
                            }
                        }
                        let offset = 0;
                        for (const weight of manifest.weights) {
                            // Quantized weights are stored with the quantization dtype.
                            const dtype = weight.quantization && weight.quantization.dtype ? weight.quantization.dtype : weight.dtype;
                            const size = weight.shape.reduce((a, b) => a * b, 1);
                            switch (dtype) {
                                case 'string': {
                                    // Strings are length-prefixed; decode them one by one
                                    // and advance by however many bytes the reader consumed.
                                    const data = [];
                                    if (buffer && size > 0) {
                                        const reader = new tf.BinaryReader(buffer.subarray(offset));
                                        for (let i = 0; i < size; i++) {
                                            data[i] = reader.string();
                                        }
                                        offset += reader.position;
                                    }
                                    if (nodes.has(weight.name)) {
                                        const node = nodes.get(weight.name);
                                        node.attr.value.tensor.dtype = tf.Utility.dataTypeKey(dtype);
                                        node.attr.value.tensor.string_val = data;
                                    }
                                    break;
                                }
                                default: {
                                    if (!dtype_size_map.has(dtype)) {
                                        throw new tf.Error("Unsupported weight data type size '" + dtype + "'.");
                                    }
                                    // Fixed-size dtypes: slice itemsize * element-count bytes.
                                    const itemsize = dtype_size_map.get(dtype);
                                    const length = itemsize * size;
                                    const tensor_content = buffer ? buffer.slice(offset, offset + length) : null;
                                    offset += length;
                                    if (nodes.has(weight.name)) {
                                        const node = nodes.get(weight.name);
                                        node.attr.value.tensor.dtype = tf.Utility.dataTypeKey(dtype);
                                        node.attr.value.tensor.tensor_content = tensor_content;
                                    }
                                    break;
                                }
                            }
                        }
                    }
                    return openSavedModel(saved_model, format, producer, null);
                };
                return Promise.all(shards.values()).then((streams) => {
                    // Replace each pending stream with its peeked byte buffer,
                    // relying on Map preserving insertion order.
                    for (const key of shards.keys()) {
                        const stream = streams.shift();
                        const buffer = stream.peek();
                        shards.set(key, buffer);
                    }
                    if (type === 'json.gz') {
                        try {
                            // Shards of a gzipped model may themselves be gzipped;
                            // transparently unwrap single-entry archives.
                            for (const key of shards.keys()) {
                                const stream = shards.get(key);
                                const archive = gzip.Archive.open(stream);
                                if (archive) {
                                    const entries = archive.entries;
                                    if (entries.size === 1) {
                                        const stream = entries.values().next().value;
                                        const buffer = stream.peek();
                                        shards.set(key, buffer);
                                    }
                                }
                            }
                        }
                        catch (error) {
                            // continue regardless of error
                        }
                    }
                    return openShards(shards);
                }).catch(() => {
                    // Weights could not be fetched; open the topology without data.
                    shards.clear();
                    return openShards(shards);
                });
            }
            catch (error) {
                throw new tf.Error('File text format is not TensorFlow.js graph-model (' + error.message + ').');
            }
        };
  477. const openTextGraphDef = (context) => {
  478. try {
  479. const stream = context.stream;
  480. const reader = protobuf.TextReader.open(stream);
  481. const graph_def = tf.proto.tensorflow.GraphDef.decodeText(reader);
  482. const meta_graph = new tf.proto.tensorflow.MetaGraphDef();
  483. meta_graph.graph_def = graph_def;
  484. const saved_model = new tf.proto.tensorflow.SavedModel();
  485. saved_model.meta_graphs.push(meta_graph);
  486. const format = 'TensorFlow Graph';
  487. return openSavedModel(saved_model, format, null);
  488. }
  489. catch (error) {
  490. const message = error && error.message ? error.message : error.toString();
  491. throw new tf.Error('File text format is not tensorflow.GraphDef (' + message.replace(/\.$/, '') + ').');
  492. }
  493. };
  494. const openTextMetaGraphDef = (context) => {
  495. try {
  496. const stream = context.stream;
  497. const reader = protobuf.TextReader.open(stream);
  498. const meta_graph = tf.proto.tensorflow.MetaGraphDef.decodeText(reader);
  499. const saved_model = new tf.proto.tensorflow.SavedModel();
  500. saved_model.meta_graphs.push(meta_graph);
  501. const format = 'TensorFlow MetaGraph';
  502. return openSavedModel(saved_model, format, null);
  503. }
  504. catch (error) {
  505. throw new tf.Error('File text format is not tensorflow.MetaGraphDef (' + error.message + ').');
  506. }
  507. };
  508. const openTextSavedModel = (context) => {
  509. try {
  510. const stream = context.stream;
  511. const reader = protobuf.TextReader.open(stream);
  512. const saved_model = tf.proto.tensorflow.SavedModel.decodeText(reader);
  513. let format = 'TensorFlow Saved Model';
  514. if (saved_model && Object.prototype.hasOwnProperty.call(saved_model, 'saved_model_schema_version')) {
  515. format = format + ' v' + saved_model.saved_model_schema_version.toString();
  516. }
  517. return openSavedModel(saved_model, format, null);
  518. }
  519. catch (error) {
  520. throw new tf.Error('File text format is not tensorflow.SavedModel (' + error.message + ').');
  521. }
  522. };
  523. const openBinaryGraphDef = (context) => {
  524. let saved_model = null;
  525. const format = 'TensorFlow Graph';
  526. try {
  527. const stream = context.stream;
  528. const reader = protobuf.BinaryReader.open(stream);
  529. const graph_def = tf.proto.tensorflow.GraphDef.decode(reader);
  530. const meta_graph = new tf.proto.tensorflow.MetaGraphDef();
  531. meta_graph.graph_def = graph_def;
  532. saved_model = new tf.proto.tensorflow.SavedModel();
  533. saved_model.meta_graphs.push(meta_graph);
  534. }
  535. catch (error) {
  536. const message = error && error.message ? error.message : error.toString();
  537. throw new tf.Error('File format is not tensorflow.GraphDef (' + message.replace(/\.$/, '') + ').');
  538. }
  539. return openSavedModel(saved_model, format, null);
  540. };
  541. const openBinaryMetaGraphDef = (context) => {
  542. let saved_model = null;
  543. const format = 'TensorFlow MetaGraph';
  544. try {
  545. const stream = context.stream;
  546. const reader = protobuf.BinaryReader.open(stream);
  547. const meta_graph = tf.proto.tensorflow.MetaGraphDef.decode(reader);
  548. saved_model = new tf.proto.tensorflow.SavedModel();
  549. saved_model.meta_graphs.push(meta_graph);
  550. }
  551. catch (error) {
  552. const message = error && error.message ? error.message : error.toString();
  553. throw new tf.Error('File format is not tensorflow.MetaGraphDef (' + message.replace(/\.$/, '') + ').');
  554. }
  555. return openSavedModel(saved_model, format, null);
  556. };
  557. const openBinarySavedModel = (context) => {
  558. let saved_model = null;
  559. let format = 'TensorFlow Saved Model';
  560. try {
  561. const stream = context.stream;
  562. const reader = protobuf.BinaryReader.open(stream);
  563. saved_model = tf.proto.tensorflow.SavedModel.decode(reader);
  564. if (saved_model && Object.prototype.hasOwnProperty.call(saved_model, 'saved_model_schema_version')) {
  565. format = format + ' v' + saved_model.saved_model_schema_version.toString();
  566. }
  567. }
  568. catch (error) {
  569. const message = error && error.message ? error.message : error.toString();
  570. throw new tf.Error('File format is not tensorflow.SavedModel (' + message.replace(/\.$/, '') + ').');
  571. }
  572. return openSavedModel(saved_model, format, null);
  573. };
  574. const openSavedMetadata = (context) => {
  575. /*
  576. const stream = context.stream;
  577. const reader = protobuf.BinaryReader.open(stream);
  578. const saved_metadata = tf.proto.third_party.tensorflow.python.keras.protobuf.SavedMetadata.decode(reader);
  579. debugger;
  580. */
  581. const identifier = 'saved_model.pb';
  582. return context.request(identifier, null).then((stream) => {
  583. return openBinarySavedModel({ stream: stream });
  584. });
  585. };
const openMemmapped = (context) => {
    // Open a TensorFlow memmapped-format file: a concatenation of packages
    // described by a MemmappedFileSystemDirectory stored near the end of the
    // file, with the directory offset in the final 8 bytes.
    const stream = context.stream;
    const readDirectoryOffset = (stream) => {
        // Last 8 bytes of the file hold the little-endian uint64 offset of
        // the directory proto.
        stream.seek(-8);
        const buffer = stream.read(8);
        const reader = new base.BinaryReader(buffer);
        return reader.uint64();
    };
    const readDirectory = (stream, offset) => {
        // The directory proto spans [offset, file_end - 8).
        const end = stream.position - 8;
        stream.seek(offset);
        const buffer = stream.read(end - offset);
        const reader = protobuf.BinaryReader.open(buffer);
        return tf.proto.tensorflow.MemmappedFileSystemDirectory.decode(reader);
    };
    const offset = readDirectoryOffset(stream);
    const directory = readDirectory(stream, offset);
    const elements = new Map();
    for (const element of directory.element) {
        const name = element.name;
        if (elements.has(name)) {
            throw new tf.Error("Memory mapped file directory contains duplicate '" + name + "'.");
        }
        elements.set(name, {
            offset: element.offset ? element.offset.toNumber() : 0,
            length: element.length ? element.length.toNumber() : 0
        });
    }
    // For entries with no recorded length, infer it as the gap to the next
    // higher offset (the directory offset acts as the final sentinel).
    const offsets = Array.from(elements).map((entry) => entry[1].offset);
    offsets.push(offset);
    for (const value of elements.values()) {
        if (value.length === 0) {
            // Math.min over an empty array yields Infinity; the
            // Number.isInteger guard skips assignment in that case.
            const min = Math.min.apply(null, offsets.filter((offset) => offset > value.offset));
            if (Number.isInteger(min)) {
                value.length = min - value.offset;
            }
        }
    }
    for (const entry of elements) {
        const offset = entry[1].offset;
        const length = entry[1].length;
        stream.seek(offset);
        entry[1].buffer = stream.read(length);
    }
    // The root GraphDef lives under the fixed 'memmapped_package://.' key.
    if (!elements.has('memmapped_package://.')) {
        throw new tf.Error('Memory mapped file directory does not contain tensorflow.GraphDef root.');
    }
    const element = elements.get('memmapped_package://.');
    const buffer = element.buffer;
    const reader = protobuf.BinaryReader.open(buffer);
    const graph_def = tf.proto.tensorflow.GraphDef.decode(reader);
    const format = 'TensorFlow GraphDef Memmapped';
    // Wrap the GraphDef in a synthetic SavedModel for the common open path.
    const meta_graph = new tf.proto.tensorflow.MetaGraphDef();
    meta_graph.graph_def = graph_def;
    const saved_model = new tf.proto.tensorflow.SavedModel();
    saved_model.meta_graphs.push(meta_graph);
    return openSavedModel(saved_model, format, null);
};
  644. switch (match) {
  645. case 'tf.bundle':
  646. return openBundle(context);
  647. case 'tf.data':
  648. return openData(context);
  649. case 'tf.events':
  650. return openEventFile(context);
  651. case 'tf.json':
  652. return openJson(context, 'json');
  653. case 'tf.json.gz':
  654. return openJson(context, 'json.gz');
  655. case 'tf.pbtxt.GraphDef':
  656. return openTextGraphDef(context);
  657. case 'tf.pbtxt.MetaGraphDef':
  658. return openTextMetaGraphDef(context);
  659. case 'tf.pbtxt.SavedModel':
  660. return openTextSavedModel(context);
  661. case 'tf.pb.GraphDef':
  662. return openBinaryGraphDef(context);
  663. case 'tf.pb.MetaGraphDef':
  664. return openBinaryMetaGraphDef(context);
  665. case 'tf.pb.SavedModel':
  666. return openBinarySavedModel(context);
  667. case 'tf.pb.keras.SavedMetadata':
  668. return openSavedMetadata(context);
  669. case 'tf.pb.mmap':
  670. return openMemmapped(context);
  671. default:
  672. throw new tf.Error("Unsupported TensorFlow format '" + match + "'.");
  673. }
  674. });
  675. }
  676. };
  677. tf.Model = class {
  678. constructor(metadata, model, format, producer, bundle) {
  679. this._format = format;
  680. this._producer = producer || '';
  681. this._graphs = [];
  682. if (model) {
  683. for (let i = 0; i < model.meta_graphs.length; i++) {
  684. const meta_graph = model.meta_graphs[i];
  685. const name = (meta_graph.meta_info_def && meta_graph.meta_info_def.any_info) ? meta_graph.meta_info_def.any_info.toString() : ((model.meta_graphs.length > 1) ? i.toString() : '-');
  686. const graph = new tf.Graph(metadata, meta_graph, name, bundle);
  687. this._graphs.push(graph);
  688. }
  689. }
  690. else {
  691. const graph = new tf.Graph(metadata, null, '', bundle);
  692. this._graphs.push(graph);
  693. }
  694. }
  695. get format() {
  696. return this._format;
  697. }
  698. get producer() {
  699. return this._producer;
  700. }
  701. get description() {
  702. return null;
  703. }
  704. get graphs() {
  705. return this._graphs;
  706. }
  707. };
tf.Graph = class {
    // Graph view over either a MetaGraphDef (proto path) or a raw tensor
    // bundle (checkpoint path) when no graph proto is available.
    constructor(metadata, meta_graph, name, bundle) {
        this._name = name;
        this._inputs = [];
        this._outputs = [];
        this._nodes = [];
        this._version = null;
        if (meta_graph && meta_graph.graph_def) {
            const graph = meta_graph.graph_def;
            // Version preference: GraphDef.versions.producer, then the
            // deprecated GraphDef.version, then the TF version string.
            if (graph.versions) {
                this._version = 'v' + graph.versions.producer.toString();
            }
            else if (graph.version) {
                this._version = graph.version;
            }
            else if (meta_graph.meta_info_def && meta_graph.meta_info_def.tensorflow_version) {
                this._version = meta_graph.meta_info_def.tensorflow_version;
            }
            if (meta_graph.meta_info_def && meta_graph.meta_info_def.tags) {
                this._tags = meta_graph.meta_info_def.tags.join(', ');
            }
            // Wrap metadata so function definitions in graph.library resolve.
            metadata = new tf.GraphMetadata(metadata, graph.library);
            const nodes = graph.node || [];
            const context = tf.Utility.createGraph(metadata, nodes);
            this._nodes = context.nodes;
            this._inputs = context.inputs;
            this._outputs = context.outputs;
        }
        else if (bundle) {
            // Checkpoint-only path: group bundle tensors into synthetic
            // nodes keyed by their name prefix.
            const nodes = new Map();
            for (const tensor of bundle.tensors) {
                const parts = tensor.name.split('/');
                if (bundle.format === 2) {
                    // V2 checkpoints carry bookkeeping entries (optimizer
                    // slots, metrics, EMA copies) that are not model weights.
                    if (tensor.name === '_CHECKPOINTABLE_OBJECT_GRAPH' ||
                        tensor.name.startsWith('optimizer/') ||
                        tensor.name.startsWith('keras_api/metrics/') ||
                        tensor.name.endsWith('/ExponentialMovingAverage') ||
                        tensor.name.indexOf('.OPTIMIZER_SLOT') !== -1) {
                        continue;
                    }
                    // Drop the trailing '.ATTRIBUTES/VARIABLE_VALUE' suffix.
                    if (tensor.name.endsWith('/.ATTRIBUTES/VARIABLE_VALUE')) {
                        parts.pop();
                        parts.pop();
                    }
                }
                const tensorName = parts.pop();
                const name = parts.join('/');
                if (!nodes.has(name)) {
                    nodes.set(name, []);
                }
                nodes.get(name).push({ name: tensorName, value: tensor });
            }
            const namespaces = new Set();
            this._nodes = Array.from(nodes).map((entry) => {
                const node = { op: 'Node', name: entry[0] };
                return new tf.Node(metadata, node, namespaces, null, entry[1]);
            });
        }
    }
    get name() {
        return this._name;
    }
    get version() {
        return this._version;
    }
    get tags() {
        // May be undefined when meta_info_def carried no tags.
        return this._tags;
    }
    get groups() {
        return false;
        // TODO return true;
    }
    get inputs() {
        return this._inputs;
    }
    get outputs() {
        return this._outputs;
    }
    get nodes() {
        return this._nodes;
    }
    get metadata() {
        // NOTE(review): _metadata is never assigned in this constructor, so
        // this getter always returns undefined here — confirm intent.
        return this._metadata;
    }
};
  793. tf.Parameter = class {
  794. constructor(name, args) {
  795. this._name = name;
  796. this._arguments = args;
  797. }
  798. get name() {
  799. return this._name;
  800. }
  801. get visible() {
  802. return true;
  803. }
  804. get arguments() {
  805. return this._arguments;
  806. }
  807. };
  808. tf.Argument = class {
  809. constructor(name, type, initializer) {
  810. if (typeof name !== 'string') {
  811. throw new tf.Error("Invalid argument identifier '" + JSON.stringify(name) + "'.");
  812. }
  813. this._name = name;
  814. this._type = type || null;
  815. this._initializer = initializer || null;
  816. }
  817. get name() {
  818. return this._name;
  819. }
  820. get type() {
  821. if (this._initializer) {
  822. return this._initializer.type;
  823. }
  824. return this._type;
  825. }
  826. get initializer() {
  827. return this._initializer;
  828. }
  829. };
tf.Function = class {
    // Graph-like view over a tensorflow.FunctionDef from the graph library.
    // 'func' may be null when only the name is known; the description then
    // records that the definition is missing.
    constructor(metadata, name, func) {
        this._name = name;
        this._version = null;
        this._tags = null;
        this._inputs = [];
        this._outputs = [];
        this._nodes = [];
        this._description = !func ? 'Function definition not found.' : null;
        const input_arg = func && func.signature ? func.signature.input_arg : [];
        const output_arg = func && func.signature ? func.signature.output_arg : [];
        const ret = func && func.ret ? func.ret : {};
        const nodes = func && func.node_def ? func.node_def : [];
        if (input_arg) {
            for (const input of input_arg) {
                const argument = new tf.Argument(input.name, new tf.TensorType(input.type, null), null);
                this._inputs.push(new tf.Parameter(input.name, [ argument ]));
            }
        }
        const output_arg_map = new Map();
        if (output_arg) {
            // 'ret' maps output-arg name -> "node:output"; keep only the
            // node part (before the ':') as the producing argument name.
            const ret_map = new Map();
            for (const key of Object.keys(ret)) {
                const value = func.ret[key];
                const split = value.split(':', 2);
                ret_map.set(key, split[0]);
            }
            for (const output of output_arg) {
                const name = ret_map.get(output.name);
                this._outputs.push(new tf.Parameter(output.name, [
                    new tf.Argument(name, new tf.TensorType(output.type, null), null)
                ]));
                // Remember which internal value feeds each declared output.
                output_arg_map.set(name, output.name);
            }
        }
        const context = tf.Utility.createGraph(metadata, nodes, output_arg_map);
        this._nodes = context.nodes;
        this._inputs = this._inputs.concat(context.inputs);
        this._outputs = this._outputs.concat(context.outputs);
    }
    get type() {
        return 'function';
    }
    get name() {
        return this._name;
    }
    get description() {
        return this._description || '';
    }
    get version() {
        return this._version;
    }
    get tags() {
        return this._tags;
    }
    get groups() {
        return false;
        // TODO return true;
    }
    get inputs() {
        return this._inputs;
    }
    get outputs() {
        return this._outputs;
    }
    get nodes() {
        return this._nodes;
    }
};
tf.Node = class {
    // View over a NodeDef (proto path) or a synthetic bundle node (when
    // 'tensors' is provided). 'initializers' maps input names to
    // pre-built arguments; may be null on the bundle path.
    constructor(metadata, node, namespaces, initializers, tensors) {
        this._type = node.metadata || metadata.type(node.op) || { name: node.op };
        this._name = node.name;
        this._attributes = [];
        this._inputs = [];
        this._outputs = [];
        this._group = '';
        if (node.name) {
            // Group by namespace: the node's own name if it is a known
            // namespace, otherwise its parent path when that is one.
            if (namespaces.has(node.name)) {
                this._group = node.name;
            }
            else {
                const lastIndex = node.name.lastIndexOf('/');
                if (lastIndex != -1) {
                    const namespace = node.name.substring(0, lastIndex);
                    if (namespaces.has(namespace)) {
                        this._group = namespace;
                    }
                }
            }
        }
        if (tensors) {
            // Bundle path: each tensor becomes one input parameter whose
            // argument carries the tensor as initializer.
            for (const tensor of tensors) {
                this._inputs.push(new tf.Parameter(tensor.name, [
                    new tf.Argument(tensor.value.name, null, tensor.value)
                ]));
            }
        }
        else {
            if (node.device !== undefined) {
                this._device = node.device;
            }
            if (node.attr) {
                this._attributes = Object.entries(node.attr).map((entry) => {
                    return new tf.Attribute(metadata, node.op, entry[0], entry[1]);
                });
            }
            let inputIndex = 0;
            // Names starting with '^' are control inputs, handled separately.
            const inputs = (node.input || []).filter((input) => !input.name.startsWith('^'));
            if (this._type && this._type.inputs) {
                for (const input of this._type.inputs) {
                    // An op-schema input may consume several actual inputs:
                    // numberAttr gives a count, typeListAttr a list of types.
                    let inputCount = 1;
                    if (input.numberAttr) {
                        const inputNumber = node.attr[input.numberAttr];
                        if (inputNumber && inputNumber.i) {
                            inputCount = inputNumber.i;
                        }
                    }
                    else if (input.typeListAttr) {
                        const inputTypeListAttr = node.attr[input.typeListAttr];
                        if (inputTypeListAttr && inputTypeListAttr.list && inputTypeListAttr.list.type) {
                            inputCount = inputTypeListAttr.list.type.length;
                        }
                    }
                    const inputArguments = inputs.slice(inputIndex, inputIndex + inputCount).map((input) => {
                        return initializers.has(input.name) ? initializers.get(input.name) : new tf.Argument(input.name, null, null);
                    });
                    this._inputs.push(new tf.Parameter(input.name, inputArguments));
                    inputIndex += inputCount;
                }
            }
            // Remaining inputs not covered by the schema get positional names.
            this._inputs.push(...inputs.slice(inputIndex).map((input, index) => {
                return new tf.Parameter(input.label ? input.label : (inputIndex + index).toString(), [
                    initializers.has(input.name) ? initializers.get(input.name) : new tf.Argument(input.name, null, null)
                ]);
            }));
            let outputIndex = 0;
            const outputs = node.output || [];
            if (this._type && this._type.outputs) {
                for (const output of this._type.outputs) {
                    // Same count expansion as for inputs.
                    let outputCount = 1;
                    if (output.numberAttr) {
                        const outputNumber = node.attr[output.numberAttr];
                        if (outputNumber && outputNumber.i) {
                            outputCount = outputNumber.i;
                        }
                    }
                    else if (output.typeListAttr) {
                        const outputTypeListAttr = node.attr[output.typeListAttr];
                        if (outputTypeListAttr && outputTypeListAttr.list && outputTypeListAttr.list.type) {
                            outputCount = outputTypeListAttr.list.type.length;
                        }
                    }
                    const outputArguments = outputs.slice(outputIndex, outputIndex + outputCount).map((output) => {
                        return new tf.Argument(output.name ? output.name : '-', null, null);
                    });
                    this._outputs.push(new tf.Parameter(output.name, outputArguments));
                    outputIndex += outputCount;
                }
            }
            this._outputs.push(...outputs.slice(outputIndex).map((output, index) => {
                return new tf.Parameter((outputIndex + index).toString(), [
                    new tf.Argument(output.name ? output.name : '-', null, null)
                ]);
            }));
            const controlDependencies = node.controlDependencies || [];
            this._controlDependencies = controlDependencies.map((input) => new tf.Argument(input.name));
        }
    }
    get type() {
        return this._type;
    }
    get name() {
        return this._name;
    }
    get device() {
        return this._device || null;
    }
    get group() {
        return this._group;
    }
    get description() {
        return '';
    }
    get inputs() {
        return this._inputs;
    }
    get outputs() {
        return this._outputs;
    }
    get controlDependencies() {
        // Undefined on the bundle path, where the else branch never runs.
        return this._controlDependencies;
    }
    get attributes() {
        return this._attributes;
    }
};
tf.Attribute = class {
    // View over an AttrValue proto. 'value.value' is the name of the set
    // oneof field ('i', 'f', 's', 'type', 'shape', 'tensor', 'func',
    // 'placeholder', 'list') or undefined when nothing is set.
    constructor(metadata, op, name, value) {
        this._name = name;
        this._value = null;
        this._type = null;
        const schema = value && value.metadata ? value.metadata : metadata.attribute(op, name);
        const visible = metadata.visible(op, name);
        if (schema && schema.type) {
            this._type = schema.type;
        }
        switch (value.value) {
            case undefined:
                this._type = '';
                this._value = null;
                break;
            case 'type':
                this._type = 'type';
                this._value = tf.Utility.dataType(value.type);
                break;
            case 'i':
                this._value = value.i;
                break;
            case 'f':
                this._value = value.f;
                break;
            case 'b':
                this._value = value.b;
                break;
            case 'shape':
                this._type = 'shape';
                this._value = new tf.TensorShape(value.shape);
                break;
            case 's':
                // Bytes field; decoded to a display string.
                this._value = tf.Utility.decodeText(value.s);
                break;
            case 'tensor': {
                this._type = 'tensor';
                this._value = new tf.Tensor(value.tensor);
                break;
            }
            case 'func': {
                this._type = 'function';
                this._value = new tf.Node(metadata, { op: value.func.name, attr: value.func.attr });
                break;
            }
            case 'placeholder': {
                this._type = 'placeholder';
                this._value = value;
                break;
            }
            case 'list': {
                // A list AttrValue sets at most one of its repeated fields;
                // the first non-empty one determines the element kind.
                const list = value.list;
                if (list.s && list.s.length > 0) {
                    this._value = list.s.map((s) => tf.Utility.decodeText(s));
                }
                else if (list.i && list.i.length > 0) {
                    this._value = list.i;
                }
                else if (list.f && list.f.length > 0) {
                    this._value = list.f;
                }
                else if (list.type && list.type.length > 0) {
                    this._type = 'type[]';
                    this._value = list.type.map((type) => tf.Utility.dataType(type));
                }
                else if (list.shape && list.shape.length > 0) {
                    this._type = 'shape[]';
                    this._value = list.shape.map((shape) => new tf.TensorShape(shape));
                }
                else if (list.func && list.func.length > 0) {
                    this._type = 'function[]';
                    this._value = list.func.map((func) => new tf.Node(metadata, { op: func.name, attr: func.attr }));
                }
                else {
                    this._value = [];
                }
                break;
            }
            default: {
                throw new tf.Error("Unsupported attribute value type '" + JSON.stringify(value).substring(0, 32) + "'.");
            }
        }
        if (schema) {
            // Hide attributes that either the schema marks invisible or
            // whose value equals the schema default.
            if (Object.prototype.hasOwnProperty.call(schema, 'visible') && !schema.visible) {
                this._visible = false;
            }
            else if (Object.prototype.hasOwnProperty.call(schema, 'default')) {
                const equals = (value, defaultValue) => {
                    // Structured defaults carry a { type, value } wrapper.
                    if (!Array.isArray(defaultValue) && defaultValue === Object(defaultValue)) {
                        switch (defaultValue.type) {
                            case 'type':
                                defaultValue = tf.Utility.dataType(defaultValue.value);
                                break;
                            case 'shape':
                            case 'tensor':
                                defaultValue = defaultValue.value;
                                break;
                            default:
                                throw new tf.Error(JSON.stringify(defaultValue));
                        }
                    }
                    if (typeof value === 'boolean' || typeof value === 'number' || typeof value === 'string') {
                        return value === defaultValue;
                    }
                    if (value instanceof base.Int64 || value instanceof base.Uint64) {
                        return value.toNumber() === defaultValue;
                    }
                    return false;
                };
                const value = this._value;
                const defaultValue = schema.default;
                if (Array.isArray(value) && Array.isArray(defaultValue)) {
                    if (value.length === defaultValue.length && value.every((item, index) => equals(item, defaultValue[index]))) {
                        this._visible = false;
                    }
                }
                else if (equals(value, defaultValue)) {
                    this._visible = false;
                }
            }
        }
        // Internal bookkeeping attributes are always hidden.
        if (name == '_output_shapes') {
            this._visible = false;
        }
        if (name == '_class') {
            this._visible = false;
        }
        if (visible === false) {
            this._visible = false;
        }
    }
    get name() {
        return this._name;
    }
    get type() {
        return this._type;
    }
    get value() {
        return this._value;
    }
    get visible() {
        return this._visible == false ? false : true;
    }
};
tf.Tensor = class {
    // View over a tensorflow.TensorProto. Values live either in the packed
    // 'tensor_content' bytes (this._buffer) or in the per-dtype repeated
    // '*_val' fields (this._data).
    constructor(tensor, name, kind) {
        this._name = name;
        this._kind = kind || null;
        if (tensor) {
            this._type = new tf.TensorType(tensor.dtype, tensor.tensor_shape || tensor.tensorShape);
            this._tensor = tensor;
            if (Object.prototype.hasOwnProperty.call(tensor, 'tensor_content')) {
                this._buffer = tensor.tensor_content;
            }
            else {
                const DataType = tf.proto.tensorflow.DataType;
                switch (tensor.dtype) {
                    case DataType.DT_BFLOAT16: {
                        // bfloat16 is the top 16 bits of a float32: widen each
                        // half_val by shifting left 16 so it can later be read
                        // back with getFloat32.
                        const values = tensor.half_val || [];
                        this._buffer = new Uint8Array(values.length << 2);
                        const view = new DataView(this._buffer.buffer, this._buffer.byteOffset, this._buffer.byteLength);
                        for (let i = 0; i < values.length; i++) {
                            view.setUint32(i << 2, values[i] << 16, true);
                        }
                        break;
                    }
                    case DataType.DT_HALF: {
                        // float16 values arrive as raw 16-bit integers in
                        // half_val; pack them little-endian for later decode.
                        const values = tensor.half_val || [];
                        this._buffer = new Uint8Array(values.length << 1);
                        const view = new DataView(this._buffer.buffer, this._buffer.byteOffset, this._buffer.byteLength);
                        for (let i = 0; i < values.length; i++) {
                            view.setUint16(i << 1, values[i], true);
                        }
                        break;
                    }
                    case DataType.DT_FLOAT: {
                        this._data = tensor.float_val || null;
                        break;
                    }
                    case DataType.DT_DOUBLE: {
                        this._data = tensor.double_val || null;
                        break;
                    }
                    // Small integer types all share the int_val field.
                    case DataType.DT_UINT8:
                    case DataType.DT_UINT16:
                    case DataType.DT_INT8:
                    case DataType.DT_INT16:
                    case DataType.DT_INT32: {
                        this._data = tensor.int_val || null;
                        break;
                    }
                    case DataType.DT_UINT32: {
                        this._data = tensor.uint32_val || null;
                        break;
                    }
                    case DataType.DT_INT64: {
                        this._data = tensor.int64_val || null;
                        break;
                    }
                    case DataType.DT_UINT64: {
                        this._data = tensor.uint64_val || null;
                        break;
                    }
                    case DataType.DT_BOOL: {
                        this._data = tensor.bool_val || null;
                        break;
                    }
                    case DataType.DT_STRING: {
                        this._data = tensor.string_val || null;
                        break;
                    }
                    default: {
                        throw new tf.Error("Unsupported tensor data type '" + tensor.dtype + "'.");
                    }
                }
            }
        }
        else {
            this._type = new tf.TensorType('?', null);
            this._tensor = null;
        }
    }
    get name() {
        return this._name;
    }
    get type() {
        return this._type;
    }
    get kind() {
        return this._kind;
    }
    set kind(value) {
        this._kind = value;
    }
    get state() {
        // Non-null state string means the tensor cannot be decoded.
        return this._context().state;
    }
    get value() {
        const context = this._context();
        if (context.state) {
            return null;
        }
        // No element cap when materializing the full value.
        context.limit = Number.MAX_SAFE_INTEGER;
        return this._decode(context, 0);
    }
    toString() {
        const context = this._context();
        if (context.state) {
            return '';
        }
        // Cap preview output at 10000 elements.
        context.limit = 10000;
        const value = this._decode(context, 0);
        return tf.Tensor._stringify(value, '', ' ');
    }
    // Build the decode context: validates dtype/shape, computes the element
    // count, and selects raw-byte (rawData) or typed-array (data) decoding.
    _context() {
        const context = {};
        context.state = null;
        context.index = 0;
        context.count = 0;
        context.size = 1;
        if (!this._tensor) {
            // NOTE(review): message reads 'has content' but this branch
            // means there is no tensor at all — confirm intended wording.
            context.state = 'Tensor has content.';
            return context;
        }
        if (!this._tensor.dtype) {
            context.state = 'Tensor has no data type.';
            return context;
        }
        const shape = this._tensor.tensor_shape || this._tensor.tensorShape;
        if (!shape || !shape.dim) {
            context.state = 'Tensor has no dimensions.';
            return context;
        }
        for (const dim of shape.dim) {
            context.size = context.size * (dim.size ? dim.size : 0);
        }
        if (this._buffer) {
            const DataType = tf.proto.tensorflow.DataType;
            switch (this._tensor.dtype) {
                case DataType.DT_BFLOAT16:
                case DataType.DT_HALF:
                case DataType.DT_FLOAT:
                case DataType.DT_DOUBLE:
                case DataType.DT_QINT8:
                case DataType.DT_QUINT8:
                case DataType.DT_INT8:
                case DataType.DT_UINT8:
                case DataType.DT_INT16:
                case DataType.DT_UINT16:
                case DataType.DT_INT32:
                case DataType.DT_UINT32:
                case DataType.DT_INT64:
                case DataType.DT_UINT64:
                    if (!this._buffer || this._buffer.length === 0) {
                        // NOTE(review): same inverted wording as above.
                        context.state = 'Tensor has content.';
                        return context;
                    }
                    context.rawData = new DataView(this._buffer.buffer, this._buffer.byteOffset, this._buffer.byteLength);
                    break;
                default:
                    break;
            }
        }
        else if (this._data) {
            if (this._data.length == context.size) {
                context.data = this._data;
            }
            else if (this._data.length === 1) {
                // A single value is broadcast across the whole tensor.
                context.data = new Array(context.size).fill(this._data[0]);
            }
            else {
                context.state = "Tensor has no data.";
                return context;
            }
        }
        else {
            context.state = "Tensor has no data.";
            return context;
        }
        if (!context.data && !context.rawData) {
            context.state = "Tensor data type '" + this.type.dataType + "' is not implemented.";
            return context;
        }
        context.shape = shape.dim.map((dim) => dim.size);
        return context;
    }
    // Recursively decode one dimension into nested arrays. Stops emitting
    // values (pushing '...') once context.limit elements have been read.
    _decode(context, dimension) {
        let shape = context.shape;
        if (shape.length == 0) {
            // Scalars decode as a single-element dimension.
            shape = [ 1 ];
        }
        const results = [];
        const size = shape[dimension];
        if (dimension == shape.length - 1) {
            for (let i = 0; i < size; i++) {
                if (context.count > context.limit) {
                    results.push('...');
                    return results;
                }
                if (context.data) {
                    const value = context.data[context.index++];
                    results.push((this._tensor.dtype == tf.proto.tensorflow.DataType.DT_STRING) ? tf.Utility.decodeText(value) : value);
                    context.count++;
                }
                else if (context.rawData) {
                    // Multi-byte reads are little-endian. getFloat16/getInt64/
                    // getUint64 are presumably DataView extensions provided by
                    // 'base' — not standard DataView methods; confirm.
                    switch (this._tensor.dtype) {
                        case tf.proto.tensorflow.DataType.DT_HALF:
                            results.push(context.rawData.getFloat16(context.index, true));
                            context.index += 2;
                            context.count++;
                            break;
                        case tf.proto.tensorflow.DataType.DT_BFLOAT16:
                        case tf.proto.tensorflow.DataType.DT_FLOAT:
                            results.push(context.rawData.getFloat32(context.index, true));
                            context.index += 4;
                            context.count++;
                            break;
                        case tf.proto.tensorflow.DataType.DT_DOUBLE:
                            results.push(context.rawData.getFloat64(context.index, true));
                            context.index += 8;
                            context.count++;
                            break;
                        case tf.proto.tensorflow.DataType.DT_INT8:
                            results.push(context.rawData.getInt8(context.index));
                            context.index += 1;
                            context.count++;
                            break;
                        case tf.proto.tensorflow.DataType.DT_UINT8:
                            results.push(context.rawData.getUint8(context.index));
                            context.index += 1;
                            context.count++;
                            break;
                        case tf.proto.tensorflow.DataType.DT_INT16:
                            results.push(context.rawData.getInt16(context.index));
                            context.index += 2;
                            context.count++;
                            break;
                        case tf.proto.tensorflow.DataType.DT_UINT16:
                            results.push(context.rawData.getUint16(context.index));
                            context.index += 2;
                            context.count++;
                            break;
                        case tf.proto.tensorflow.DataType.DT_INT32:
                            results.push(context.rawData.getInt32(context.index, true));
                            context.index += 4;
                            context.count++;
                            break;
                        case tf.proto.tensorflow.DataType.DT_UINT32:
                            results.push(context.rawData.getUint32(context.index, true));
                            context.index += 4;
                            context.count++;
                            break;
                        case tf.proto.tensorflow.DataType.DT_INT64:
                            results.push(context.rawData.getInt64(context.index, true));
                            context.index += 8;
                            context.count++;
                            break;
                        case tf.proto.tensorflow.DataType.DT_UINT64:
                            results.push(context.rawData.getUint64(context.index, true));
                            context.index += 8;
                            context.count++;
                            break;
                        case tf.proto.tensorflow.DataType.DT_QINT8:
                            results.push(context.rawData.getInt8(context.index, true));
                            context.index += 1;
                            context.count++;
                            break;
                        case tf.proto.tensorflow.DataType.DT_QUINT8:
                            results.push(context.rawData.getUint8(context.index, true));
                            context.index += 1;
                            context.count++;
                            break;
                        default:
                            throw new tf.Error("Unsupported data type '" + this._tensor.dtype + "'.");
                    }
                }
            }
        }
        else {
            for (let j = 0; j < size; j++) {
                if (context.count > context.limit) {
                    results.push('...');
                    return results;
                }
                results.push(this._decode(context, dimension + 1, shape));
            }
        }
        if (context.shape.length == 0) {
            // Unwrap the synthetic dimension added for scalars.
            return results[0];
        }
        return results;
    }
    // Render a decoded (possibly nested) value as indented multi-line text.
    static _stringify(value, indentation, indent) {
        if (Array.isArray(value)) {
            const result = [];
            result.push(indentation + '[');
            const items = value.map((item) => tf.Tensor._stringify(item, indentation + indent, indent));
            if (items.length > 0) {
                result.push(items.join(',\n'));
            }
            result.push(indentation + ']');
            return result.join('\n');
        }
        if (typeof value == 'string') {
            return indentation + value;
        }
        if (value == Infinity) {
            return indentation + 'Infinity';
        }
        if (value == -Infinity) {
            return indentation + '-Infinity';
        }
        if (isNaN(value)) {
            return indentation + 'NaN';
        }
        return indentation + value.toString();
    }
};
  1485. tf.TensorType = class {
  1486. constructor(dtype, shape) {
  1487. this._dtype = dtype;
  1488. this._shape = new tf.TensorShape(shape);
  1489. }
  1490. get dataType() {
  1491. return this._dtype ? tf.Utility.dataType(this._dtype) : '?';
  1492. }
  1493. get shape() {
  1494. return this._shape;
  1495. }
  1496. toString() {
  1497. return this.dataType + this._shape.toString();
  1498. }
  1499. };
  1500. tf.TensorShape = class {
  1501. constructor(shape) {
  1502. this._shape = shape;
  1503. }
  1504. get dimensions() {
  1505. if (this._shape && this._shape.dim) {
  1506. if (this._shape.unknown_rank) {
  1507. return null;
  1508. }
  1509. if (this._shape.dim.length == 0) {
  1510. return [];
  1511. }
  1512. if (this._shape.dim.length == 1 && !this._shape.dim[0].size) {
  1513. return [ 0 ];
  1514. }
  1515. return this._shape.dim.map((dim) => (dim.size && dim.size != -1) ? dim.size : '?');
  1516. }
  1517. return null;
  1518. }
  1519. toString() {
  1520. if (this._shape && this._shape.dim) {
  1521. if (this._shape.unknown_rank) {
  1522. return '[-]';
  1523. }
  1524. if (this._shape.dim.length == 0) {
  1525. return '';
  1526. }
  1527. if (this._shape.dim.length == 1 && !this._shape.dim[0].size) {
  1528. return '[0]';
  1529. }
  1530. return '[' + this._shape.dim.map((dim) => (dim.size && dim.size != -1) ? dim.size.toString() : '?').join(',') + ']';
  1531. }
  1532. return '?';
  1533. }
  1534. };
tf.TensorBundle = class {
    // TensorFlow checkpoint reader.
    // format 1: legacy SavedTensorSlices checkpoint (all tensor data inside the table).
    // format 2: V2 bundle - an '.index' table plus '.data-NNNNN-of-MMMMM' shard files.
    static open(stream, identifier, context) {
        const format = !identifier.toLowerCase().endsWith('.index') ? 1 : 2;
        const table = new tf.TensorBundle.Table(stream);
        // The bundle header is stored under the empty-string key.
        if (!table.entries.has('')) {
            throw new tf.Error('Bundle header not available.');
        }
        if (format === 1) {
            // Legacy format: no shard files to request.
            return Promise.resolve(new tf.TensorBundle(format, table.entries, []));
        }
        const buffer = table.entries.get('');
        const reader = protobuf.BinaryReader.open(buffer);
        const header = tf.proto.tensorflow.BundleHeaderProto.decode(reader);
        const numShards = header.num_shards;
        const promises = [];
        // Request every shard: '<basename>.data-00000-of-00001', ...
        for (let i = 0; i < numShards; i++) {
            const shardIndex = ('0000' + i).slice(-5);
            const shardCount = ('0000' + numShards).slice(-5);
            const filename = identifier.split('.');
            filename.pop();
            const basename = filename.join('.');
            const name = basename + '.data-' + shardIndex + '-of-' + shardCount;
            promises.push(context.request(name, null));
        }
        return Promise.all(promises).then((streams) => {
            return new tf.TensorBundle(format, table.entries, streams);
        }).catch((error) => {
            // Missing shard files are reported but not fatal; tensors are
            // created without data (streams === null below).
            context.exception(error, false);
            return new tf.TensorBundle(format, table.entries, null);
        });
    }
    constructor(format, entries, streams) {
        this._format = format;
        this._tensors = [];
        switch (format) {
            case 1: {
                // Legacy checkpoint: the '' entry holds the meta graph; every other
                // entry holds a SavedTensorSlices slice that may need concatenation.
                const buffer = entries.get('');
                const reader = protobuf.BinaryReader.open(buffer);
                const header = tf.proto.tensorflow.SavedTensorSlices.decode(reader);
                // name -> { key: <TensorProto field name>, value: data } or null
                // when the slices cannot be merged into a single value.
                const data = new Map();
                for (const pair of entries) {
                    if (pair[0] !== '' && pair[0] !== 'global_step') {
                        const buffer = pair[1];
                        const reader = protobuf.BinaryReader.open(buffer);
                        const slices = tf.proto.tensorflow.SavedTensorSlices.decode(reader);
                        const name = slices.data.name;
                        const tensor = slices.data.data;
                        if (!data.has(name)) {
                            if (tensor.tensor_content && tensor.tensor_content.length > 0) {
                                data.set(name, { key: 'tensor_content', value: tensor.tensor_content });
                            }
                            else {
                                // Exactly one populated '*_val' repeated field is expected.
                                const keys = Object.keys(tensor).filter((key) => key.endsWith('_val') && tensor[key] && tensor[key].length > 0);
                                data.set(name, keys.length == 1 ? { key: keys[0], value: tensor[keys[0]] } : null);
                            }
                        }
                        else {
                            // Subsequent slices for the same tensor: append to the same
                            // field, or give up (null) when the fields do not line up.
                            const item = data.get(name);
                            if (item !== null) {
                                if (tensor[item.key] && tensor[item.key].length > 0) {
                                    item.value = item.value.concat(tensor[item.key]);
                                }
                                else {
                                    data.set(name, null);
                                }
                            }
                        }
                    }
                }
                // Materialize a TensorProto per meta entry using the merged data.
                for (const meta of header.meta.tensor) {
                    if (meta.name !== 'global_step') {
                        const tensor = new tf.proto.tensorflow.TensorProto();
                        tensor.dtype = meta.type;
                        tensor.tensor_shape = meta.shape;
                        const item = data.get(meta.name);
                        if (item) {
                            tensor[item.key] = item.value;
                        }
                        this._tensors.push(new tf.Tensor(tensor, meta.name, null));
                    }
                }
                break;
            }
            case 2: {
                // V2 bundle: each table entry is a BundleEntryProto pointing at a
                // byte range (shard_id/offset/size) inside one of the shard streams.
                entries.forEach((buffer, name) => {
                    if (name !== '') {
                        const reader = protobuf.BinaryReader.open(buffer);
                        const entry = tf.proto.tensorflow.BundleEntryProto.decode(reader);
                        const tensor = new tf.proto.tensorflow.TensorProto();
                        tensor.dtype = entry.dtype;
                        tensor.tensor_shape = entry.shape;
                        // offset/size may be Long objects from protobuf decoding.
                        const offset = Number.isInteger(entry.offset) ? entry.offset : entry.offset.toNumber();
                        const size = Number.isInteger(entry.size) ? entry.size : entry.size.toNumber();
                        if (streams) {
                            const stream = streams[entry.shard_id];
                            stream.seek(offset);
                            tensor.tensor_content = stream.peek(size);
                        }
                        this._tensors.push(new tf.Tensor(tensor, name, null));
                    }
                });
                break;
            }
            default: {
                throw new tf.Error("Unsupported Tensor Bundle format '" + format + "'.");
            }
        }
    }
    get format() {
        return this._format;
    }
    get tensors() {
        return this._tensors;
    }
};
tf.TensorBundle.Table = class {
    constructor(stream) {
        // https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/lib/io/table.cc
        // LevelDB-style SSTable: a 48-byte footer (two varint64 block handles
        // plus an 8-byte magic number) locates the index block, whose entries in
        // turn locate the data blocks that hold the key/value pairs.
        this.entries = new Map();
        if (stream.length <= 54) {
            throw new tf.Error('Invalid index file size.');
        }
        // Read the footer from the end of the stream.
        stream.seek(-48);
        const buffer = stream.peek(48);
        const reader = new tf.BinaryReader(buffer);
        reader.seek(-8);
        // Table magic number (0xdb4775248b80fb57, little-endian byte order).
        const signature = [ 0x57, 0xfb, 0x80, 0x8b, 0x24, 0x75, 0x47, 0xdb ];
        if (!reader.read(8).every((value, index) => value === signature[index])) {
            throw new tf.Error('Invalid table signature.');
        }
        reader.seek(-48); // kEncodedLength
        reader.varint64(); // metaindex offset
        reader.varint64(); // metaindex size
        const indexOffset = reader.varint64();
        const indexSize = reader.varint64();
        const indexBlock = new tf.TensorBundle.Table.Block(stream, indexOffset, indexSize);
        // Each index entry value is a block handle: offset and size varints.
        for (const entry of indexBlock.entries) {
            const valueReader = new tf.BinaryReader(entry[1]);
            const offset = valueReader.varint64();
            const size = valueReader.varint64();
            const block = new tf.TensorBundle.Table.Block(stream, offset, size);
            for (const pair of block.entries) {
                this.entries.set(pair[0], pair[1]);
            }
        }
        // Leave the stream rewound for subsequent readers.
        stream.seek(0);
    }
};
tf.TensorBundle.Table.Block = class {
    constructor(stream, offset, size) {
        // https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/lib/io/block.cc
        // A table block: prefix-compressed key/value records followed by an
        // array of restart offsets and a trailing int32 restart count. The block
        // body is followed in the stream by a 1-byte compression flag and crc32.
        this.entries = new Map();
        stream.seek(offset);
        const buffer = stream.read(size); // blockContents
        const compression = stream.byte();
        stream.skip(4); // crc32
        let reader = new tf.BinaryReader(buffer);
        switch (compression) {
            case 0: // kNoCompression
                break;
            case 1: // kSnappyCompression
                reader = new tf.BinaryReader(reader.unsnappy());
                break;
            default:
                throw new tf.Error("Unsupported block compression '" + compression + "'.");
        }
        // Restart count sits in the last 4 bytes; offsets precede it.
        reader.seek(-4);
        const numRestarts = reader.int32();
        reader.seek(-4 - (4 * numRestarts));
        const restartOffsets = [];
        for (let i = 0; i < numRestarts; i++) {
            restartOffsets.push(reader.int32());
        }
        const decoder = new TextDecoder();
        for (let i = 0; i < numRestarts; i++) {
            reader.seek(restartOffsets[i]);
            let key = '';
            while (reader.position < reader.length) {
                const sharedSize = reader.varint32(); // index shared size
                const nonSharedSize = reader.varint32(); // index non shared size
                const valueSize = reader.varint32();
                // An all-zero record header terminates this restart interval.
                if (sharedSize === 0 && nonSharedSize === 0 && valueSize === 0) {
                    break;
                }
                // Each key shares a prefix of the previous key in the interval.
                key = key.substring(0, sharedSize);
                key = key + decoder.decode(reader.read(nonSharedSize));
                const value = reader.read(valueSize);
                this.entries.set(key, value);
            }
        }
    }
};
  1727. tf.BinaryReader = class {
  1728. constructor(buffer) {
  1729. this._buffer = buffer;
  1730. this._position = 0;
  1731. this._length = this._buffer.length;
  1732. this._dataView = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength);
  1733. this._decoder = new TextDecoder('utf-8');
  1734. }
  1735. get position() {
  1736. return this._position;
  1737. }
  1738. get length() {
  1739. return this._length;
  1740. }
  1741. seek(position) {
  1742. this._position = position >= 0 ? position : this._length + position;
  1743. if (this._position > this._length) {
  1744. throw new tf.Error('Expected ' + (this._position - this._length) + ' more bytes. The file might be corrupted. Unexpected end of file.');
  1745. }
  1746. }
  1747. skip(offset) {
  1748. this._position += offset;
  1749. if (this._position > this._length) {
  1750. throw new tf.Error('Expected ' + (this._position - this._length) + ' more bytes. The file might be corrupted. Unexpected end of file.');
  1751. }
  1752. }
  1753. read(size) {
  1754. const position = this._position;
  1755. this.skip(size);
  1756. return this._buffer.subarray(position, this._position);
  1757. }
  1758. byte() {
  1759. const position = this._position;
  1760. this.skip(1);
  1761. return this._dataView.getUint8(position);
  1762. }
  1763. uint16() {
  1764. const position = this._position;
  1765. this.skip(2);
  1766. return this._dataView.getUint16(position, true);
  1767. }
  1768. int32() {
  1769. const position = this._position;
  1770. this.skip(4);
  1771. return this._dataView.getInt32(position, true);
  1772. }
  1773. uint32() {
  1774. const position = this._position;
  1775. this.skip(4);
  1776. return this._dataView.getUint32(position, true);
  1777. }
  1778. uint64() {
  1779. const position = this._position;
  1780. this.skip(4);
  1781. return this._dataView.getUint64(position, true);
  1782. }
  1783. string() {
  1784. const size = this.uint32();
  1785. const buffer = this.read(size);
  1786. return this._decoder.decode(buffer);
  1787. }
  1788. varint32() {
  1789. return this.varint64();
  1790. }
  1791. varint64() {
  1792. let result = 0;
  1793. for (let shift = 0; shift <= 63; shift += 7) {
  1794. const byte = this.byte();
  1795. if (byte & 128) {
  1796. result |= (byte & 127) << shift;
  1797. }
  1798. else {
  1799. result |= byte << shift;
  1800. break;
  1801. }
  1802. }
  1803. return result;
  1804. }
  1805. unsnappy() {
  1806. const data = new Uint8Array(this.varint64());
  1807. const mask = [0, 0xff, 0xffff, 0xffffff, 0xffffffff];
  1808. let position = 0;
  1809. while (this._position < this._length) {
  1810. let length = 0;
  1811. const c = this.byte();
  1812. switch (c & 0x03) {
  1813. case 0: {
  1814. length = (c >>> 2) + 1;
  1815. if (length > 60) {
  1816. const short = length - 60;
  1817. length = (this.uint32() & mask[short]) + 1;
  1818. this._position += short - 4;
  1819. }
  1820. data.set(this.read(length), position);
  1821. break;
  1822. }
  1823. case 1: {
  1824. length = ((c >>> 2) & 0x07) + 4;
  1825. const offset = this.byte() + ((c >>> 5) << 8);
  1826. data.set(data.subarray(position - offset, position - offset + length), position);
  1827. break;
  1828. }
  1829. case 2: {
  1830. length = (c >>> 2) + 1;
  1831. const offset = this.uint16();
  1832. data.set(data.subarray(position - offset, position - offset + length), position);
  1833. break;
  1834. }
  1835. case 3: {
  1836. length = (c >>> 2) + 1;
  1837. const offset = this.uint32();
  1838. data.set(data.subarray(position - offset, position - offset + length), position);
  1839. break;
  1840. }
  1841. default: {
  1842. break;
  1843. }
  1844. }
  1845. position += length;
  1846. }
  1847. return data;
  1848. }
  1849. };
  1850. tf.EventFileReader = class {
  1851. static open(stream) {
  1852. if (stream.length < 16) {
  1853. return null;
  1854. }
  1855. const masked_crc32c = (bytes) => {
  1856. const poly = 0x82f63b78;
  1857. let crc = 0xffffffff;
  1858. for (let n = 0; n < bytes.length; n++) {
  1859. crc ^= bytes[n];
  1860. crc = crc & 1 ? (crc >>> 1) ^ poly : crc >>> 1;
  1861. crc = crc & 1 ? (crc >>> 1) ^ poly : crc >>> 1;
  1862. crc = crc & 1 ? (crc >>> 1) ^ poly : crc >>> 1;
  1863. crc = crc & 1 ? (crc >>> 1) ^ poly : crc >>> 1;
  1864. crc = crc & 1 ? (crc >>> 1) ^ poly : crc >>> 1;
  1865. crc = crc & 1 ? (crc >>> 1) ^ poly : crc >>> 1;
  1866. crc = crc & 1 ? (crc >>> 1) ^ poly : crc >>> 1;
  1867. crc = crc & 1 ? (crc >>> 1) ^ poly : crc >>> 1;
  1868. crc = crc >>> 0;
  1869. }
  1870. crc = crc ^ 0xffffffff;
  1871. crc = crc >>> 0;
  1872. crc = ((crc >> 15) | (crc << 17)) + 0xa282ead8;
  1873. crc = crc >>> 0;
  1874. return crc;
  1875. };
  1876. const buffer = stream.peek(12);
  1877. const reader = new tf.BinaryReader(buffer);
  1878. const length_bytes = reader.read(8);
  1879. const length_crc = reader.uint32();
  1880. if (masked_crc32c(length_bytes) !== length_crc) {
  1881. return null;
  1882. }
  1883. return new tf.EventFileReader(stream);
  1884. }
  1885. constructor(stream) {
  1886. this._stream = stream;
  1887. }
  1888. read() {
  1889. if (this._stream.position < this._stream.length) {
  1890. const uint64 = (stream) => {
  1891. const buffer = stream.read(8);
  1892. const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength);
  1893. return view.getUint64(0, true).toNumber();
  1894. };
  1895. const length = uint64(this._stream);
  1896. this._stream.skip(4); // masked crc of length
  1897. const buffer = this._stream.read(length);
  1898. const reader = protobuf.BinaryReader.open(buffer);
  1899. const event = tf.proto.tensorflow.Event.decode(reader);
  1900. this._stream.skip(4); // masked crc of data
  1901. return event;
  1902. }
  1903. return null;
  1904. }
  1905. };
  1906. tf.GraphMetadata = class {
  1907. constructor(metadata, library) {
  1908. this._metadata = metadata;
  1909. this._functions = new Map();
  1910. this._attributes = new Map();
  1911. this._visibleCache = new Map();
  1912. if (library && Array.isArray(library.function)) {
  1913. for (const func of library.function) {
  1914. const name = func.signature.name;
  1915. if (this._functions.has(func.name)) {
  1916. throw new tf.Error("Duplicate function name '" + func.name + "'.");
  1917. }
  1918. this._functions.set(name, func);
  1919. }
  1920. }
  1921. }
  1922. type(name) {
  1923. if (this._functions.has(name)) {
  1924. const func = this._functions.get(name);
  1925. if (func instanceof tf.Function) {
  1926. return func;
  1927. }
  1928. this._functions.set(name, new tf.Function(this, func.signature.name, func));
  1929. return this._functions.get(name);
  1930. }
  1931. const type = this._metadata.type(name);
  1932. if (!type) {
  1933. this._functions.set(name, new tf.Function(this, name, null));
  1934. return this._functions.get(name);
  1935. }
  1936. return type;
  1937. }
  1938. attribute(type, name) {
  1939. const key = type + '::' + name;
  1940. if (!this._attributes.has(key)) {
  1941. const schema = this.type(type);
  1942. if (schema && schema.attributes) {
  1943. for (const attribute of schema.attributes) {
  1944. const key = type + '::' + attribute.name;
  1945. this._attributes.set(key, attribute);
  1946. }
  1947. }
  1948. }
  1949. return this._attributes.get(key);
  1950. }
  1951. visible(type, name) {
  1952. if (!this._visibleCache.has(type)) {
  1953. const set = new Set();
  1954. const schema = this.type(type);
  1955. if (schema && schema.inputs) {
  1956. for (const input of schema.inputs) {
  1957. if (input.typeAttr) {
  1958. set.add(input.typeAttr);
  1959. }
  1960. else if (input.typeListAttr) {
  1961. set.add(input.typeListAttr);
  1962. }
  1963. if (input.numberAttr) {
  1964. set.add(input.numberAttr);
  1965. }
  1966. }
  1967. }
  1968. if (schema && schema.outputs) {
  1969. for (const output of schema.outputs) {
  1970. if (output.typeAttr) {
  1971. set.add(output.typeAttr);
  1972. }
  1973. else if (output.typeListAttr) {
  1974. set.add(output.typeListAttr);
  1975. }
  1976. if (output.numberAttr) {
  1977. set.add(output.numberAttr);
  1978. }
  1979. }
  1980. }
  1981. this._visibleCache.set(type, set);
  1982. }
  1983. return !this._visibleCache.get(type).has(name);
  1984. }
  1985. };
  1986. tf.Utility = class {
  1987. static decodeText(value) {
  1988. if (typeof value === 'string') {
  1989. return value;
  1990. }
  1991. if (value.length === 0) {
  1992. return '';
  1993. }
  1994. tf.Utility._utf8Decoder = tf.Utility._utf8Decoder || new TextDecoder('utf-8');
  1995. return tf.Utility._utf8Decoder.decode(value);
  1996. }
  1997. static dataType(type) {
  1998. if (!tf.Utility._dataTypes) {
  1999. const dataTypes = new Map();
  2000. const DataType = tf.proto.tensorflow.DataType;
  2001. for (let key of Object.keys(DataType)) {
  2002. const value = DataType[key];
  2003. key = key.startsWith('DT_') ? key.substring(3) : key;
  2004. dataTypes.set(value, key.toLowerCase());
  2005. }
  2006. dataTypes.set(DataType.DT_HALF, 'float16');
  2007. dataTypes.set(DataType.DT_FLOAT, 'float32');
  2008. dataTypes.set(DataType.DT_DOUBLE, 'float64');
  2009. tf.Utility._dataTypes = dataTypes;
  2010. }
  2011. return tf.Utility._dataTypes.has(type) ? tf.Utility._dataTypes.get(type) : '?';
  2012. }
  2013. static dataTypeKey(type) {
  2014. if (!tf.Utility._dataTypeKeys) {
  2015. const dataTypeKeys = new Map();
  2016. const DataType = tf.proto.tensorflow.DataType;
  2017. for (let key of Object.keys(DataType)) {
  2018. const value = DataType[key];
  2019. key = key.startsWith('DT_') ? key.substring(3) : key;
  2020. dataTypeKeys.set(key.toLowerCase(), value);
  2021. }
  2022. dataTypeKeys.set('float16', DataType.DT_HALF);
  2023. dataTypeKeys.set('float32', DataType.DT_FLOAT);
  2024. dataTypeKeys.set('float64', DataType.DT_DOUBLE);
  2025. tf.Utility._dataTypeKeys = dataTypeKeys;
  2026. }
  2027. return tf.Utility._dataTypeKeys.get(type);
  2028. }
  2029. static createGraph(metadata, nodes, output_arg_map) {
  2030. const context = {};
  2031. context.inputs = [];
  2032. context.outputs = [];
  2033. context.nodes = [];
  2034. const namespaces = new Set();
  2035. const node_map = new Map();
  2036. for (const node of nodes) {
  2037. const nodeName = node.name;
  2038. node_map.set(nodeName, node);
  2039. if (node.op != 'Const') {
  2040. const index = nodeName.lastIndexOf('/');
  2041. if (index != -1) {
  2042. const namespace = nodeName.substring(0, index);
  2043. namespaces.add(namespace);
  2044. }
  2045. }
  2046. node.output = [];
  2047. }
  2048. for (const node of nodes) {
  2049. const inputs = node.input;
  2050. node.input = [];
  2051. node.controlDependencies = [];
  2052. for (const input of inputs) {
  2053. const split = input.split(':', 3);
  2054. const input_name = split[0];
  2055. const input_index = split.length == 1 ? 0 : parseInt(split[split.length - 1]);
  2056. const from_name = input_name.startsWith('^') ? input_name.substring(1) : input_name;
  2057. const from = node_map.get(from_name);
  2058. const output_name = input_index == 0 ? from_name : from_name + ':' + input_index.toString();
  2059. const input_arg = from ? { name: output_name, from: from } : { name: output_name };
  2060. if (input_name.startsWith('^')) {
  2061. node.controlDependencies.push(input_arg);
  2062. }
  2063. else {
  2064. node.input.push(input_arg);
  2065. }
  2066. if (from) {
  2067. for (let i = from.output.length; i <= input_index; i++) {
  2068. from.output.push({ name: i === 0 ? from_name : from_name + ':' + i.toString(), to: [] });
  2069. }
  2070. from.output[input_index].to.push(node);
  2071. }
  2072. }
  2073. }
  2074. if (output_arg_map) {
  2075. for (const node of nodes) {
  2076. if (output_arg_map.has(node.name)) {
  2077. node.output.push({ name: node.name, to: [] });
  2078. }
  2079. }
  2080. }
  2081. const initializers = new Map();
  2082. const map_tensor = (name, node, kind) => {
  2083. if (node && node.op === 'Const' && node.input.length === 0 && node.output.length === 1 && node.output[0].to.length === 1 && node.controlDependencies.length === 0) {
  2084. const value = node.attr.value;
  2085. if (value && Object.prototype.hasOwnProperty.call(value, 'tensor')) {
  2086. const tensor = new tf.Tensor(value.tensor, name, kind);
  2087. return new tf.Argument(name, tensor.type, tensor);
  2088. }
  2089. }
  2090. return null;
  2091. };
  2092. const map_resource = (name, node, tensor) => {
  2093. if (node && node.op === 'Placeholder' && node.input.length === 0 && node.output.length === 1 && node.controlDependencies.length === 0) {
  2094. const dtype = node.attr.dtype.type;
  2095. if (dtype === tf.proto.tensorflow.DataType.DT_RESOURCE) {
  2096. return new tf.Argument(name, null, tensor);
  2097. }
  2098. }
  2099. return null;
  2100. };
  2101. for (const node of node_map.values()) {
  2102. if (node.op === 'Identity' && node.input.length === 1 && node.output.length === 1 && node.output[0].to.length === 1 && node.controlDependencies.length === 0) {
  2103. const initializer = map_tensor(node.name, node.input[0].from, 'Identity Constant');
  2104. if (initializer) {
  2105. initializers.set(initializer.name, initializer);
  2106. node_map.delete(initializer.name);
  2107. node_map.delete(node.input[0].name);
  2108. }
  2109. const identity = node.input[0].from;
  2110. if (identity && identity.op === 'Identity' && identity.input.length === 1 && identity.output.length === 1 && node.output[0].to.length === 1 && node.controlDependencies.length === 0) {
  2111. const initializer = map_tensor(node.name, identity.input[0].from, 'Identity Constant');
  2112. if (initializer) {
  2113. initializers.set(initializer.name, initializer);
  2114. node_map.delete(initializer.name);
  2115. node_map.delete(initializer.name);
  2116. node_map.delete(identity.name);
  2117. node_map.delete(node.name);
  2118. }
  2119. }
  2120. }
  2121. }
  2122. for (const node of node_map.values()) {
  2123. const initializer = map_tensor(node.name, node, 'Const');
  2124. if (initializer) {
  2125. initializers.set(initializer.name, initializer);
  2126. node_map.delete(node.name);
  2127. node_map.delete(initializer.name);
  2128. }
  2129. }
  2130. for (const node of node_map.values()) {
  2131. if (node.op === 'ReadVariableOp' && node.input.length === 1 && node.output.length === 1 && node.output[0].to.length === 1 && node.controlDependencies.length === 0) {
  2132. if (node.attr && node.attr.dtype && node.attr._output_shapes && node.attr._output_shapes.list && node.attr._output_shapes.list.shape) {
  2133. const tensor = new tf.proto.tensorflow.TensorProto();
  2134. tensor.dtype = node.attr.dtype.type;
  2135. tensor.tensor_shape = node.attr._output_shapes.list.shape[0];
  2136. const name = node.name;
  2137. const initializer = map_resource(name, node.input[0].from, new tf.Tensor(tensor, name, 'Resource Variable'));
  2138. if (initializer) {
  2139. initializers.set(initializer.name, initializer);
  2140. node_map.delete(initializer.name);
  2141. node_map.delete(node.input[0].name);
  2142. }
  2143. }
  2144. }
  2145. }
  2146. const input_map = new Map();
  2147. for (const node of node_map.values()) {
  2148. if (node.op == 'Placeholder' && node.input.length === 0 && node.output.length === 1 && node.controlDependencies.length === 0) {
  2149. const dtype = node.attr.dtype;
  2150. const shape = node.attr.shape;
  2151. if (dtype && dtype.type && shape && shape.shape) {
  2152. const name = node.name;
  2153. const type = new tf.TensorType(dtype.type, shape.shape);
  2154. const argument = new tf.Argument(name, type, null);
  2155. input_map.set(name, new tf.Parameter(name, [ argument ]));
  2156. node_map.delete(name);
  2157. }
  2158. }
  2159. }
  2160. const updatePyTorch = (node_map) => {
  2161. for (const node of node_map.values()) {
  2162. if (node.op === 'prim::Constant' && node.input.length === 0 && node.controlDependencies.length === 0 && node.attr && Object.keys(node.attr).length === 1 && node.attr.attr && node.attr.attr.s) {
  2163. const value = tf.Utility.decodeText(node.attr.attr.s);
  2164. const match = /{\s*value\s*:\s*(.*)\s*}/.exec(value);
  2165. if (match) {
  2166. node.value = match[1].trim();
  2167. }
  2168. const empty = /{\s*}/.exec(value);
  2169. if (empty) {
  2170. node.value = null;
  2171. }
  2172. }
  2173. if (node.op === 'prim::GetAttr' && node.input.length === 1 && node.controlDependencies.length === 0 && node.attr && Object.keys(node.attr).length === 1 && node.attr.attr && node.attr.attr.s) {
  2174. const value = tf.Utility.decodeText(node.attr.attr.s);
  2175. const match = /{\s*name\s*:\s*([A-za-z0-9_]*)\s*}/.exec(value);
  2176. if (match) {
  2177. node.value = match[1].trim();
  2178. }
  2179. }
  2180. if (node.op === 'IO Node' && node.controlDependencies.length === 0) {
  2181. const shape = node.attr && node.attr._output_shapes && node.attr._output_shapes.list && node.attr._output_shapes.list.shape ? node.attr._output_shapes.list.shape[0] : null;
  2182. const type = shape ? new tf.TensorType('?', shape) : null;
  2183. if (node.input.length === 0 && node.output.length === 1) {
  2184. context.inputs.push(new tf.Parameter(node.name, [
  2185. new tf.Argument(node.output[0].name, type, null)
  2186. ]));
  2187. node_map.delete(node.name);
  2188. }
  2189. if (node.input.length === 1 && node.output.length === 0) {
  2190. context.outputs.push(new tf.Parameter(node.name, [
  2191. new tf.Argument(node.input[0].name, type, null)
  2192. ]));
  2193. node_map.delete(node.name);
  2194. }
  2195. }
  2196. if (Object.keys(node.attr).length === 2 &&
  2197. node.attr.attr && node.attr.attr.s && node.attr._output_shapes) {
  2198. const value = tf.Utility.decodeText(node.attr.attr.s);
  2199. if (/\s*/.exec(value) || /{\s*}/.exec(value)) {
  2200. node.attr = {};
  2201. delete node._output_shapes;
  2202. }
  2203. }
  2204. }
  2205. const remove_input = (input, node) => {
  2206. const from = input.from;
  2207. if (from) {
  2208. for (const output of from.output) {
  2209. output.to = output.to.filter((to) => to !== node);
  2210. }
  2211. if (from.output.every((output) => output.to.length === 0) && from.controlDependencies.length === 0) {
  2212. from.remove = true;
  2213. }
  2214. delete input.from;
  2215. }
  2216. };
  2217. for (const node of node_map.values()) {
  2218. if (node.op === 'prim::ListConstruct' && node.input.every((input) => input.from.value !== undefined) && node.controlDependencies.length === 0) {
  2219. node.value = node.input.map((input) => input.from.value);
  2220. for (const input of node.input) {
  2221. remove_input(input, node);
  2222. }
  2223. node.input = [];
  2224. }
  2225. }
  2226. for (const node of node_map.values()) {
  2227. const remove = new Set();
  2228. for (let i = 0; i < node.input.length; i++) {
  2229. const input = node.input[i];
  2230. const from = input.from;
  2231. if (from) {
  2232. if (from.op === 'prim::GetAttr' && from.input.length === 1 && from.output.length === 1 && from.controlDependencies.length === 0 && from.value !== undefined) {
  2233. remove_input(input, node);
  2234. input.label = from.value;
  2235. const tensor = new tf.Tensor(null, input.name, from.op);
  2236. const argument = new tf.Argument(input.name, null, tensor);
  2237. initializers.set(input.name, argument);
  2238. }
  2239. if (from.op === 'prim::Constant' && from.input.length === 0 && from.controlDependencies.length === 0 && from.value !== undefined) {
  2240. input.constant = from.value;
  2241. remove_input(input, node);
  2242. remove.add(input.name);
  2243. }
  2244. if (from.op === 'prim::ListConstruct' && from.output.length === 1 && from.controlDependencies.length === 0 && from.value !== undefined) {
  2245. input.list = from.value;
  2246. remove_input(input, node);
  2247. remove.add(input.name);
  2248. }
  2249. }
  2250. }
  2251. if (node.__metadata__) {
  2252. for (const metadata of node.__metadata__) {
  2253. const parameters = Array.prototype.slice.call(metadata.inputs || []).concat(Array.prototype.slice.call(metadata.attributes || []));
  2254. let match = true;
  2255. const inputs = Array.from(node.input);
  2256. if (inputs.length > parameters.length) {
  2257. match = false;
  2258. }
  2259. while (inputs.length > 0 && match) {
  2260. match = false;
  2261. const input = inputs.shift();
  2262. delete input.metadata;
  2263. const parameter = parameters.shift();
  2264. switch (parameter.type) {
  2265. case 'Tensor': {
  2266. if ((input.constant === undefined && input.list === undefined) || input.constant === null) {
  2267. input.metadata = parameter;
  2268. match = true;
  2269. }
  2270. else {
  2271. inputs.unshift(input);
  2272. match = true;
  2273. }
  2274. break;
  2275. }
  2276. case 'int64': {
  2277. const value = parseInt(input.constant);
  2278. if (input.constant !== undefined && Number.isInteger(value)) {
  2279. input.attr = new tf.proto.tensorflow.AttrValue();
  2280. input.attr.i = value;
  2281. input.attr.metadata = parameter;
  2282. match = true;
  2283. }
  2284. break;
  2285. }
  2286. case 'float32': {
  2287. const value = parseFloat(input.constant);
  2288. if (input.constant !== undefined && !isNaN(value)) {
  2289. input.attr = new tf.proto.tensorflow.AttrValue();
  2290. input.attr.f = value;
  2291. input.attr.metadata = parameter;
  2292. match = true;
  2293. }
  2294. break;
  2295. }
  2296. case 'int64[]': {
  2297. if (Array.isArray(input.list)) {
  2298. const list = input.list.map((item) => parseInt(item));
  2299. if (list.every((value) => Number.isInteger(value))) {
  2300. input.attr = new tf.proto.tensorflow.AttrValue();
  2301. input.attr.list = new tf.proto.tensorflow.ListValue();
  2302. input.attr.list.i = list;
  2303. input.attr.metadata = parameter;
  2304. match = true;
  2305. }
  2306. }
  2307. break;
  2308. }
  2309. case 'boolean': {
  2310. if (input.constant === 'false' || input.constant === '0') {
  2311. input.attr = new tf.proto.tensorflow.AttrValue();
  2312. input.attr.b = false;
  2313. input.attr.metadata = parameter;
  2314. match = true;
  2315. }
  2316. else if (input.constant === 'true' || input.constant === '1') {
  2317. input.attr = new tf.proto.tensorflow.AttrValue();
  2318. input.attr.b = true;
  2319. input.attr.metadata = parameter;
  2320. match = true;
  2321. }
  2322. break;
  2323. }
  2324. case 'Scalar': {
  2325. const value = parseInt(input.constant);
  2326. if (input.constant !== undefined && Number.isInteger(value)) {
  2327. input.attr = new tf.proto.tensorflow.AttrValue();
  2328. input.attr.i = value;
  2329. input.attr.metadata = parameter;
  2330. match = true;
  2331. }
  2332. break;
  2333. }
  2334. default:
  2335. break;
  2336. }
  2337. }
  2338. if (match) {
  2339. node.metadata = Object.assign({}, metadata);
  2340. node.metadata.name = node.op;
  2341. break;
  2342. }
  2343. else {
  2344. for (const input of node.input) {
  2345. delete input.metadata;
  2346. delete input.attr;
  2347. }
  2348. }
  2349. }
  2350. }
  2351. node.input = node.input.filter((input, index) => {
  2352. if (input.attr) {
  2353. const name = input.attr.metadata ? input.attr.metadata.name : index.toString();
  2354. node.attr[name] = input.attr;
  2355. }
  2356. else if (input.constant !== undefined && input.constant !== null) {
  2357. const attr = new tf.proto.tensorflow.AttrValue();
  2358. attr.s = input.constant;
  2359. node.attr[index.toString()] = attr;
  2360. }
  2361. else if (input.list !== undefined) {
  2362. const attr = new tf.proto.tensorflow.AttrValue();
  2363. attr.list = new tf.proto.tensorflow.ListValue();
  2364. attr.list.s = input.list;
  2365. node.attr[index.toString()] = attr;
  2366. }
  2367. return !remove.has(input.name);
  2368. });
  2369. }
  2370. for (const node of node_map.values()) {
  2371. if (node.op === 'prim::GetAttr' && node.remove) {
  2372. node_map.delete(node.name);
  2373. }
  2374. if (node.op === 'prim::Constant' && node.remove) {
  2375. node_map.delete(node.name);
  2376. }
  2377. if (node.op === 'prim::ListConstruct' && node.remove) {
  2378. node_map.delete(node.name);
  2379. }
  2380. }
  2381. };
  2382. updatePyTorch(node_map);
  2383. for (const input of input_map.values()) {
  2384. context.inputs.push(input);
  2385. }
  2386. for (const node of node_map.values()) {
  2387. context.nodes.push(new tf.Node(metadata, node, namespaces, initializers));
  2388. }
  2389. return context;
  2390. }
  2391. };
  2392. tf.JsonReader = class {
  2393. static decodeGraphDef(json) {
  2394. const message = new tf.proto.tensorflow.GraphDef();
  2395. message.node = json.node.map((node) => tf.JsonReader.decodeNodeDef(node));
  2396. message.library = tf.JsonReader.decodeFunctionDefLibrary(json.library);
  2397. if (message.versions) {
  2398. message.versions = tf.JsonReader.decodeVersionDef(json.versions);
  2399. }
  2400. return message;
  2401. }
  2402. static decodeNodeDef(json) {
  2403. const message = new tf.proto.tensorflow.NodeDef();
  2404. message.name = json.name;
  2405. message.op = json.op;
  2406. message.input = json.input || [];
  2407. if (json.device) {
  2408. message.device = json.device;
  2409. }
  2410. message.attr = {};
  2411. if (json.attr) {
  2412. for (const entry of Object.entries(json.attr)) {
  2413. message.attr[entry[0]] = tf.JsonReader.decodeAttrValue(entry[1]);
  2414. }
  2415. }
  2416. return message;
  2417. }
  2418. static decodeAttrValue(json) {
  2419. const message = new tf.proto.tensorflow.AttrValue();
  2420. const keys = Object.keys(json);
  2421. if (keys.length !== 1) {
  2422. throw new tf.Error("Unsupported JSON tensorflow.AttrValue '" + JSON.stringify(keys) + "'.");
  2423. }
  2424. const key = keys[0];
  2425. const value = json[key];
  2426. switch (key) {
  2427. case 'type':
  2428. message.type = typeof value === 'number' ? value : tf.proto.tensorflow.DataType[value];
  2429. break;
  2430. case 'shape':
  2431. message.shape = tf.JsonReader.decodeTensorShapeProto(value);
  2432. break;
  2433. case 'tensor':
  2434. message.tensor = tf.JsonReader.decodeTensorProto(value);
  2435. break;
  2436. case 'b':
  2437. message[key] = value;
  2438. break;
  2439. case 'f':
  2440. message[key] = parseFloat(value);
  2441. break;
  2442. case 'i':
  2443. message[key] = parseInt(value, 10);
  2444. break;
  2445. case 's':
  2446. message[key] = typeof value === 'string' ? atob(value) : tf.Utility.decodeText(Uint8Array.from(value));
  2447. break;
  2448. case 'list':
  2449. message.list = tf.JsonReader.decodeAttrValueListValue(json.list);
  2450. break;
  2451. case 'func':
  2452. message[key]= value;
  2453. break;
  2454. default:
  2455. throw new tf.Error("Unsupported JSON 'tensorflow.AttrValue." + key + "'.");
  2456. }
  2457. return message;
  2458. }
  2459. static decodeAttrValueListValue(json) {
  2460. const message = new tf.proto.tensorflow.AttrValue.ListValue();
  2461. const properties = Object.keys(json);
  2462. if (properties.length > 0) {
  2463. const keys = properties.filter((key) => Array.isArray(json[key]) && json[key].length > 0);
  2464. if (keys.length !== 1) {
  2465. throw new tf.Error("Unsupported JSON tensorflow.AttrValue.ListValue '" + JSON.stringify(keys) + "'.");
  2466. }
  2467. const key = keys[0];
  2468. const list = json[key];
  2469. switch (key) {
  2470. case 'i':
  2471. message[key] = list.map((value) => parseInt(value, 10));
  2472. break;
  2473. case 's':
  2474. message[key] = list.map((value) => typeof value === 'string' ? atob(value) : tf.Utility.decodeText(Uint8Array.from(value)));
  2475. break;
  2476. case 'type':
  2477. message[key] = list.map((value) => tf.proto.tensorflow.DataType[value]);
  2478. break;
  2479. case 'shape':
  2480. message[key] = list.map((shape) => tf.JsonReader.decodeTensorShapeProto(shape));
  2481. break;
  2482. default:
  2483. throw new tf.Error("Unsupported JSON 'tensorflow.AttrValue.ListValue." + key + "'.");
  2484. }
  2485. }
  2486. return message;
  2487. }
  2488. static decodeTensorProto(json) {
  2489. const message = new tf.proto.tensorflow.TensorProto();
  2490. message.dtype = tf.proto.tensorflow.DataType[json.dtype];
  2491. message.tensor_shape = tf.JsonReader.decodeTensorShapeProto(json.tensorShape);
  2492. return message;
  2493. }
  2494. static decodeTensorShapeProto(json) {
  2495. const message = new tf.proto.tensorflow.TensorShapeProto();
  2496. message.dim = (json.dim || []).map((json) => {
  2497. const message = new tf.proto.tensorflow.TensorShapeProto.Dim();
  2498. message.size = json.size;
  2499. message.name = json.name;
  2500. return message;
  2501. });
  2502. return message;
  2503. }
  2504. static decodeVersionDef(json) {
  2505. const message = new tf.proto.tensorflow.VersionDef();
  2506. message.producer = json.producer;
  2507. message.min_consumer = json.min_consumer;
  2508. message.bad_consumers = json.bad_consumers ? json.bad_consumers : [];
  2509. return message;
  2510. }
  2511. static decodeFunctionDefLibrary(json) {
  2512. const message = new tf.proto.tensorflow.FunctionDefLibrary();
  2513. message.function = json ? (json.function || []).map((json) => tf.JsonReader.decodeFunctionDef(json)) : [];
  2514. return message;
  2515. }
  2516. static decodeFunctionDef(json) {
  2517. const message = new tf.proto.tensorflow.FunctionDef();
  2518. message.signature = tf.JsonReader.decodeOpDef(json.signature);
  2519. message.attr = {};
  2520. if (json.attr) {
  2521. for (const entry of Object.entries(json.attr)) {
  2522. message.attr[entry[0]] = tf.JsonReader.decodeAttrValue(entry[1]);
  2523. }
  2524. }
  2525. message.nodeDef = (json.nodeDef || []).map((json) => tf.JsonReader.decodeNodeDef(json));
  2526. message.ret = json.ret;
  2527. message.control_ret = json.control_ret;
  2528. return message;
  2529. }
  2530. static decodeOpDef(json) {
  2531. const message = new tf.proto.tensorflow.OpDef();
  2532. message.name = json.name;
  2533. message.input_arg = json.inputArg.map((json) => tf.JsonReader.decodeArgDef(json));
  2534. message.output_arg = json.outputArg.map((json) => tf.JsonReader.decodeArgDef(json));
  2535. return message;
  2536. }
  2537. static decodeArgDef(json) {
  2538. const message = new tf.proto.tensorflow.OpDef.ArgDef();
  2539. message.name = json.name;
  2540. message.description = json.decscription;
  2541. return message;
  2542. }
  2543. };
  2544. tf.Error = class extends Error {
  2545. constructor(message) {
  2546. super(message);
  2547. this.name = 'Error loading TensorFlow model.';
  2548. }
  2549. };
  2550. if (typeof module !== 'undefined' && typeof module.exports === 'object') {
  2551. module.exports.ModelFactory = tf.ModelFactory;
  2552. }