// NativeCodeGenerator.cpp
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "BackEnd.h"
  6. #include "Base\ScriptContextProfiler.h"
  7. #if DBG
  8. Js::JavascriptMethod checkCodeGenThunk;
  9. #endif
  10. #ifdef ENABLE_PREJIT
  11. #define IS_PREJIT_ON() (Js::Configuration::Global.flags.Prejit)
  12. #else
  13. #define IS_PREJIT_ON() (DEFAULT_CONFIG_Prejit)
  14. #endif
  15. #define ASSERT_THREAD() AssertMsg(mainThreadId == GetCurrentThreadContextId(), \
  16. "Cannot use this member of native code generator from thread other than the creating context's current thread")
// Constructs the native code generator for a script context and registers it
// as a manager with the thread context's job processor so JIT work items can
// be queued and processed.
NativeCodeGenerator::NativeCodeGenerator(Js::ScriptContext * scriptContext)
    : JsUtil::WaitableJobManager(scriptContext->GetThreadContext()->GetJobProcessor()),
    scriptContext(scriptContext),
    pendingCodeGenWorkItems(0),
    queuedFullJitWorkItemCount(0),
    foregroundAllocators(nullptr),
    backgroundAllocators(nullptr),
    byteCodeSizeGenerated(0),
    isClosed(false),
    isOptimizedForManyInstances(scriptContext->GetThreadContext()->IsOptimizedForManyInstances()),
    SetNativeEntryPoint(Js::FunctionBody::DefaultSetNativeEntryPoint),
    freeLoopBodyManager(scriptContext->GetThreadContext()->GetJobProcessor()),
    hasUpdatedQForDebugMode(false)
#ifdef PROFILE_EXEC
    , foregroundCodeGenProfiler(nullptr)
    , backgroundCodeGenProfiler(nullptr)
#endif
{
    freeLoopBodyManager.SetNativeCodeGen(this);

#if DBG_DUMP
    // When the AsmDumpMode flag supplies a path, open that file for the
    // generated-assembly dump.
    if (Js::Configuration::Global.flags.IsEnabled(Js::AsmDumpModeFlag)
        && (Js::Configuration::Global.flags.AsmDumpMode != nullptr))
    {
        bool fileOpened = false;
        fileOpened = (0 == _wfopen_s(&this->asmFile, Js::Configuration::Global.flags.AsmDumpMode, L"wt"));
        if (!fileOpened)
        {
            // The exact path could not be opened; retry with a numeric suffix
            // appended to the name (e.g. path0, path1, ... path999).
            size_t len = wcslen(Js::Configuration::Global.flags.AsmDumpMode);
            if (len < _MAX_PATH - 5) // leave room for up to 4 digits + null terminator
            {
                wchar_t filename[_MAX_PATH];
                wcscpy_s(filename, _MAX_PATH, Js::Configuration::Global.flags.AsmDumpMode);
                wchar_t * number = filename + len;
                for (int i = 0; i < 1000; i++)
                {
                    _itow_s(i, number, 5, 10); // overwrite the suffix in place (base 10)
                    fileOpened = (0 == _wfopen_s(&this->asmFile, filename, L"wt"));
                    if (fileOpened)
                    {
                        break;
                    }
                }
            }
            if (!fileOpened)
            {
                // Fall back to no dump file; callers treat nullptr as "use the console".
                this->asmFile = nullptr;
                AssertMsg(0, "Could not open file for AsmDump. The output will goto standard console");
            }
        }
    }
    else
    {
        this->asmFile = nullptr;
    }
#endif

#if DBG
    // Remember the creating thread; ASSERT_THREAD() checks against this id.
    this->mainThreadId = GetCurrentThreadContextId();
#endif

    Processor()->AddManager(this);
    this->freeLoopBodyManager.SetAutoClose(false);
}
// Destroys the code generator. Close() must already have been called
// (asserted below). May run on a background GC thread, hence the thread-id
// fixups on the page allocators before deletion.
NativeCodeGenerator::~NativeCodeGenerator()
{
    Assert(this->IsClosed());

#ifdef PROFILE_EXEC
    if (this->foregroundCodeGenProfiler != nullptr)
    {
        this->foregroundCodeGenProfiler->Release();
    }
#endif

    if(this->foregroundAllocators != nullptr)
    {
        HeapDelete(this->foregroundAllocators);
    }

    if (this->backgroundAllocators)
    {
#if DBG
        // PageAllocator is thread agile. This destructor can be called from background GC thread.
        // We have already removed this manager from the job queue and hence its fine to set the threadId to -1.
        // We can't DissociatePageAllocator here as its allocated ui thread.
        //this->Processor()->DissociatePageAllocator(allocator->GetPageAllocator());
        this->backgroundAllocators->emitBufferManager.GetHeapPageAllocator()->ClearConcurrentThreadId();
        this->backgroundAllocators->emitBufferManager.GetPreReservedHeapPageAllocator()->ClearConcurrentThreadId();
        this->backgroundAllocators->GetPageAllocator()->ClearConcurrentThreadId();
#endif
        // The native code generator may be deleted after Close was called on the job processor. In that case, the
        // background thread is no longer running, so clean things up in the foreground.
        HeapDelete(this->backgroundAllocators);
    }

#ifdef PROFILE_EXEC
    if (Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag))
    {
        // Release every profiler in the background profiler list (linked via 'next').
        while (this->backgroundCodeGenProfiler)
        {
            Js::ScriptContextProfiler *codegenProfiler = this->backgroundCodeGenProfiler;
            this->backgroundCodeGenProfiler = this->backgroundCodeGenProfiler->next;
            codegenProfiler->Release();
        }
    }
    else
    {
        // Without the Profile flag no background profilers should ever be created.
        Assert(this->backgroundCodeGenProfiler == nullptr);
    }
#endif
}
// Shuts the code generator down: closes the loop-body free manager, detaches
// from the job processor, fails all still-pending work items, and decommits
// JIT code memory. Must run before destruction (the destructor asserts
// IsClosed()).
void NativeCodeGenerator::Close()
{
    Assert(!this->IsClosed());

    // Close FreeLoopBodyJobManager first, as it depends on NativeCodeGenerator to be open before it's removed
    this->freeLoopBodyManager.Close();

    // Remove only if it is not updated in the debug mode (and which goes to interpreter mode).
    if (!hasUpdatedQForDebugMode || Js::Configuration::Global.EnableJitInDebugMode())
    {
        Processor()->RemoveManager(this);
    }

    this->isClosed = true;

    Assert(!queuedFullJitWorkItems.Head());
    Assert(queuedFullJitWorkItemCount == 0);

    // Report every still-pending work item as failed, then drop the list.
    // Grab 'next' before JobProcessed since that call may unlink the job.
    for(JsUtil::Job *job = workItems.Head(); job;)
    {
        JsUtil::Job *const next = job->Next();
        JobProcessed(job, /*succeeded*/ false);
        job = next;
    }
    workItems.Clear();

    // Only decommit here instead of releasing the memory, so we retain control over these addresses
    // Mitigate against the case the entry point is called after the script site is closed
    if (this->backgroundAllocators)
    {
        this->backgroundAllocators->emitBufferManager.Decommit();
    }

    if (this->foregroundAllocators)
    {
        this->foregroundAllocators->emitBufferManager.Decommit();
    }

#if DBG_DUMP
    if (this->asmFile != nullptr)
    {
        if(0 != fclose(this->asmFile))
        {
            AssertMsg(0, "Could not close file for AsmDump. You may ignore this warning.");
        }
    }
#endif
}
  162. #if DBG_DUMP
  163. extern Func *CurrentFunc;
  164. #endif
  165. JsFunctionCodeGen *
  166. NativeCodeGenerator::NewFunctionCodeGen(Js::FunctionBody *functionBody, Js::EntryPointInfo* info)
  167. {
  168. return HeapNewNoThrow(JsFunctionCodeGen, this, functionBody, info, this->IsInDebugMode());
  169. }
  170. JsLoopBodyCodeGen *
  171. NativeCodeGenerator::NewLoopBodyCodeGen(Js::FunctionBody *functionBody, Js::EntryPointInfo* info)
  172. {
  173. return HeapNewNoThrow(JsLoopBodyCodeGen, this, functionBody, info, this->IsInDebugMode());
  174. }
  175. #ifdef ENABLE_PREJIT
  176. bool
  177. NativeCodeGenerator::DoBackEnd(Js::FunctionBody *fn)
  178. {
  179. if (PHASE_OFF(Js::BackEndPhase, fn))
  180. {
  181. return false;
  182. }
  183. if (fn->IsAsmJSModule() || fn->IsGeneratorAndJitIsDisabled())
  184. {
  185. return false;
  186. }
  187. return true;
  188. }
// Pre-JITs a function body and, recursively, all of its nested function
// bodies. Only runs under prejit (asserted below); the function must be the
// real (non-deferred) function body.
void
NativeCodeGenerator::GenerateAllFunctions(Js::FunctionBody * fn)
{
    Assert(IS_PREJIT_ON());
    Assert(fn->GetDefaultFunctionEntryPointInfo()->entryPointIndex == 0);

    // Make sure this isn't a deferred function
    Assert(fn->GetFunctionBody() == fn);
    Assert(!fn->IsDeferred());

    if (DoBackEnd(fn))
    {
        if (fn->GetLoopCount() != 0 && fn->ForceJITLoopBody() && !IsInDebugMode())
        {
            // Only jit the loop body with /force:JITLoopBody
            for (uint i = 0; i < fn->GetLoopCount(); i++)
            {
                Js::LoopHeader * loopHeader = fn->GetLoopHeader(i);
                Js::EntryPointInfo * entryPointInfo = loopHeader->GetCurrentEntryPointInfo();
                this->GenerateLoopBody(fn, loopHeader, entryPointInfo);
            }
        }
        else
        {
            // A JIT attempt should have already been made through GenerateFunction
            Assert(!fn->GetDefaultFunctionEntryPointInfo()->IsNotScheduled());
        }
    }

    // Recurse into every nested function body.
    for (uint i = 0; i < fn->GetNestedCount(); i++)
    {
        Js::FunctionBody* functionToJIT = fn->GetNestedFunctionForExecution(i)->GetFunctionBody();
        GenerateAllFunctions(functionToJIT);
    }
}
  221. #endif
  222. #if _M_ARM
  223. USHORT ArmExtractThumbImmediate16(PUSHORT address)
  224. {
  225. return ((address[0] << 12) & 0xf000) | // bits[15:12] in OP0[3:0]
  226. ((address[0] << 1) & 0x0800) | // bits[11] in OP0[10]
  227. ((address[1] >> 4) & 0x0700) | // bits[10:8] in OP1[14:12]
  228. ((address[1] >> 0) & 0x00ff); // bits[7:0] in OP1[7:0]
  229. }
  230. void ArmInsertThumbImmediate16(PUSHORT address, USHORT immediate)
  231. {
  232. USHORT opcode0;
  233. USHORT opcode1;
  234. opcode0 = address[0];
  235. opcode1 = address[1];
  236. opcode0 &= ~((0xf000 >> 12) | (0x0800 >> 1));
  237. opcode1 &= ~((0x0700 << 4) | (0x00ff << 0));
  238. opcode0 |= (immediate & 0xf000) >> 12; // bits[15:12] in OP0[3:0]
  239. opcode0 |= (immediate & 0x0800) >> 1; // bits[11] in OP0[10]
  240. opcode1 |= (immediate & 0x0700) << 4; // bits[10:8] in OP1[14:12]
  241. opcode1 |= (immediate & 0x00ff) << 0; // bits[7:0] in OP1[7:0]
  242. address[0] = opcode0;
  243. address[1] = opcode1;
  244. }
  245. #endif
// Applies PE base relocations to a single function that has been copied out
// of a module image into a separate buffer.
//   function       - destination buffer holding the copied function bytes
//   functionOffset - the function's offset from the start of the text section
//   functionSize   - size of the function in bytes
//   module         - base of the PE image in file layout (raw-data offsets are used)
//   imageBase      - image base the module was linked against
//   textHeader     - section header for the code (.text) section
//   relocHeader    - section header for the relocation (.reloc) section
// Absolute addresses inside the function are rewritten so they point into the
// 'function' buffer instead of the original image.
void DoFunctionRelocations(BYTE *function, DWORD functionOffset, DWORD functionSize, BYTE *module, size_t imageBase, IMAGE_SECTION_HEADER *textHeader, IMAGE_SECTION_HEADER *relocHeader)
{
    // Walk the relocation blocks in the .reloc section. Each block covers one
    // 4KB page (VirtualAddress) and is SizeOfBlock bytes long.
    PIMAGE_BASE_RELOCATION relocationBlock = (PIMAGE_BASE_RELOCATION)(module + relocHeader->PointerToRawData);
    for (; relocationBlock->VirtualAddress > 0 && ((BYTE *)relocationBlock < (module + relocHeader->PointerToRawData + relocHeader->SizeOfRawData)); )
    {
        // Page offset of this block relative to the start of the text section.
        DWORD blockOffset = relocationBlock->VirtualAddress - textHeader->VirtualAddress;

        // Skip relocation blocks that are before the function
        if ((blockOffset + 0x1000) > functionOffset)
        {
            // Entries follow the block header: 2 bytes each, type in the top
            // 4 bits and page-relative offset in the low 12 bits.
            unsigned short *relocation = (unsigned short *)((unsigned char *)relocationBlock + sizeof(IMAGE_BASE_RELOCATION));
            for (uint index = 0; index < ((relocationBlock->SizeOfBlock - sizeof(IMAGE_BASE_RELOCATION)) / 2); index++, relocation++)
            {
                int type = *relocation >> 12;
                int offset = *relocation & 0xfff;

                // If we are past the end of the function, we can stop.
                if ((blockOffset + offset) >= (functionOffset + functionSize))
                {
                    break;
                }

                // Skip entries that land before the function's first byte.
                if ((blockOffset + offset) < functionOffset)
                {
                    continue;
                }

                switch (type)
                {
                case IMAGE_REL_BASED_ABSOLUTE:
                    // Alignment padding entry; nothing to patch.
                    break;

#if _M_IX86
                case IMAGE_REL_BASED_HIGHLOW:
                    {
                        // Rebase a 32-bit absolute address so it points into
                        // the copied function buffer. The target must lie
                        // within the function itself (asserted).
                        DWORD *patchAddrHL = (DWORD *) (function + blockOffset + offset - functionOffset);
                        DWORD patchAddrHLOffset = *patchAddrHL - imageBase - textHeader->VirtualAddress;
                        Assert((patchAddrHLOffset > functionOffset) && (patchAddrHLOffset < (functionOffset + functionSize)));
                        *patchAddrHL = patchAddrHLOffset - functionOffset + (DWORD)function;
                    }
                    break;

#elif defined(_M_X64_OR_ARM64)
                case IMAGE_REL_BASED_DIR64:
                    {
                        // Rebase a 64-bit absolute address; same constraint as
                        // the 32-bit case above.
                        ULONGLONG *patchAddr64 = (ULONGLONG *) (function + blockOffset + offset - functionOffset);
                        ULONGLONG patchAddr64Offset = *patchAddr64 - imageBase - textHeader->VirtualAddress;
                        Assert((patchAddr64Offset > functionOffset) && (patchAddr64Offset < (functionOffset + functionSize)));
                        *patchAddr64 = patchAddr64Offset - functionOffset + (ULONGLONG)function;
                    }
                    break;

#else
                case IMAGE_REL_BASED_THUMB_MOV32:
                    {
                        // ARM/Thumb2: the 32-bit address is split across a
                        // MOVW/MOVT pair; extract, rebase, and re-insert both
                        // 16-bit halves.
                        USHORT *patchAddr = (USHORT *) (function + blockOffset + offset - functionOffset);
                        DWORD address = ArmExtractThumbImmediate16(patchAddr) | (ArmExtractThumbImmediate16(patchAddr + 2) << 16);
                        address = address - imageBase - textHeader->VirtualAddress - functionOffset + (DWORD)function;
                        ArmInsertThumbImmediate16(patchAddr, (USHORT)(address & 0xFFFF));
                        ArmInsertThumbImmediate16(patchAddr + 2, (USHORT)(address >> 16));
                    }
                    break;
#endif

                default:
                    // Unexpected relocation type for this target architecture.
                    Assert(false);
                    break;
                }
            }
        }

        // Advance to the next (variable-size) relocation block.
        relocationBlock = (PIMAGE_BASE_RELOCATION) (((BYTE *) relocationBlock) + relocationBlock->SizeOfBlock);
    }
}
  311. class AutoRestoreDefaultEntryPoint
  312. {
  313. public:
  314. AutoRestoreDefaultEntryPoint(Js::FunctionBody* functionBody):
  315. functionBody(functionBody)
  316. {
  317. this->oldDefaultEntryPoint = functionBody->GetDefaultFunctionEntryPointInfo();
  318. this->oldOriginalEntryPoint = functionBody->GetOriginalEntryPoint();
  319. this->newEntryPoint = functionBody->CreateNewDefaultEntryPoint();
  320. }
  321. ~AutoRestoreDefaultEntryPoint()
  322. {
  323. if (newEntryPoint && !newEntryPoint->IsCodeGenDone())
  324. {
  325. functionBody->RestoreOldDefaultEntryPoint(oldDefaultEntryPoint, oldOriginalEntryPoint, newEntryPoint);
  326. }
  327. }
  328. private:
  329. Js::FunctionBody* functionBody;
  330. Js::FunctionEntryPointInfo* oldDefaultEntryPoint;
  331. Js::JavascriptMethod oldOriginalEntryPoint;
  332. Js::FunctionEntryPointInfo* newEntryPoint;
  333. };
  334. //static
  335. void NativeCodeGenerator::Jit_TransitionFromSimpleJit(void *const framePointer)
  336. {
  337. TransitionFromSimpleJit(
  338. Js::ScriptFunction::FromVar(Js::JavascriptCallStackLayout::FromFramePointer(framePointer)->functionObject));
  339. }
//static
// Moves a function out of the simple-JIT tier (reached via
// Jit_TransitionFromSimpleJit). If the function's default entry point is
// still the simple-JIT one, schedules a full JIT and advances the execution
// mode; otherwise just makes sure the function object points at the latest
// default entry point.
void NativeCodeGenerator::TransitionFromSimpleJit(Js::ScriptFunction *const function)
{
    Assert(function);

    Js::FunctionBody *const functionBody = function->GetFunctionBody();
    Js::FunctionEntryPointInfo *const defaultEntryPointInfo = functionBody->GetDefaultFunctionEntryPointInfo();
    if(defaultEntryPointInfo == functionBody->GetSimpleJitEntryPointInfo())
    {
        Assert(functionBody->GetExecutionMode() == ExecutionMode::SimpleJit);
        Assert(function->GetFunctionEntryPointInfo() == defaultEntryPointInfo);

        // The latest entry point is the simple JIT, transition to the next execution mode and schedule a full JIT
        bool functionEntryPointUpdated = functionBody->GetScriptContext()->GetNativeCodeGenerator()->GenerateFunction(functionBody, function);

        if (functionEntryPointUpdated)
        {
            // Transition to the next execution mode after scheduling a full JIT, in case of OOM before the entry point is changed
            const bool transitioned = functionBody->TryTransitionToNextExecutionMode();
            Assert(transitioned);

            if (PHASE_TRACE(Js::SimpleJitPhase, functionBody))
            {
                wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                Output::Print(
                    L"SimpleJit (TransitionFromSimpleJit): function: %s (%s)",
                    functionBody->GetDisplayName(),
                    functionBody->GetDebugNumberSet(debugStringBuffer));
                Output::Flush();
            }
        }
        return;
    }

    if(function->GetFunctionEntryPointInfo() != defaultEntryPointInfo)
    {
        // A full JIT may have already been scheduled, or some entry point info got expired before the simple JIT entry point
        // was ready. In any case, the function's entry point info is not the latest, so update it.
        function->UpdateThunkEntryPoint(defaultEntryPointInfo, functionBody->GetDirectEntryPoint(defaultEntryPointInfo));
    }
}
  376. #ifdef IR_VIEWER
// Re-JITs a function synchronously in the foreground using a stack-allocated
// work item, for the purpose of capturing IR viewer output, and marshals the
// result into the requesting script context.
Js::Var
NativeCodeGenerator::RejitIRViewerFunction(Js::FunctionBody *fn, Js::ScriptContext *requestContext)
{
    /* Note: adapted from NativeCodeGenerator::GenerateFunction (NativeCodeGenerator.cpp) */

    Js::ScriptContext *scriptContext = fn->GetScriptContext();
    PageAllocator *pageAllocator = scriptContext->GetThreadContext()->GetPageAllocator();
    NativeCodeGenerator *nativeCodeGenerator = scriptContext->GetNativeCodeGenerator();

    // AutoRestoreDefaultEntryPoint restores the original default entry point
    // on scope exit if the new one never completes codegen.
    AutoRestoreDefaultEntryPoint autoRestore(fn);
    Js::FunctionEntryPointInfo * entryPoint = fn->GetDefaultFunctionEntryPointInfo();

    // Build a full-JIT work item on the stack, flagged for IR viewer capture.
    JsFunctionCodeGen workitem(this, fn, entryPoint, this->IsInDebugMode());
    workitem.isRejitIRViewerFunction = true;
    workitem.irViewerRequestContext = scriptContext;

    workitem.SetJitMode(ExecutionMode::FullJit);

    // Mark the entry point so nothing tries to free the stack-allocated work
    // item, then run codegen in the foreground (third argument true).
    entryPoint->SetCodeGenPendingWithStackAllocatedWorkItem();
    entryPoint->SetCodeGenQueued();
    const auto recyclableData = GatherCodeGenData(fn, fn, entryPoint, &workitem);
    workitem.SetRecyclableData(recyclableData);
    nativeCodeGenerator->CodeGen(pageAllocator, &workitem, true);

    // Marshal the IR viewer output into the requesting context before returning.
    return Js::CrossSite::MarshalVar(requestContext, workitem.GetIRViewerOutput(scriptContext));
}
  397. #endif /* IR_VIEWER */
  398. ///----------------------------------------------------------------------------
  399. ///
  400. /// NativeCodeGenerator::GenerateFunction
  401. ///
  402. /// This is the main entry point for the runtime to call the native code
  403. /// generator.
  404. ///
  405. ///----------------------------------------------------------------------------
  406. bool
  407. NativeCodeGenerator::GenerateFunction(Js::FunctionBody *fn, Js::ScriptFunction * function)
  408. {
  409. ASSERT_THREAD();
  410. Assert(!fn->GetIsFromNativeCodeModule());
  411. Assert(fn->GetScriptContext()->GetNativeCodeGenerator() == this);
  412. Assert(fn->GetFunctionBody() == fn);
  413. Assert(!fn->IsDeferred());
  414. #if !defined(_M_ARM64)
  415. if (fn->IsGeneratorAndJitIsDisabled())
  416. {
  417. // JITing generator functions is not complete nor stable yet so it is off by default.
  418. // Also try/catch JIT support in generator functions is not a goal for threshold
  419. // release so JITing generators containing try blocks is disabled for now.
  420. return false;
  421. }
  422. if (IsInDebugMode() && fn->GetHasTry())
  423. {
  424. // Under debug mode disable JIT for functions that:
  425. // - have try
  426. return false;
  427. }
  428. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  429. if (Js::Configuration::Global.flags.Interpret &&
  430. fn->GetDisplayName() &&
  431. ::wcsstr(Js::Configuration::Global.flags.Interpret, fn->GetDisplayName()))
  432. {
  433. return false;
  434. }
  435. #endif
  436. if (fn->GetLoopCount() != 0 && fn->ForceJITLoopBody() && !IsInDebugMode())
  437. {
  438. // Don't code gen the function if the function has loop, ForceJITLoopBody is on,
  439. // unless we are in debug mode in which case JIT loop body is disabled, even if it's forced.
  440. return false;
  441. }
  442. // Create a work item with null entry point- we'll set it once its allocated
  443. AutoPtr<JsFunctionCodeGen> workItemAutoPtr(this->NewFunctionCodeGen(fn, nullptr));
  444. if ((JsFunctionCodeGen*) workItemAutoPtr == nullptr)
  445. {
  446. // OOM, just skip this work item and return.
  447. return false;
  448. }
  449. Js::FunctionEntryPointInfo* entryPointInfo = nullptr;
  450. if (function != nullptr)
  451. {
  452. entryPointInfo = fn->CreateNewDefaultEntryPoint();
  453. }
  454. else
  455. {
  456. entryPointInfo = fn->GetDefaultFunctionEntryPointInfo();
  457. Assert(fn->IsInterpreterThunk() || fn->IsSimpleJitOriginalEntryPoint());
  458. }
  459. #ifdef ASMJS_PLAT
  460. if (fn->GetIsAsmjsMode())
  461. {
  462. AnalysisAssert(function != nullptr);
  463. Js::FunctionEntryPointInfo* oldFuncObjEntryPointInfo = (Js::FunctionEntryPointInfo*)function->GetEntryPointInfo();
  464. Assert(oldFuncObjEntryPointInfo->GetIsAsmJSFunction()); // should be asmjs entrypoint info
  465. // Set asmjs to be true in entrypoint
  466. entryPointInfo->SetIsAsmJSFunction(true);
  467. // Move the ModuleAddress from old Entrypoint to new entry point
  468. entryPointInfo->SetModuleAddress(oldFuncObjEntryPointInfo->GetModuleAddress());
  469. // Update the native address of the older entry point - this should be either the TJ entrypoint or the Interpreter Entry point
  470. entryPointInfo->SetNativeAddress(oldFuncObjEntryPointInfo->address);
  471. // have a reference to TJ entrypointInfo, this will be queued for collection in checkcodegen
  472. entryPointInfo->SetOldFunctionEntryPointInfo(oldFuncObjEntryPointInfo);
  473. Assert(PHASE_ON1(Js::AsmJsJITTemplatePhase) || (!oldFuncObjEntryPointInfo->GetIsTJMode() && !entryPointInfo->GetIsTJMode()));
  474. // this changes the address in the entrypointinfo to be the AsmJsCodgenThunk
  475. function->UpdateThunkEntryPoint(entryPointInfo, NativeCodeGenerator::CheckAsmJsCodeGenThunk);
  476. if (PHASE_TRACE1(Js::AsmjsEntryPointInfoPhase))
  477. Output::Print(L"New Entrypoint is CheckAsmJsCodeGenThunk for function: %s\n", fn->GetDisplayName());
  478. }
  479. else
  480. #endif
  481. {
  482. fn->SetCheckCodeGenEntryPoint(entryPointInfo, NativeCodeGenerator::CheckCodeGenThunk);
  483. if (function != nullptr)
  484. {
  485. function->UpdateThunkEntryPoint(entryPointInfo, NativeCodeGenerator::CheckCodeGenThunk);
  486. }
  487. }
  488. JsFunctionCodeGen * workitem = workItemAutoPtr.Detach();
  489. workitem->SetEntryPointInfo(entryPointInfo);
  490. entryPointInfo->SetCodeGenPending(workitem);
  491. InterlockedIncrement(&pendingCodeGenWorkItems);
  492. if(!IS_PREJIT_ON())
  493. {
  494. workItems.LinkToEnd(workitem);
  495. return true;
  496. }
  497. const ExecutionMode prejitJitMode = PrejitJitMode(fn);
  498. workitem->SetJitMode(prejitJitMode);
  499. try
  500. {
  501. AddToJitQueue(workitem, /*prioritize*/ true, /*lock*/ true, function);
  502. }
  503. catch (...)
  504. {
  505. // Add the item back to the list if AddToJitQueue throws. The position in the list is not important.
  506. workitem->ResetJitMode();
  507. workItems.LinkToEnd(workitem);
  508. throw;
  509. }
  510. fn->TraceExecutionMode("Prejit (before)");
  511. if(prejitJitMode == ExecutionMode::SimpleJit)
  512. {
  513. fn->TransitionToSimpleJitExecutionMode();
  514. }
  515. else
  516. {
  517. Assert(prejitJitMode == ExecutionMode::FullJit);
  518. fn->TransitionToFullJitExecutionMode();
  519. }
  520. fn->TraceExecutionMode("Prejit");
  521. Processor()->PrioritizeJobAndWait(this, entryPointInfo, function);
  522. CheckCodeGenDone(fn, entryPointInfo, function);
  523. return true;
  524. #else
  525. return false;
  526. #endif
  527. }
// Schedules a JIT compilation of one loop body for an interpreted function.
// 'loopHeader' identifies the loop within 'fn', 'entryPoint' is the loop entry
// point to populate, and localSlots/localCount describe the interpreter's
// current locals, which are sampled below to seed the JIT's initial value types.
// May block until the compile completes (no background processor, or forced
// loop-body JIT).
void NativeCodeGenerator::GenerateLoopBody(Js::FunctionBody * fn, Js::LoopHeader * loopHeader, Js::EntryPointInfo* entryPoint, uint localCount, Js::Var localSlots[])
{
    ASSERT_THREAD();
    Assert(fn->GetScriptContext()->GetNativeCodeGenerator() == this);
    Assert(entryPoint->address == nullptr);
#if DBG_DUMP
    if (PHASE_TRACE1(Js::JITLoopBodyPhase))
    {
        fn->DumpFunctionId(true);
        Output::Print(L": %-20s LoopBody Start Loop: %2d ByteCode: %4d (%4d,%4d)\n", fn->GetDisplayName(), fn->GetLoopNumber(loopHeader),
            loopHeader->endOffset - loopHeader->startOffset, loopHeader->startOffset, loopHeader->endOffset);
        Output::Flush();
    }
#endif
    // If the parent function is JITted, no need to JIT this loop
    // CanReleaseLoopHeaders is a quick and dirty way of checking if the
    // function is currently being interpreted. If it is being interpreted,
    // We'd still like to jit the loop body.
    // We reset the interpretCount to 0 in case we switch back to the interpreter
    if (fn->GetNativeEntryPointUsed() && fn->GetCanReleaseLoopHeaders() && (!fn->GetIsAsmJsFunction() || !(loopHeader->GetCurrentEntryPointInfo()->GetIsTJMode())))
    {
        loopHeader->ResetInterpreterCount();
        return;
    }
    if (fn->GetIsAsmJsFunction())
    {
        // Propagate asm.js state (module address, asm.js flag) from the
        // function's default entry point onto the loop entry point.
        Js::FunctionEntryPointInfo* functionEntryPointInfo = (Js::FunctionEntryPointInfo*) fn->GetDefaultEntryPointInfo();
        Js::LoopEntryPointInfo* loopEntryPointInfo = (Js::LoopEntryPointInfo*)entryPoint;
        loopEntryPointInfo->SetIsAsmJSFunction(true);
        loopEntryPointInfo->SetModuleAddress(functionEntryPointInfo->GetModuleAddress());
    }
    JsLoopBodyCodeGen * workitem = this->NewLoopBodyCodeGen(fn, entryPoint);
    if (!workitem)
    {
        // OOM, just skip this work item and return.
        return;
    }
    entryPoint->SetCodeGenPending(workitem);
    workitem->loopHeader = loopHeader;
    try
    {
        if (!fn->GetIsAsmJsFunction()) // not needed for asmjs as we don't profile in asm mode
        {
            // Sample the interpreter's profiled locals (registers past the
            // constants) into a symId -> ValueType map so the JIT can start
            // with observed value types.
            const uint profiledRegBegin = fn->GetConstantCount();
            const uint profiledRegEnd = localCount;
            if (profiledRegBegin < profiledRegEnd)
            {
                workitem->symIdToValueTypeMap =
                    HeapNew(JsLoopBodyCodeGen::SymIdToValueTypeMap, &HeapAllocator::Instance, profiledRegEnd - profiledRegBegin);
                Recycler *recycler = fn->GetScriptContext()->GetRecycler();
                for (uint i = profiledRegBegin; i < profiledRegEnd; i++)
                {
                    // Slots may hold uninitialized memory; IsValidVar screens
                    // out anything ValueType::Merge could not safely read.
                    if (localSlots[i] && IsValidVar(localSlots[i], recycler))
                    {
                        workitem->symIdToValueTypeMap->Add(i, ValueType::Uninitialized.Merge(localSlots[i]));
                    }
                }
            }
        }
        workitem->SetJitMode(ExecutionMode::FullJit);
        AddToJitQueue(workitem, /*prioritize*/ true, /*lock*/ true);
    }
    catch (...)
    {
        // If adding to the JIT queue fails we need to revert the state of the entry point
        // and delete the work item
        entryPoint->RevertToNotScheduled();
        workitem->Delete();
        throw;
    }
    if (!Processor()->ProcessesInBackground() || fn->ForceJITLoopBody())
    {
        // No background JIT thread (or loop-body JIT is forced): block until done.
        Processor()->PrioritizeJobAndWait(this, entryPoint);
    }
}
// Conservatively checks whether 'var' is safe for ValueType::Merge(Var) to
// read. The interpreter's local slots may contain uninitialized memory, so
// every recycler-allocated object Merge would touch (object header, type,
// dynamic type, type handler, object array) is validated with
// Recycler::IsValidObject first. No virtual functions are called anywhere in
// this path: even for memory the recycler still owns, the vtable may be stale.
bool
NativeCodeGenerator::IsValidVar(const Js::Var var, Recycler *const recycler)
{
    using namespace Js;
    Assert(var);
    Assert(recycler);
    // We may be handling uninitialized memory here, need to ensure that each recycler-allocated object is valid before it is
    // read. Virtual functions shouldn't be called because the type ID may match by coincidence but the vtable can still be
    // invalid, even if it is deemed to be a "valid" object, since that only validates that the memory is still owned by the
    // recycler. This function validates the memory that ValueType::Merge(Var) reads.
    if(TaggedInt::Is(var))
    {
        return true;
    }
#if FLOATVAR
    // Unboxed (NaN-tagged) floats carry no heap pointer, nothing to validate.
    if(JavascriptNumber::Is_NoTaggedIntCheck(var))
    {
        return true;
    }
#endif
    RecyclableObject *const recyclableObject = RecyclableObject::FromVar(var);
    if(!recycler->IsValidObject(recyclableObject, sizeof(*recyclableObject)))
    {
        return false;
    }
    INT_PTR vtable = VirtualTableInfoBase::GetVirtualTable(var);
    if (vtable <= USHRT_MAX || (vtable & 1))
    {
        // Don't have a vtable, is it not a var, may be a frame display?
        return false;
    }
    Type *const type = recyclableObject->GetType();
    if(!recycler->IsValidObject(type, sizeof(*type)))
    {
        return false;
    }
#if !FLOATVAR
    // NOTE(review): with boxed floats this check is deliberately placed after
    // the type validation above - presumably Is_NoTaggedIntCheck reads the
    // object's type in this configuration; confirm before reordering.
    if(JavascriptNumber::Is_NoTaggedIntCheck(var))
    {
        return true;
    }
#endif
    const TypeId typeId = type->GetTypeId();
    if(typeId < static_cast<TypeId>(0))
    {
        return false;
    }
    if(!DynamicType::Is(typeId))
    {
        // Static types: nothing further that Merge reads needs validating.
        return true;
    }
    DynamicType *const dynamicType = static_cast<DynamicType *>(type);
    if(!recycler->IsValidObject(dynamicType, sizeof(*dynamicType)))
    {
        return false;
    }
    DynamicTypeHandler *const typeHandler = dynamicType->GetTypeHandler();
    if(!recycler->IsValidObject(typeHandler, sizeof(*typeHandler)))
    {
        return false;
    }
    // Not using DynamicObject::FromVar since there's a virtual call in there
    DynamicObject *const object = static_cast<DynamicObject *>(recyclableObject);
    if(!recycler->IsValidObject(object, sizeof(*object)))
    {
        return false;
    }
    if(typeId != TypeIds_Array)
    {
        // Non-array dynamic objects: only the optional object array remains.
        ArrayObject* const objectArray = object->GetObjectArrayUnchecked();
        return objectArray == nullptr || recycler->IsValidObject(objectArray, sizeof(*objectArray));
    }
    // Not using JavascriptArray::FromVar since there's a virtual call in there
    JavascriptArray *const array = static_cast<JavascriptArray *>(object);
    if(!recycler->IsValidObject(array, sizeof(*array)))
    {
        return false;
    }
    return true;
}
#if ENABLE_DEBUG_CONFIG_OPTIONS
// Seed for the randomly induced codegen failures used by the
// -InduceCodeGenFailure test flag (initialized lazily in CodeGen below).
volatile UINT_PTR NativeCodeGenerator::CodegenFailureSeed = 0;
#endif
// Compiles one work item (whole function or single loop body) to native code,
// on either the foreground script thread or a background JIT thread.
// If the backend throws Js::RejitException, the optimization that caused it is
// disabled in the profile info and the compile is retried in a loop. May throw
// OOM / StackOverflowException / OperationAbortedException to the caller.
void
NativeCodeGenerator::CodeGen(PageAllocator * pageAllocator, CodeGenWorkItem* workItem, const bool foreground)
{
    if(foreground)
    {
        // Func::Codegen has a lot of things on the stack, so probe the stack here instead
        PROBE_STACK(scriptContext, Js::Constants::MinStackJITCompile);
    }
#if ENABLE_DEBUG_CONFIG_OPTIONS
    // Test hook (-InduceCodeGenFailure): randomly throw one of the exceptions
    // codegen can legitimately produce, to exercise the failure paths.
    if (!foreground && Js::Configuration::Global.flags.IsEnabled(Js::InduceCodeGenFailureFlag))
    {
        if (NativeCodeGenerator::CodegenFailureSeed == 0)
        {
            // Initialize the seed
            NativeCodeGenerator::CodegenFailureSeed = Js::Configuration::Global.flags.InduceCodeGenFailureSeed;
            if (NativeCodeGenerator::CodegenFailureSeed == 0)
            {
                // No seed supplied on the command line: derive one from the
                // performance counter so runs differ.
                LARGE_INTEGER ctr;
                ::QueryPerformanceCounter(&ctr);
                NativeCodeGenerator::CodegenFailureSeed = ctr.HighPart ^ ctr.LowPart;
                srand((uint)NativeCodeGenerator::CodegenFailureSeed);
            }
        }
        int v = Math::Rand() % 100;
        if (v < Js::Configuration::Global.flags.InduceCodeGenFailure)
        {
            switch (v % 3)
            {
            case 0: Js::Throw::OutOfMemory(); break;
            case 1: throw Js::StackOverflowException(); break;
            case 2: throw Js::OperationAbortedException(); break;
            default:
                Assert(false);
            }
        }
    }
#endif
    bool irviewerInstance = false;
#ifdef IR_VIEWER
    irviewerInstance = true;
#endif
    // A function work item must still be behind one of our check thunks
    // (unless IR viewer is driving codegen directly).
    Assert(
        workItem->Type() != JsFunctionType ||
        irviewerInstance ||
        IsThunk(workItem->GetFunctionBody()->GetDirectEntryPoint(workItem->GetEntryPoint())) ||
        IsAsmJsCodeGenThunk(workItem->GetFunctionBody()->GetDirectEntryPoint(workItem->GetEntryPoint())));
    InterlockedExchangeAdd(&this->byteCodeSizeGenerated, workItem->GetByteCodeCount()); // must be interlocked because this data may be modified in the foreground and background thread concurrently
    Js::FunctionBody* body = workItem->GetFunctionBody();
    int nRegs = body->GetLocalsCount();
    // SymIDs start after the locals; verify the count survives the narrowing.
    AssertMsg((nRegs + 1) == (int)(SymID)(nRegs + 1), "SymID too small...");
    CodeGenAllocators *const allocators =
        foreground ? EnsureForegroundAllocators(pageAllocator) : GetBackgroundAllocator(pageAllocator); // okay to do outside lock since the respective function is called only from one thread
    Js::ScriptContextProfiler *const codeGenProfiler =
#ifdef PROFILE_EXEC
        foreground ? EnsureForegroundCodeGenProfiler() : GetBackgroundCodeGenProfiler(pageAllocator); // okay to do outside lock since the respective function is called only from one thread
#else
        nullptr;
#endif
    NoRecoverMemoryJitArenaAllocator funcAlloc(L"BE-FuncAlloc", pageAllocator, Js::Throw::OutOfMemory);
    Js::ReadOnlyDynamicProfileInfo profileInfo(
        body->HasDynamicProfileInfo() ? body->GetAnyDynamicProfileInfo() : nullptr,
        foreground ? nullptr : &funcAlloc);
    bool rejit;
    ThreadContext *threadContext = scriptContext->GetThreadContext();
    double startTime = threadContext->JITTelemetry.Now();
    do
    {
        // the number allocator needs to be on the stack so that if we are doing foreground JIT
        // the chunk allocated from the recycler will be stacked pinned
        CodeGenNumberAllocator numberAllocator(
            foreground? nullptr : scriptContext->GetThreadContext()->GetCodeGenNumberThreadAllocator(),
            scriptContext->GetRecycler());
        Func *func =
            JitAnew(
                (&funcAlloc),
                Func,
                (&funcAlloc),
                workItem,
                nullptr,
                workItem->GetEntryPoint()->GetPolymorphicInlineCacheInfo()->GetSelfInfo(),
                allocators,
                &numberAllocator,
                &profileInfo,
                codeGenProfiler,
                !foreground);
#if DBG_DUMP
        CurrentFunc = func;
#endif
        func->m_symTable->SetStartingID(static_cast<SymID>(nRegs + 1));
        try
        {
            // Although we don't need to release the Arena memory, we need to invoke Func destructor.
            // Use an auto object for it. Put it here to ensure "func" is cleared whenever we have an
            // exception (RejitException or AbortException).
            AutoAllocatorObjectPtr<Func, JitArenaAllocator> autoFunc(func, &funcAlloc);
            func->Codegen();
            rejit = false;
        }
        catch(Js::RejitException ex)
        {
            // The work item needs to be rejitted, likely due to some optimization that was too aggressive
            if(ex.Reason() == RejitReason::AggressiveIntTypeSpecDisabled)
            {
                const bool isJitLoopBody = workItem->Type() == JsLoopBodyWorkItemType;
                profileInfo.DisableAggressiveIntTypeSpec(isJitLoopBody);
                if (body->HasDynamicProfileInfo())
                {
                    body->GetAnyDynamicProfileInfo()->DisableAggressiveIntTypeSpec(isJitLoopBody);
                }
            }
            else if(ex.Reason() == RejitReason::InlineApplyDisabled)
            {
                body->SetDisableInlineApply(true);
            }
            else if(ex.Reason() == RejitReason::InlineSpreadDisabled)
            {
                body->SetDisableInlineSpread(true);
            }
            else if(ex.Reason() == RejitReason::DisableSwitchOptExpectingInteger ||
                ex.Reason() == RejitReason::DisableSwitchOptExpectingString)
            {
                profileInfo.DisableSwitchOpt();
                if(body->HasDynamicProfileInfo())
                {
                    body->GetAnyDynamicProfileInfo()->DisableSwitchOpt();
                }
            }
            else
            {
                // The only remaining rejit reason handled here.
                Assert(ex.Reason() == RejitReason::TrackIntOverflowDisabled);
                profileInfo.DisableTrackCompoundedIntOverflow();
                if(body->HasDynamicProfileInfo())
                {
                    body->GetAnyDynamicProfileInfo()->DisableTrackCompoundedIntOverflow();
                }
            }
            if(PHASE_TRACE(Js::ReJITPhase, body))
            {
                wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
                Output::Print(
                    L"Rejit (compile-time): function: %s (%s) reason: %S\n",
                    body->GetDisplayName(),
                    body->GetDebugNumberSet(debugStringBuffer),
                    ex.ReasonName());
            }
            rejit = true;
            // Throw away everything this attempt allocated before retrying.
            funcAlloc.Reset();
            if(!foreground)
            {
                profileInfo.OnBackgroundAllocatorReset();
            }
        }
        // Either the entry point has a reference to the number now, or we failed to code gen and we
        // don't need to numbers, we can flush the completed page now.
        //
        // If the number allocator is NULL then we are shutting down the thread context and so too the
        // code generator. The number allocator must be freed before the recycler (and thus before the
        // code generator) so we can't and don't need to flush it.
        CodeGenNumberThreadAllocator * threadNumberAllocator = this->scriptContext->GetThreadContext()->GetCodeGenNumberThreadAllocator();
        if (threadNumberAllocator != nullptr)
        {
            threadNumberAllocator->FlushAllocations();
        }
    } while(rejit);
    threadContext->JITTelemetry.LogTime(threadContext->JITTelemetry.Now() - startTime);
#ifdef BGJIT_STATS
    // Must be interlocked because the following data may be modified from the background and foreground threads concurrently
    Js::ScriptContext *scriptContext = workItem->GetScriptContext();
    if (workItem->Type() == JsFunctionType)
    {
        InterlockedExchangeAdd(&scriptContext->bytecodeJITCount, workItem->GetByteCodeCount());
        InterlockedIncrement(&scriptContext->funcJITCount);
    }
    else if(workItem->Type() == JsLoopBodyWorkItemType)
    {
        InterlockedIncrement(&scriptContext->loopJITCount);
    }
#endif
}
  865. void NativeCodeGenerator::SetProfileMode(BOOL fSet)
  866. {
  867. this->SetNativeEntryPoint = fSet? Js::FunctionBody::ProfileSetNativeEntryPoint : Js::FunctionBody::DefaultSetNativeEntryPoint;
  868. }
#if _M_IX86
// x86 entry-point thunk installed on asm.js functions while their full-JIT
// compile is pending. Calls CheckAsmJsCodeGen with the function object; the
// returned address (JITted code if ready, otherwise the current entry point)
// is then tail-jumped to with the caller's original stack layout restored.
__declspec(naked)
Js::Var
NativeCodeGenerator::CheckAsmJsCodeGenThunk(Js::RecyclableObject* function, Js::CallInfo callInfo, ...)
{
    __asm
    {
        push ebp
        mov ebp, esp
        push function
        call NativeCodeGenerator::CheckAsmJsCodeGen
#ifdef _CONTROL_FLOW_GUARD
        // verify that the call target is valid
        push eax
        mov ecx, eax
        call[__guard_check_icall_fptr]
        pop eax
#endif
        pop ebp
        jmp eax
    }
}
#elif _M_X64 || _M_ARM || _M_ARM64
// Do nothing: the implementation of NativeCodeGenerator::CheckCodeGenThunk is declared (appropriately decorated) in
// Backend\amd64\Thunks.asm and Backend\arm\Thunks.asm and Backend\arm64\Thunks.asm respectively.
#else
#error Not implemented.
#endif
#if _M_IX86
// x86 entry-point thunk installed on functions whose JIT compile is pending
// (see SetCheckCodeGenEntryPoint). Passes the function object ([esp+8] after
// the saved ebp: the first script argument) to CheckCodeGen, then tail-jumps
// to the address it returns with the caller's original stack layout restored.
__declspec(naked)
Js::Var
NativeCodeGenerator::CheckCodeGenThunk(Js::RecyclableObject* function, Js::CallInfo callInfo, ...)
{
    __asm
    {
        push ebp
        mov ebp, esp
        push [esp+8]
        call NativeCodeGenerator::CheckCodeGen
#ifdef _CONTROL_FLOW_GUARD
        // verify that the call target is valid
        push eax
        mov ecx, eax
        call[__guard_check_icall_fptr]
        pop eax
#endif
        pop ebp
        jmp eax
    }
}
#elif _M_X64 || _M_ARM || _M_ARM64
// Do nothing: the implementation of NativeCodeGenerator::CheckCodeGenThunk is declared (appropriately decorated) in
// Backend\amd64\Thunks.asm and Backend\arm\Thunks.asm and Backend\arm64\Thunks.asm respectively.
#else
#error Not implemented.
#endif
  925. bool
  926. NativeCodeGenerator::IsThunk(Js::JavascriptMethod codeAddress)
  927. {
  928. return codeAddress == NativeCodeGenerator::CheckCodeGenThunk;
  929. }
// Returns true when 'codeAddress' is the asm.js pending-codegen thunk.
// Without asm.js platform support the thunk does not exist, so always false.
bool
NativeCodeGenerator::IsAsmJsCodeGenThunk(Js::JavascriptMethod codeAddress)
{
#ifdef ASMJS_PLAT
    return codeAddress == NativeCodeGenerator::CheckAsmJsCodeGenThunk;
#else
    return false;
#endif
}
  939. CheckCodeGenFunction
  940. NativeCodeGenerator::GetCheckCodeGenFunction(Js::JavascriptMethod codeAddress)
  941. {
  942. if (codeAddress == NativeCodeGenerator::CheckCodeGenThunk)
  943. {
  944. return NativeCodeGenerator::CheckCodeGen;
  945. }
  946. return nullptr;
  947. }
// Called via CheckAsmJsCodeGenThunk when an asm.js function whose full-JIT
// compile is pending is invoked. Prioritizes the pending job; if it has not
// finished, returns the entry point's current native address so execution can
// continue on the existing (TJ/interpreter) code, otherwise installs and
// returns the full-JIT entry point via CheckCodeGenDone.
Js::Var
NativeCodeGenerator::CheckAsmJsCodeGen(Js::ScriptFunction * function)
{
    Assert(function);
    Js::FunctionBody *functionBody = function->GetFunctionBody();
    Js::ScriptContext *scriptContext = functionBody->GetScriptContext();
    NativeCodeGenerator *nativeCodeGen = scriptContext->GetNativeCodeGenerator();
    Assert(scriptContext->GetThreadContext()->IsScriptActive());
    Assert(scriptContext->GetThreadContext()->IsInScript());
    // Load the entry point here to validate it got changed afterwards
    Js::FunctionEntryPointInfo* entryPoint = function->GetFunctionEntryPointInfo();
#if ENABLE_DEBUG_CONFIG_OPTIONS
    // When a run-count cap is configured (template JIT or asm.js interpreter),
    // block until the compile completes so the cap behaves deterministically.
    if ((PHASE_ON1(Js::AsmJsJITTemplatePhase) && CONFIG_FLAG(MaxTemplatizedJitRunCount) >= 0) || (!PHASE_ON1(Js::AsmJsJITTemplatePhase) && CONFIG_FLAG(MaxAsmJsInterpreterRunCount) >= 0))
    {
        nativeCodeGen->Processor()->PrioritizeJobAndWait(nativeCodeGen, entryPoint, function);
    } else
#endif
    if (!nativeCodeGen->Processor()->PrioritizeJob(nativeCodeGen, entryPoint, function))
    {
        // Job still pending: keep running the current code.
        if (PHASE_TRACE1(Js::AsmjsEntryPointInfoPhase))
        {
            Output::Print(L"Codegen not done yet for function: %s, Entrypoint is CheckAsmJsCodeGenThunk\n", function->GetFunctionBody()->GetDisplayName());
        }
        return reinterpret_cast<Js::JavascriptMethod>(entryPoint->GetNativeAddress());
    }
    if (PHASE_TRACE1(Js::AsmjsEntryPointInfoPhase))
    {
        Output::Print(L"CodeGen Done for function: %s, Changing Entrypoint to Full JIT\n", function->GetFunctionBody()->GetDisplayName());
    }
    // we will need to set the functionbody external and asmjs entrypoint to the fulljit entrypoint
    return CheckCodeGenDone(functionBody, entryPoint, function);
}
// Called via CheckCodeGenThunk when a function whose JIT compile is (or was)
// pending is invoked. Syncs the function object to the body's latest default
// entry point, prioritizes the pending job, and returns the method to execute:
// the JITted code if the job completed, otherwise the current interpreter /
// profile entry point so execution can proceed while the JIT continues.
Js::JavascriptMethod
NativeCodeGenerator::CheckCodeGen(Js::ScriptFunction * function)
{
    Assert(function);
    Assert(function->GetEntryPoint() == NativeCodeGenerator::CheckCodeGenThunk
        || Js::CrossSite::IsThunk(function->GetEntryPoint()));
    // We are not expecting non-deserialized functions here; Error if it hasn't been deserialized by this point
    Js::FunctionBody *functionBody = function->GetFunctionBody();
    Js::ScriptContext *scriptContext = functionBody->GetScriptContext();
    NativeCodeGenerator *nativeCodeGen = scriptContext->GetNativeCodeGenerator();
    Assert(scriptContext->GetThreadContext()->IsScriptActive());
    Assert(scriptContext->GetThreadContext()->IsInScript());
    // Load the entry point here to validate it got changed afterwards
    Js::JavascriptMethod originalEntryPoint = functionBody->GetOriginalEntryPoint();
    Js::FunctionEntryPointInfo* entryPoint = function->GetFunctionEntryPointInfo();
    Js::FunctionEntryPointInfo *const defaultEntryPointInfo = functionBody->GetDefaultFunctionEntryPointInfo();
    if(entryPoint != defaultEntryPointInfo)
    {
        // Switch to the latest entry point info
        function->UpdateThunkEntryPoint(defaultEntryPointInfo, functionBody->GetDirectEntryPoint(defaultEntryPointInfo));
        const Js::JavascriptMethod defaultDirectEntryPoint = functionBody->GetDirectEntryPoint(defaultEntryPointInfo);
        if(!IsThunk(defaultDirectEntryPoint))
        {
            // The latest entry point already has real code behind it; run that.
            return defaultDirectEntryPoint;
        }
        entryPoint = defaultEntryPointInfo;
    }
    // If a transition to JIT needs to be forced, JIT right away
    if(Js::Configuration::Global.flags.EnforceExecutionModeLimits &&
        functionBody->GetExecutionMode() != ExecutionMode::SimpleJit &&
        functionBody->TryTransitionToJitExecutionMode())
    {
        nativeCodeGen->Processor()->PrioritizeJobAndWait(nativeCodeGen, entryPoint, function);
        return CheckCodeGenDone(functionBody, entryPoint, function);
    }
    if(!nativeCodeGen->Processor()->PrioritizeJob(nativeCodeGen, entryPoint, function))
    {
        // Job was not yet processed
        // originalEntryPoint is the last known good entry point for the function body. Here we verify that
        // it either corresponds with this codegen episode (identified by function->entryPointIndex) of the function body
        // or one that was scheduled after. In the latter case originalEntryPoint will get updated if and when
        // that last episode completes successfully.
        Assert(functionBody->GetDefaultEntryPointInfo() == function->GetEntryPointInfo() &&
            (
                originalEntryPoint == DefaultEntryThunk
                || scriptContext->IsDynamicInterpreterThunk(originalEntryPoint)
                || originalEntryPoint == ProfileDeferredParsingThunk
                || originalEntryPoint == DefaultDeferredParsingThunk
                || (
                    functionBody->GetSimpleJitEntryPointInfo() &&
                    originalEntryPoint ==
                        reinterpret_cast<Js::JavascriptMethod>(functionBody->GetSimpleJitEntryPointInfo()->GetNativeAddress())
                )
            ) ||
            functionBody->GetDefaultFunctionEntryPointInfo()->entryPointIndex > function->GetFunctionEntryPointInfo()->entryPointIndex);
        return (scriptContext->CurrentThunk == ProfileEntryThunk) ? ProfileEntryThunk : originalEntryPoint;
    }
    return CheckCodeGenDone(functionBody, entryPoint, function);
}
// Finalizes a completed (or abandoned) codegen episode for 'functionBody' and
// returns the method address callers should now invoke. On success, installs
// the native entry point; on failure/cleanup, falls back to the original
// (interpreter or profile) entry point. Also releases - or marks for release -
// JITted loop bodies the full-JIT body makes redundant. 'function' may be
// null (e.g. prejit without a live function object).
Js::JavascriptMethod
NativeCodeGenerator::CheckCodeGenDone(
    Js::FunctionBody *const functionBody,
    Js::FunctionEntryPointInfo *const entryPointInfo,
    Js::ScriptFunction * function)
{
    Assert(!function || function->GetFunctionBody() == functionBody);
    Assert(!function || function->GetFunctionEntryPointInfo() == entryPointInfo);
    // Job was processed or failed and cleaned up
    // We won't call CheckCodeGenDone if the job is still pending since
    // PrioritizeJob will return false
    Assert(entryPointInfo->IsCodeGenDone() || entryPointInfo->IsCleanedUp() || entryPointInfo->IsPendingCleanup());
    if (!functionBody->GetHasBailoutInstrInJittedCode() && functionBody->GetHasAllocatedLoopHeaders() && (!functionBody->GetIsAsmJsFunction() || !(((Js::FunctionEntryPointInfo*)functionBody->GetDefaultEntryPointInfo())->GetIsTJMode())))
    {
        // The JITted body contains no bailout instructions, so the function
        // won't fall back to the interpreter and its loop headers (and their
        // JITted loop bodies) can be released - now if safe, else deferred.
        if (functionBody->GetCanReleaseLoopHeaders())
        {
            functionBody->ReleaseLoopHeaders();
        }
        else
        {
            functionBody->SetPendingLoopHeaderRelease(true);
        }
    }
    Js::ScriptContext *scriptContext = functionBody->GetScriptContext();
    if (!functionBody->GetNativeEntryPointUsed())
    {
#ifdef BGJIT_STATS
        scriptContext->jitCodeUsed += functionBody->GetByteCodeCount();
        scriptContext->funcJitCodeUsed++;
#endif
        functionBody->SetNativeEntryPointUsed(true);
    }
    // Replace the entry point
    Js::JavascriptMethod address;
    if (!entryPointInfo->IsCodeGenDone())
    {
        // Codegen failed or was abandoned: finish any pending cleanup and fall
        // back to the original entry point (or the profile thunk).
        if (entryPointInfo->IsPendingCleanup())
        {
            entryPointInfo->Cleanup(false /* isShutdown */, true /* capture cleanup stack */);
        }
        address = functionBody->GetScriptContext()->CurrentThunk == ProfileEntryThunk ? ProfileEntryThunk : functionBody->GetOriginalEntryPoint();
        entryPointInfo->address = address;
    }
    else
    {
        // Success: install the native address via the configured callback
        // (profile-aware or default - see SetProfileMode).
        scriptContext->GetNativeCodeGenerator()->SetNativeEntryPoint(
            entryPointInfo,
            functionBody,
            reinterpret_cast<Js::JavascriptMethod>(entryPointInfo->GetNativeAddress()));
        address = (Js::JavascriptMethod) entryPointInfo->address;
        Assert(!functionBody->NeedEnsureDynamicProfileInfo() || address == Js::DynamicProfileInfo::EnsureDynamicProfileInfoThunk);
    }
    Assert(!IsThunk(address));
    if(function)
    {
        function->UpdateThunkEntryPoint(entryPointInfo, address);
    }
    // call the direct entry point, which will ensure dynamic profile info if necessary
    return address;
}
  1099. CodeGenWorkItem *
  1100. NativeCodeGenerator::GetJob(Js::EntryPointInfo * const entryPoint) const
  1101. {
  1102. ASSERT_THREAD();
  1103. Assert(entryPoint);
  1104. return entryPoint->GetWorkItem();
  1105. }
  1106. bool
  1107. NativeCodeGenerator::WasAddedToJobProcessor(JsUtil::Job *const job) const
  1108. {
  1109. // This function is called from inside the lock
  1110. ASSERT_THREAD();
  1111. Assert(job);
  1112. return static_cast<CodeGenWorkItem *>(job)->IsInJitQueue();
  1113. }
  1114. bool
  1115. NativeCodeGenerator::ShouldProcessInForeground(const bool willWaitForJob, const unsigned int numJobsInQueue) const
  1116. {
  1117. // This function is called from inside the lock
  1118. ASSERT_THREAD();
  1119. // Process the job synchronously in the foreground thread if we're waiting for the job to be processed, or if the background
  1120. // job queue is long enough and this native code generator is optimized for many instances (web workers)
  1121. return
  1122. willWaitForJob ||
  1123. (numJobsInQueue > (uint)CONFIG_FLAG(HybridFgJitBgQueueLengthThreshold) &&
  1124. (CONFIG_FLAG(HybridFgJit) || isOptimizedForManyInstances));
  1125. }
// JobProcessor callback invoked (under the lock) when a job is prioritized
// but has not been processed yet. Records an interpreted-call stat and moves
// the item's queued full-JIT entry to the front of the full-JIT queue.
void
NativeCodeGenerator::PrioritizedButNotYetProcessed(JsUtil::Job *const job)
{
    // This function is called from inside the lock
    ASSERT_THREAD();
    Assert(job);
#ifdef BGJIT_STATS
    CodeGenWorkItem *const codeGenWorkItem = static_cast<CodeGenWorkItem *>(job);
    if(codeGenWorkItem->Type() == JsFunctionType && codeGenWorkItem->IsInJitQueue())
    {
        // The function is being called while its JIT is still queued - count
        // it as a high-priority interpreted call.
        codeGenWorkItem->GetScriptContext()->interpretedCallsHighPri++;
        if(codeGenWorkItem->GetJitMode() == ExecutionMode::FullJit)
        {
            QueuedFullJitWorkItem *const queuedFullJitWorkItem = codeGenWorkItem->GetQueuedFullJitWorkItem();
            if(queuedFullJitWorkItem)
            {
                queuedFullJitWorkItems.MoveToBeginning(queuedFullJitWorkItem);
            }
        }
    }
#endif
}
// JobProcessor callback: marks the start of the foreground thread's wait for
// a JIT job, so the delay shows up under Js::DelayPhase in the exec profiler.
void
NativeCodeGenerator::BeforeWaitForJob(Js::EntryPointInfo *const entryPoint) const
{
    ASSERT_THREAD();
    Assert(entryPoint);
#ifdef PROFILE_EXEC
    ProfileBegin(this->foregroundCodeGenProfiler, Js::DelayPhase);
#endif
}
// JobProcessor callback: marks the end of the foreground thread's wait for a
// JIT job, closing the Js::DelayPhase interval opened by BeforeWaitForJob.
void
NativeCodeGenerator::AfterWaitForJob(Js::EntryPointInfo *const entryPoint) const
{
    ASSERT_THREAD();
    Assert(entryPoint);
#ifdef PROFILE_EXEC
    ProfileEnd(this->foregroundCodeGenProfiler, Js::DelayPhase);
#endif
}
  1166. /*
  1167. * A workitem exceeds JIT limits if we've already generated MaxThreadJITCodeHeapSize
  1168. * (currently 7 MB) of code on this thread or MaxProcessJITCodeHeapSize (currently 55 MB)
  1169. * in the process. In real world websites we rarely (if at all) hit this limit.
  1170. * Also, if this workitem's byte code size is in excess of MaxJITFunctionBytecodeSize instructions,
  1171. * it exceeds the JIT limits
  1172. */
  1173. bool
  1174. NativeCodeGenerator::WorkItemExceedsJITLimits(CodeGenWorkItem *const codeGenWork)
  1175. {
  1176. return
  1177. (codeGenWork->GetScriptContext()->GetThreadContext()->GetCodeSize() >= Js::Constants::MaxThreadJITCodeHeapSize) ||
  1178. (ThreadContext::GetProcessCodeSize() >= Js::Constants::MaxProcessJITCodeHeapSize) ||
  1179. (codeGenWork->GetByteCodeCount() >= (uint)CONFIG_FLAG(MaxJITFunctionBytecodeSize));
  1180. }
// JobProcessor callback that compiles one work item. 'threadData' is null when
// the job runs synchronously on the foreground (script) thread; otherwise it
// supplies the background thread's page allocator. Returns true when native
// code was actually generated for the item.
bool
NativeCodeGenerator::Process(JsUtil::Job *const job, JsUtil::ParallelThreadData *threadData)
{
    const bool foreground = !threadData;
    PageAllocator *pageAllocator;
    if (foreground)
    {
        pageAllocator = scriptContext->GetThreadContext()->GetPageAllocator();
    }
    else
    {
        pageAllocator = threadData->GetPageAllocator();
    }
    CodeGenWorkItem *const codeGenWork = static_cast<CodeGenWorkItem *>(job);
    switch (codeGenWork->Type())
    {
    case JsLoopBodyWorkItemType:
    {
        JsLoopBodyCodeGen* loopBodyCodeGenWorkItem = (JsLoopBodyCodeGen*)codeGenWork;
        Js::FunctionBody* fn = loopBodyCodeGenWorkItem->GetFunctionBody();
        // The full function got JITted in the meantime, so this loop body is
        // no longer needed (same check as in GenerateLoopBody).
        if (fn->GetNativeEntryPointUsed() && fn->GetCanReleaseLoopHeaders() && (!fn->GetIsAsmJsFunction() || !(loopBodyCodeGenWorkItem->loopHeader->GetCurrentEntryPointInfo()->GetIsTJMode())))
        {
            loopBodyCodeGenWorkItem->loopHeader->ResetInterpreterCount();
            return false;
        }
        // Unless we're in a ForceNative configuration, ignore this workitem if it exceeds JIT limits
        if (fn->ForceJITLoopBody() || !WorkItemExceedsJITLimits(codeGenWork))
        {
            CodeGen(pageAllocator, codeGenWork, foreground);
            return true;
        }
        // Record that the JIT cap was reached so this loop isn't rescheduled.
        Js::EntryPointInfo * entryPoint = loopBodyCodeGenWorkItem->GetEntryPoint();
        entryPoint->SetJITCapReached();
        return false;
    }
    case JsFunctionType:
    {
        // Unless we're in a ForceNative configuration, ignore this workitem if it exceeds JIT limits
        if (IS_PREJIT_ON() || Js::Configuration::Global.flags.ForceNative || !WorkItemExceedsJITLimits(codeGenWork))
        {
            CodeGen(pageAllocator, codeGenWork, foreground);
            return true;
        }
        return false;
    }
    default:
        Assume(UNREACHED);
    }
    return false;
}
  1231. void
  1232. NativeCodeGenerator::Prioritize(JsUtil::Job *const job, const bool forceAddJobToProcessor, void* function)
  1233. {
  1234. // This function is called from inside the lock
  1235. ASSERT_THREAD();
  1236. Assert(job);
  1237. Assert(static_cast<const CodeGenWorkItem *>(job)->Type() == CodeGenWorkItemType::JsFunctionType);
  1238. Assert(!WasAddedToJobProcessor(job));
  1239. JsFunctionCodeGen *const workItem = static_cast<JsFunctionCodeGen *>(job);
  1240. Js::FunctionBody *const functionBody = workItem->GetFunctionBody();
  1241. Assert(workItem->GetEntryPoint() == functionBody->GetDefaultFunctionEntryPointInfo());
  1242. ExecutionMode jitMode;
  1243. if (functionBody->GetIsAsmjsMode())
  1244. {
  1245. jitMode = ExecutionMode::FullJit;
  1246. functionBody->SetExecutionMode(ExecutionMode::FullJit);
  1247. }
  1248. else
  1249. {
  1250. if(!forceAddJobToProcessor && !functionBody->TryTransitionToJitExecutionMode())
  1251. {
  1252. return;
  1253. }
  1254. jitMode = functionBody->GetExecutionMode();
  1255. Assert(jitMode == ExecutionMode::SimpleJit || jitMode == ExecutionMode::FullJit);
  1256. }
  1257. workItems.Unlink(workItem);
  1258. workItem->SetJitMode(jitMode);
  1259. try
  1260. {
  1261. // Prioritize full JIT work items over simple JIT work items. This simple solution seems sufficient for now, but it
  1262. // might be better to use a priority queue if it becomes necessary to prioritize recent simple JIT work items relative
  1263. // to the older simple JIT work items.
  1264. AddToJitQueue(
  1265. workItem,
  1266. jitMode == ExecutionMode::FullJit || queuedFullJitWorkItemCount == 0 /* prioritize */,
  1267. false /* lock */,
  1268. function);
  1269. }
  1270. catch (...)
  1271. {
  1272. // Add the item back to the list if AddToJitQueue throws. The position in the list is not important.
  1273. workItem->ResetJitMode();
  1274. workItems.LinkToEnd(workItem);
  1275. throw;
  1276. }
  1277. }
  1278. bool
  1279. NativeCodeGenerator::IsInDebugMode() const
  1280. {
  1281. return this->scriptContext->IsInDebugMode();
  1282. }
  1283. ExecutionMode NativeCodeGenerator::PrejitJitMode(Js::FunctionBody *const functionBody)
  1284. {
  1285. Assert(IS_PREJIT_ON());
  1286. Assert(functionBody->DoSimpleJit() || functionBody->DoFullJit());
  1287. // Prefer full JIT for prejitting unless it's off or simple JIT is forced
  1288. return
  1289. functionBody->DoFullJit() && !(PHASE_FORCE(Js::Phase::SimpleJitPhase, functionBody) && functionBody->DoSimpleJit())
  1290. ? ExecutionMode::FullJit
  1291. : ExecutionMode::SimpleJit;
  1292. }
  1293. void
  1294. NativeCodeGenerator::UpdateQueueForDebugMode()
  1295. {
  1296. Assert(!this->hasUpdatedQForDebugMode);
  1297. // If we're going to debug mode, drain the job processors queue of
  1298. // all jobs belonging this native code generator
  1299. // JobProcessed will be called for existing jobs, and in debug mode
  1300. // that method will simply add them back to the NativeCodeGen's queue
  1301. Processor()->RemoveManager(this);
  1302. this->hasUpdatedQForDebugMode = true;
  1303. if (Js::Configuration::Global.EnableJitInDebugMode())
  1304. {
  1305. Processor()->AddManager(this);
  1306. }
  1307. }
void
NativeCodeGenerator::JobProcessed(JsUtil::Job *const job, const bool succeeded)
{
    // Final bookkeeping after the job processor is done with a code-gen work
    // item, on both the success and failure paths. This function is called from
    // inside the lock.
    Assert(job);
    CodeGenWorkItem *workItem = static_cast<CodeGenWorkItem *>(job);
    // RAII guard: unregister the work item's recyclable data from the thread
    // context on every exit path (including any exception thrown below).
    class AutoCleanup
    {
    private:
        Js::ScriptContext *const scriptContext;
        Js::CodeGenRecyclableData *const recyclableData;
    public:
        AutoCleanup(Js::ScriptContext *const scriptContext, Js::CodeGenRecyclableData *const recyclableData)
            : scriptContext(scriptContext), recyclableData(recyclableData)
        {
            Assert(scriptContext);
        }
        ~AutoCleanup()
        {
            if(recyclableData)
            {
                scriptContext->GetThreadContext()->UnregisterCodeGenRecyclableData(recyclableData);
            }
        }
    } autoCleanup(scriptContext, workItem->RecyclableData());
    const ExecutionMode jitMode = workItem->GetJitMode();
    // If this was a queued full-JIT item, remove its queue entry and keep the
    // queued-count in sync.
    if(jitMode == ExecutionMode::FullJit && workItem->IsInJitQueue())
    {
        QueuedFullJitWorkItem *const queuedFullJitWorkItem = workItem->GetQueuedFullJitWorkItem();
        if(queuedFullJitWorkItem)
        {
            queuedFullJitWorkItems.Unlink(queuedFullJitWorkItem);
            --queuedFullJitWorkItemCount;
        }
    }
    Js::FunctionBody* functionBody = nullptr;
    CodeGenWorkItemType workitemType = workItem->Type();
    if (workitemType == JsFunctionType)
    {
        JsFunctionCodeGen * functionCodeGen = (JsFunctionCodeGen *)workItem;
        functionBody = functionCodeGen->GetFunctionBody();
        if (succeeded)
        {
            // Publish the finished native entry point.
            Js::FunctionEntryPointInfo* entryPointInfo = static_cast<Js::FunctionEntryPointInfo*>(functionCodeGen->GetEntryPoint());
            entryPointInfo->SetJitMode(jitMode);
            Assert(workItem->GetCodeAddress() != NULL);
            entryPointInfo->SetCodeGenDone();
        }
        else
        {
#if DBG
            functionBody->m_nativeEntryPointIsInterpreterThunk = true;
#endif
            // It's okay if the entry point has been reclaimed at this point
            // since the job failed anyway so the entry point should never get used
            // If it's still around, clean it up. If not, its finalizer would clean
            // it up anyway.
            Js::EntryPointInfo* entryPointInfo = functionCodeGen->GetEntryPoint();
            if (entryPointInfo)
            {
#if ENABLE_DEBUG_CONFIG_OPTIONS
                // Record why the JIT failed so the cleanup reason can be traced.
                switch (job->failureReason)
                {
                case Job::FailureReason::OOM: entryPointInfo->SetCleanupReason(Js::EntryPointInfo::CodeGenFailedOOM); break;
                case Job::FailureReason::StackOverflow: entryPointInfo->SetCleanupReason(Js::EntryPointInfo::CodeGenFailedStackOverflow); break;
                case Job::FailureReason::Aborted: entryPointInfo->SetCleanupReason(Js::EntryPointInfo::CodeGenFailedAborted); break;
                default: Assert(job->failureReason == Job::FailureReason::NotFailed);
                }
#endif
                entryPointInfo->SetPendingCleanup();
            }
            functionCodeGen->OnWorkItemProcessFail(this);
        }
        InterlockedDecrement(&pendingCodeGenWorkItems);
        HeapDelete(functionCodeGen);
    }
    else if (workitemType == JsLoopBodyWorkItemType)
    {
        JsLoopBodyCodeGen * loopBodyCodeGen = (JsLoopBodyCodeGen*)workItem;
        functionBody = loopBodyCodeGen->GetFunctionBody();
        Js::EntryPointInfo * entryPoint = loopBodyCodeGen->GetEntryPoint();
        if (succeeded)
        {
            // Install the jitted loop body on its loop header.
            Assert(workItem->GetCodeAddress() != NULL);
            functionBody->SetLoopBodyEntryPoint(loopBodyCodeGen->loopHeader, loopBodyCodeGen->GetEntryPoint(), (Js::JavascriptMethod)workItem->GetCodeAddress());
            entryPoint->SetCodeGenDone();
        }
        else
        {
            // We re-use failed loop body entry points.
            // The loop body entry point could have been cleaned up if the parent function JITed,
            // in which case we don't want to reset it.
            if (entryPoint && !entryPoint->IsCleanedUp())
            {
                entryPoint->Reset(!entryPoint->IsJITCapReached()); // reset state to NotScheduled if JIT cap hasn't been reached
            }
            loopBodyCodeGen->OnWorkItemProcessFail(this);
        }
        HeapDelete(loopBodyCodeGen);
    }
    else
    {
        AssertMsg(false, "Unknown work item type");
        AssertMsg(workItem->GetCodeAddress() == NULL, "No other types should have native entry point for now.");
    }
}
JsUtil::Job *
NativeCodeGenerator::GetJobToProcessProactively()
{
    // Scan the pending work items for one worth jitting speculatively; if one is
    // found, move it into the full-JIT queue and return it. Returns nullptr if
    // nothing qualifies.
    ASSERT_THREAD();

    // Look for work, starting with high priority items first, and above LowPri
    CodeGenWorkItem* workItem = workItems.Head();
    while(workItem != nullptr)
    {
        if(workItem->ShouldSpeculativelyJit(this->byteCodeSizeGenerated))
        {
            workItem->SetJitMode(ExecutionMode::FullJit);

            // Note: This gives a perf regression in fre build, but it is useful for debugging and won't be there for the final build
            // anyway, so I left it in.
            if (PHASE_TRACE(Js::DelayPhase, workItem->GetFunctionBody())) {
                OUTPUT_TRACE(Js::DelayPhase, L"ScriptContext: 0x%p, Speculative JIT: %-25s, Byte code generated: %d \n",
                    this->scriptContext, workItem->GetFunctionBody()->GetExternalDisplayName(), this->byteCodeSizeGenerated);
            }
            Js::FunctionBody *fn = workItem->GetFunctionBody();
            Js::EntryPointInfo *entryPoint = workItem->GetEntryPoint();
            // Snapshot the profile/inline-cache data the JIT will consume.
            const auto recyclableData = GatherCodeGenData(fn, fn, entryPoint, workItem);

            workItems.Unlink(workItem);
            workItem->SetRecyclableData(recyclableData);

            {
                // Registration must happen under the processor's lock so the
                // data stays alive while the background thread can see it.
                AutoOptionalCriticalSection lock(Processor()->GetCriticalSection());
                scriptContext->GetThreadContext()->RegisterCodeGenRecyclableData(recyclableData);
            }
#ifdef BGJIT_STATS
            scriptContext->speculativeJitCount++;
#endif
            QueuedFullJitWorkItem *const queuedFullJitWorkItem = workItem->EnsureQueuedFullJitWorkItem();
            if(queuedFullJitWorkItem) // ignore OOM, this work item just won't be removed from the job processor's queue
            {
                queuedFullJitWorkItems.LinkToBeginning(queuedFullJitWorkItem);
                ++queuedFullJitWorkItemCount;
            }
            workItem->OnAddToJitQueue();

            workItem->GetFunctionBody()->TraceExecutionMode("SpeculativeJit (before)");
            workItem->GetFunctionBody()->TransitionToFullJitExecutionMode();
            workItem->GetFunctionBody()->TraceExecutionMode("SpeculativeJit");
            break;
        }
        workItem = static_cast<CodeGenWorkItem*>(workItem->Next());
    }
    // Either the item selected above, or nullptr if the scan found nothing.
    return workItem;
}
  1459. // Removes all of the proactive jobs from the generator. Used when switching between attached/detached
  1460. // debug modes in order to drain the queue of jobs (since we switch from interpreted to native and back).
  1461. void
  1462. NativeCodeGenerator::RemoveProactiveJobs()
  1463. {
  1464. CodeGenWorkItem* workItem = workItems.Head();
  1465. while (workItem)
  1466. {
  1467. CodeGenWorkItem* temp = static_cast<CodeGenWorkItem*>(workItem->Next());
  1468. workItem->Delete();
  1469. workItem = temp;
  1470. }
  1471. workItems.Clear();
  1472. //for(JsUtil::Job *job = workItems.Head(); job;)
  1473. //{
  1474. // JsUtil::Job *const next = job->Next();
  1475. // JobProcessed(job, /*succeeded*/ false);
  1476. // job = next;
  1477. //}
  1478. }
  1479. template<bool IsInlinee>
  1480. void
  1481. NativeCodeGenerator::GatherCodeGenData(
  1482. Recycler *const recycler,
  1483. Js::FunctionBody *const topFunctionBody,
  1484. Js::FunctionBody *const functionBody,
  1485. Js::EntryPointInfo *const entryPoint,
  1486. InliningDecider &inliningDecider,
  1487. ObjTypeSpecFldInfoList *objTypeSpecFldInfoList,
  1488. Js::FunctionCodeGenJitTimeData *const jitTimeData,
  1489. Js::FunctionCodeGenRuntimeData *const runtimeData,
  1490. Js::JavascriptFunction* function,
  1491. bool isJitTimeDataComputed,
  1492. uint32 recursiveInlineDepth)
  1493. {
  1494. ASSERT_THREAD();
  1495. Assert(recycler);
  1496. Assert(functionBody);
  1497. Assert(jitTimeData);
  1498. Assert(IsInlinee == !!runtimeData);
  1499. Assert(!IsInlinee || !inliningDecider.GetIsLoopBody());
  1500. Assert(topFunctionBody != nullptr && (!entryPoint->GetWorkItem() || entryPoint->GetWorkItem()->GetFunctionBody() == topFunctionBody));
  1501. Assert(objTypeSpecFldInfoList != nullptr);
  1502. #ifdef FIELD_ACCESS_STATS
  1503. jitTimeData->EnsureInlineCacheStats(recycler);
  1504. #define SetInlineCacheCount(counter, value) jitTimeData->inlineCacheStats->counter = value;
  1505. #define IncInlineCacheCount(counter) if(!isJitTimeDataComputed) {jitTimeData->inlineCacheStats->counter++;}
  1506. #define AddInlineCacheStats(callerData, inlineeData) callerData->AddInlineeInlineCacheStats(inlineeData);
  1507. #define InlineCacheStatsArg(jitTimeData) !isJitTimeDataComputed ? jitTimeData->inlineCacheStats : nullptr
  1508. #else
  1509. #define SetInlineCacheCount(counter, value)
  1510. #define IncInlineCacheCount(counter)
  1511. #define AddInlineCacheStats(callerData, inlineeData)
  1512. #define InlineCacheStatsArg(jitTimeData) nullptr
  1513. #endif
  1514. #if DBG
  1515. Assert(
  1516. PHASE_ON(Js::Phase::SimulatePolyCacheWithOneTypeForFunctionPhase, functionBody) ==
  1517. CONFIG_ISENABLED(Js::Flag::SimulatePolyCacheWithOneTypeForInlineCacheIndexFlag));
  1518. if(PHASE_ON(Js::Phase::SimulatePolyCacheWithOneTypeForFunctionPhase, functionBody))
  1519. {
  1520. const Js::InlineCacheIndex inlineCacheIndex = CONFIG_FLAG(SimulatePolyCacheWithOneTypeForInlineCacheIndex);
  1521. functionBody->CreateNewPolymorphicInlineCache(
  1522. inlineCacheIndex,
  1523. functionBody->GetPropertyIdFromCacheId(inlineCacheIndex),
  1524. functionBody->GetInlineCache(inlineCacheIndex));
  1525. if(functionBody->HasDynamicProfileInfo())
  1526. {
  1527. functionBody->GetAnyDynamicProfileInfo()->RecordPolymorphicFieldAccess(functionBody, inlineCacheIndex);
  1528. }
  1529. }
  1530. #endif
  1531. if(IsInlinee)
  1532. {
  1533. // This function is recursive
  1534. PROBE_STACK(scriptContext, Js::Constants::MinStackDefault);
  1535. }
  1536. else
  1537. {
  1538. //TryAggressiveInlining adjusts inlining heuristics and walks the call tree. If it can inlining everything it will set the InliningThreshold to be aggressive.
  1539. if (!inliningDecider.GetIsLoopBody())
  1540. {
  1541. uint32 inlineeCount = 0;
  1542. if (!PHASE_OFF(Js::TryAggressiveInliningPhase, topFunctionBody))
  1543. {
  1544. Assert(topFunctionBody == functionBody);
  1545. inliningDecider.SetAggressiveHeuristics();
  1546. if (!TryAggressiveInlining(topFunctionBody, functionBody, inliningDecider, inlineeCount, 0))
  1547. {
  1548. uint countOfInlineesWithLoops = inliningDecider.getNumberOfInlineesWithLoop();
  1549. //TryAggressiveInlining failed, set back to default heuristics.
  1550. inliningDecider.ResetInlineHeuristics();
  1551. inliningDecider.SetLimitOnInlineesWithLoop(countOfInlineesWithLoops);
  1552. }
  1553. else
  1554. {
  1555. jitTimeData->SetIsAggressiveInliningEnabled();
  1556. }
  1557. inliningDecider.ResetState();
  1558. }
  1559. }
  1560. entryPoint->EnsurePolymorphicInlineCacheInfo(recycler, functionBody);
  1561. }
  1562. entryPoint->EnsureJitTransferData(recycler);
  1563. #if ENABLE_DEBUG_CONFIG_OPTIONS
  1564. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  1565. #endif
  1566. #if ENABLE_DEBUG_CONFIG_OPTIONS
  1567. if (PHASE_VERBOSE_TRACE(Js::ObjTypeSpecPhase, topFunctionBody) || PHASE_VERBOSE_TRACE(Js::EquivObjTypeSpecPhase, topFunctionBody))
  1568. {
  1569. Output::Print(L"ObjTypeSpec: top function %s (%s), function %s (%s): GatherCodeGenData(): \n",
  1570. topFunctionBody->GetDisplayName(), topFunctionBody->GetDebugNumberSet(debugStringBuffer), functionBody->GetDisplayName(), functionBody->GetDebugNumberSet(debugStringBuffer));
  1571. Output::Flush();
  1572. }
  1573. #endif
  1574. const auto profileData =
  1575. functionBody->HasDynamicProfileInfo()
  1576. ? functionBody->GetAnyDynamicProfileInfo()
  1577. : functionBody->EnsureDynamicProfileInfo();
  1578. bool inlineGetterSetter = false;
  1579. bool inlineApplyTarget = false; //to indicate whether we can inline apply target or not.
  1580. bool inlineCallTarget = false;
  1581. if(profileData)
  1582. {
  1583. if (!IsInlinee)
  1584. {
  1585. PHASE_PRINT_TRACE(
  1586. Js::ObjTypeSpecPhase, functionBody,
  1587. L"Objtypespec (%s): Pending cache state on add %x to JIT queue: %d\n",
  1588. functionBody->GetDebugNumberSet(debugStringBuffer), entryPoint, profileData->GetPolymorphicCacheState());
  1589. entryPoint->SetPendingPolymorphicCacheState(profileData->GetPolymorphicCacheState());
  1590. entryPoint->SetPendingInlinerVersion(profileData->GetInlinerVersion());
  1591. entryPoint->SetPendingImplicitCallFlags(profileData->GetImplicitCallFlags());
  1592. }
  1593. if (functionBody->GetProfiledArrayCallSiteCount() != 0)
  1594. {
  1595. RecyclerWeakReference<Js::FunctionBody> *weakFuncRef = recycler->CreateWeakReferenceHandle(functionBody);
  1596. if (!isJitTimeDataComputed)
  1597. {
  1598. jitTimeData->SetWeakFuncRef(weakFuncRef);
  1599. }
  1600. entryPoint->AddWeakFuncRef(weakFuncRef, recycler);
  1601. }
  1602. #ifdef ENABLE_DEBUG_CONFIG_OPTIONS
  1603. if (PHASE_VERBOSE_TESTTRACE(Js::ObjTypeSpecPhase, functionBody) ||
  1604. PHASE_VERBOSE_TRACE1(Js::PolymorphicInlineCachePhase))
  1605. {
  1606. if (functionBody->GetInlineCacheCount() > 0)
  1607. {
  1608. if (!IsInlinee)
  1609. {
  1610. Output::Print(L"-----------------------------------------------------------------------------\n");
  1611. }
  1612. else
  1613. {
  1614. Output::Print(L"\tInlinee:\t");
  1615. }
  1616. functionBody->DumpFullFunctionName();
  1617. Output::Print(L"\n");
  1618. }
  1619. }
  1620. #endif
  1621. SetInlineCacheCount(totalInlineCacheCount, functionBody->GetInlineCacheCount());
  1622. Assert(functionBody->GetProfiledFldCount() == functionBody->GetInlineCacheCount()); // otherwise, isInst inline caches need to be cloned
  1623. for(uint i = 0; i < functionBody->GetInlineCacheCount(); ++i)
  1624. {
  1625. const auto cacheType = profileData->GetFldInfo(functionBody, i)->flags;
  1626. PHASE_PRINT_VERBOSE_TESTTRACE(
  1627. Js::ObjTypeSpecPhase, functionBody,
  1628. L"Cache #%3d, Layout: %s, Profile info: %s\n",
  1629. i,
  1630. functionBody->GetInlineCache(i)->LayoutString(),
  1631. cacheType == Js::FldInfo_NoInfo ? L"none" :
  1632. (cacheType & Js::FldInfo_Polymorphic) ? L"polymorphic" : L"monomorphic");
  1633. if (cacheType == Js::FldInfo_NoInfo)
  1634. {
  1635. IncInlineCacheCount(noInfoInlineCacheCount);
  1636. continue;
  1637. }
  1638. Js::PolymorphicInlineCache * polymorphicCacheOnFunctionBody = functionBody->GetPolymorphicInlineCache(i);
  1639. bool isPolymorphic = (cacheType & Js::FldInfo_Polymorphic) != 0;
  1640. if (!isPolymorphic)
  1641. {
  1642. Js::InlineCache *inlineCache;
  1643. if(function && Js::ScriptFunctionWithInlineCache::Is(function))
  1644. {
  1645. inlineCache = Js::ScriptFunctionWithInlineCache::FromVar(function)->GetInlineCache(i);
  1646. }
  1647. else
  1648. {
  1649. inlineCache = functionBody->GetInlineCache(i);
  1650. }
  1651. Js::ObjTypeSpecFldInfo* objTypeSpecFldInfo = nullptr;
  1652. #if ENABLE_DEBUG_CONFIG_OPTIONS
  1653. if (PHASE_VERBOSE_TRACE(Js::ObjTypeSpecPhase, topFunctionBody) || PHASE_VERBOSE_TRACE(Js::EquivObjTypeSpecPhase, topFunctionBody))
  1654. {
  1655. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  1656. wchar_t debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  1657. Js::PropertyId propertyId = functionBody->GetPropertyIdFromCacheId(i);
  1658. Js::PropertyRecord const * const propertyRecord = functionBody->GetScriptContext()->GetPropertyName(propertyId);
  1659. Output::Print(L"ObTypeSpec: top function %s (%s), function %s (%s): cloning mono cache for %s (#%d) cache %d \n",
  1660. topFunctionBody->GetDisplayName(), topFunctionBody->GetDebugNumberSet(debugStringBuffer),
  1661. functionBody->GetDisplayName(), functionBody->GetDebugNumberSet(debugStringBuffer2), propertyRecord->GetBuffer(), propertyId, i);
  1662. Output::Flush();
  1663. }
  1664. #endif
  1665. IncInlineCacheCount(monoInlineCacheCount);
  1666. if (inlineCache->IsEmpty())
  1667. {
  1668. IncInlineCacheCount(emptyMonoInlineCacheCount);
  1669. }
  1670. if(!PHASE_OFF(Js::ObjTypeSpecPhase, functionBody) || !PHASE_OFF(Js::FixedMethodsPhase, functionBody) || !PHASE_OFF(Js::UseFixedDataPropsPhase, functionBody))
  1671. {
  1672. if(cacheType & (Js::FldInfo_FromLocal | Js::FldInfo_FromLocalWithoutProperty | Js::FldInfo_FromProto))
  1673. {
  1674. // WinBlue 170722: Disable ObjTypeSpec optimization for activation object in debug mode,
  1675. // as it can result in BailOutFailedTypeCheck before locals are set to undefined,
  1676. // which can result in using garbage object during bailout/restore values.
  1677. if (!(IsInDebugMode() && inlineCache->GetType() &&
  1678. inlineCache->GetType()->GetTypeId() == Js::TypeIds_ActivationObject))
  1679. {
  1680. objTypeSpecFldInfo = Js::ObjTypeSpecFldInfo::CreateFrom(objTypeSpecFldInfoList->Count(), inlineCache, i, entryPoint, topFunctionBody, functionBody, InlineCacheStatsArg(jitTimeData));
  1681. if (objTypeSpecFldInfo)
  1682. {
  1683. IncInlineCacheCount(clonedMonoInlineCacheCount);
  1684. if (!PHASE_OFF(Js::InlineApplyTargetPhase, functionBody) && (cacheType & Js::FldInfo_InlineCandidate))
  1685. {
  1686. if (IsInlinee || objTypeSpecFldInfo->isBuiltIn)
  1687. {
  1688. inlineApplyTarget = true;
  1689. }
  1690. }
  1691. if (!PHASE_OFF(Js::InlineCallTargetPhase, functionBody) && (cacheType & Js::FldInfo_InlineCandidate))
  1692. {
  1693. inlineCallTarget = true;
  1694. }
  1695. if (!isJitTimeDataComputed)
  1696. {
  1697. jitTimeData->GetObjTypeSpecFldInfoArray()->SetInfo(recycler, functionBody, i, objTypeSpecFldInfo);
  1698. objTypeSpecFldInfoList->Prepend(objTypeSpecFldInfo);
  1699. }
  1700. }
  1701. }
  1702. }
  1703. }
  1704. if(!PHASE_OFF(Js::FixAccessorPropsPhase, functionBody))
  1705. {
  1706. if (!objTypeSpecFldInfo && (cacheType & Js::FldInfo_FromAccessor) && (cacheType & Js::FldInfo_InlineCandidate))
  1707. {
  1708. objTypeSpecFldInfo = Js::ObjTypeSpecFldInfo::CreateFrom(objTypeSpecFldInfoList->Count(), inlineCache, i, entryPoint, topFunctionBody, functionBody, InlineCacheStatsArg(jitTimeData));
  1709. if (objTypeSpecFldInfo)
  1710. {
  1711. inlineGetterSetter = true;
  1712. if (!isJitTimeDataComputed)
  1713. {
  1714. IncInlineCacheCount(clonedMonoInlineCacheCount);
  1715. jitTimeData->GetObjTypeSpecFldInfoArray()->SetInfo(recycler, functionBody, i, objTypeSpecFldInfo);
  1716. objTypeSpecFldInfoList->Prepend(objTypeSpecFldInfo);
  1717. }
  1718. }
  1719. }
  1720. }
  1721. if (!PHASE_OFF(Js::RootObjectFldFastPathPhase, functionBody))
  1722. {
  1723. if (i >= functionBody->GetRootObjectLoadInlineCacheStart() && inlineCache->IsLocal())
  1724. {
  1725. void * rawType = inlineCache->u.local.type;
  1726. Js::Type * type = TypeWithoutAuxSlotTag(rawType);
  1727. Js::RootObjectBase * rootObject = functionBody->GetRootObject();
  1728. if (rootObject->GetType() == type)
  1729. {
  1730. Js::BigPropertyIndex propertyIndex = inlineCache->u.local.slotIndex;
  1731. if (rawType == type)
  1732. {
  1733. // type is not tagged, inline slot
  1734. propertyIndex = rootObject->GetPropertyIndexFromInlineSlotIndex(inlineCache->u.local.slotIndex);
  1735. }
  1736. else
  1737. {
  1738. propertyIndex = rootObject->GetPropertyIndexFromAuxSlotIndex(inlineCache->u.local.slotIndex);
  1739. }
  1740. Js::PropertyAttributes attributes;
  1741. if (rootObject->GetAttributesWithPropertyIndex(functionBody->GetPropertyIdFromCacheId(i), propertyIndex, &attributes)
  1742. && (attributes & PropertyConfigurable) == 0
  1743. && !isJitTimeDataComputed)
  1744. {
  1745. // non configurable
  1746. if (objTypeSpecFldInfo == nullptr)
  1747. {
  1748. objTypeSpecFldInfo = Js::ObjTypeSpecFldInfo::CreateFrom(objTypeSpecFldInfoList->Count(), inlineCache, i, entryPoint, topFunctionBody, functionBody, InlineCacheStatsArg(jitTimeData));
  1749. if (objTypeSpecFldInfo)
  1750. {
  1751. IncInlineCacheCount(clonedMonoInlineCacheCount);
  1752. jitTimeData->GetObjTypeSpecFldInfoArray()->SetInfo(recycler, functionBody, i, objTypeSpecFldInfo);
  1753. objTypeSpecFldInfoList->Prepend(objTypeSpecFldInfo);
  1754. }
  1755. }
  1756. if (objTypeSpecFldInfo != nullptr)
  1757. {
  1758. objTypeSpecFldInfo->SetRootObjectNonConfigurableField(i < functionBody->GetRootObjectStoreInlineCacheStart());
  1759. }
  1760. }
  1761. }
  1762. }
  1763. }
  1764. }
  1765. // Even if the FldInfo says that the field access may be polymorphic, be optimistic that if the function object has inline caches, they'll be monomorphic
  1766. else if(function && Js::ScriptFunctionWithInlineCache::Is(function) && (cacheType & Js::FldInfo_InlineCandidate || !polymorphicCacheOnFunctionBody))
  1767. {
  1768. Js::InlineCache *inlineCache = Js::ScriptFunctionWithInlineCache::FromVar(function)->GetInlineCache(i);
  1769. Js::ObjTypeSpecFldInfo* objTypeSpecFldInfo = nullptr;
  1770. if(!PHASE_OFF(Js::ObjTypeSpecPhase, functionBody) || !PHASE_OFF(Js::FixedMethodsPhase, functionBody))
  1771. {
  1772. if(cacheType & (Js::FldInfo_FromLocal | Js::FldInfo_FromProto)) // Remove FldInfo_FromLocal?
  1773. {
  1774. // WinBlue 170722: Disable ObjTypeSpec optimization for activation object in debug mode,
  1775. // as it can result in BailOutFailedTypeCheck before locals are set to undefined,
  1776. // which can result in using garbage object during bailout/restore values.
  1777. if (!(IsInDebugMode() && inlineCache->GetType() &&
  1778. inlineCache->GetType()->GetTypeId() == Js::TypeIds_ActivationObject))
  1779. {
  1780. objTypeSpecFldInfo = Js::ObjTypeSpecFldInfo::CreateFrom(objTypeSpecFldInfoList->Count(), inlineCache, i, entryPoint, topFunctionBody, functionBody, InlineCacheStatsArg(jitTimeData));
  1781. if (objTypeSpecFldInfo)
  1782. {
  1783. IncInlineCacheCount(clonedMonoInlineCacheCount);
  1784. if (!PHASE_OFF(Js::InlineApplyTargetPhase, functionBody) && IsInlinee && (cacheType & Js::FldInfo_InlineCandidate))
  1785. {
  1786. inlineApplyTarget = true;
  1787. }
  1788. if (!isJitTimeDataComputed)
  1789. {
  1790. jitTimeData->GetObjTypeSpecFldInfoArray()->SetInfo(recycler, functionBody, i, objTypeSpecFldInfo);
  1791. objTypeSpecFldInfoList->Prepend(objTypeSpecFldInfo);
  1792. }
  1793. }
  1794. }
  1795. }
  1796. }
  1797. }
  1798. else
  1799. {
  1800. const auto polymorphicInlineCache = functionBody->GetPolymorphicInlineCache(i);
  1801. if (polymorphicInlineCache != nullptr)
  1802. {
  1803. IncInlineCacheCount(polyInlineCacheCount);
  1804. if (profileData->GetFldInfo(functionBody, i)->ShouldUsePolymorphicInlineCache())
  1805. {
  1806. IncInlineCacheCount(highUtilPolyInlineCacheCount);
  1807. }
  1808. else
  1809. {
  1810. IncInlineCacheCount(lowUtilPolyInlineCacheCount);
  1811. }
  1812. if (!PHASE_OFF(Js::EquivObjTypeSpecPhase, topFunctionBody) && !topFunctionBody->GetAnyDynamicProfileInfo()->IsEquivalentObjTypeSpecDisabled())
  1813. {
  1814. if (!polymorphicInlineCache->GetIgnoreForEquivalentObjTypeSpec() || (polymorphicInlineCache->GetCloneForJitTimeUse() && !PHASE_OFF(Js::PolymorphicInlinePhase, functionBody) && !PHASE_OFF(Js::PolymorphicInlineFixedMethodsPhase, functionBody)))
  1815. {
  1816. #if ENABLE_DEBUG_CONFIG_OPTIONS
  1817. if (PHASE_VERBOSE_TRACE(Js::ObjTypeSpecPhase, topFunctionBody) || PHASE_VERBOSE_TRACE(Js::EquivObjTypeSpecPhase, topFunctionBody))
  1818. {
  1819. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  1820. wchar_t debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  1821. Js::PropertyId propertyId = functionBody->GetPropertyIdFromCacheId(i);
  1822. Js::PropertyRecord const * const propertyRecord = functionBody->GetScriptContext()->GetPropertyName(propertyId);
  1823. Output::Print(L"ObTypeSpec: top function %s (%s), function %s (%s): cloning poly cache for %s (#%d) cache %d \n",
  1824. topFunctionBody->GetDisplayName(), topFunctionBody->GetDebugNumberSet(debugStringBuffer),
  1825. functionBody->GetDisplayName(), functionBody->GetDebugNumberSet(debugStringBuffer2), propertyRecord->GetBuffer(), propertyId, i);
  1826. Output::Flush();
  1827. }
  1828. #endif
  1829. Js::ObjTypeSpecFldInfo* objTypeSpecFldInfo = Js::ObjTypeSpecFldInfo::CreateFrom(objTypeSpecFldInfoList->Count(), polymorphicInlineCache, i, entryPoint, topFunctionBody, functionBody, InlineCacheStatsArg(jitTimeData));
  1830. if (objTypeSpecFldInfo != nullptr)
  1831. {
  1832. if (!isJitTimeDataComputed)
  1833. {
  1834. jitTimeData->GetObjTypeSpecFldInfoArray()->SetInfo(recycler, functionBody, i, objTypeSpecFldInfo);
  1835. IncInlineCacheCount(clonedPolyInlineCacheCount);
  1836. objTypeSpecFldInfoList->Prepend(objTypeSpecFldInfo);
  1837. }
  1838. if (!PHASE_OFF(Js::InlineAccessorsPhase, functionBody) && (cacheType & Js::FldInfo_FromAccessor) && (cacheType & Js::FldInfo_InlineCandidate))
  1839. {
  1840. inlineGetterSetter = true;
  1841. }
  1842. }
  1843. }
  1844. else
  1845. {
  1846. IncInlineCacheCount(ignoredPolyInlineCacheCount);
  1847. }
  1848. }
  1849. else
  1850. {
  1851. IncInlineCacheCount(disabledPolyInlineCacheCount);
  1852. }
  1853. }
  1854. else
  1855. {
  1856. IncInlineCacheCount(nullPolyInlineCacheCount);
  1857. }
  1858. if (polymorphicInlineCache != nullptr)
  1859. {
  1860. if (PHASE_VERBOSE_TRACE1(Js::PolymorphicInlineCachePhase))
  1861. {
  1862. if (IsInlinee) Output::Print(L"\t");
  1863. Output::Print(L"\t%d: PIC size = %d\n", i, polymorphicInlineCache->GetSize());
  1864. #if DBG_DUMP
  1865. polymorphicInlineCache->Dump();
  1866. #endif
  1867. }
  1868. else if (PHASE_TRACE1(Js::PolymorphicInlineCachePhase))
  1869. {
  1870. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  1871. Js::PropertyId propertyId = functionBody->GetPropertyIdFromCacheId(i);
  1872. Js::PropertyRecord const * const propertyRecord = functionBody->GetScriptContext()->GetPropertyName(propertyId);
  1873. Output::Print(L"Trace PIC JIT function %s (%s) field: %s (index: %d) \n", functionBody->GetDisplayName(), functionBody->GetDebugNumberSet(debugStringBuffer),
  1874. propertyRecord->GetBuffer(), i);
  1875. }
  1876. byte polyCacheUtil = profileData->GetFldInfo(functionBody, i)->polymorphicInlineCacheUtilization;
  1877. entryPoint->GetPolymorphicInlineCacheInfo()->SetPolymorphicInlineCache(functionBody, i, polymorphicInlineCache, IsInlinee, polyCacheUtil);
  1878. if (IsInlinee)
  1879. {
  1880. Assert(entryPoint->GetPolymorphicInlineCacheInfo()->GetInlineeInfo(functionBody)->GetPolymorphicInlineCaches()->GetInlineCache(functionBody, i) == polymorphicInlineCache);
  1881. }
  1882. else
  1883. {
  1884. Assert(entryPoint->GetPolymorphicInlineCacheInfo()->GetSelfInfo()->GetPolymorphicInlineCaches()->GetInlineCache(functionBody, i) == polymorphicInlineCache);
  1885. }
  1886. }
  1887. else if(IsInlinee && CONFIG_FLAG(CloneInlinedPolymorphicCaches))
  1888. {
  1889. // Clone polymorphic inline caches for runtime usage in this inlinee. The JIT should only use the pointers to
  1890. // the inline caches, as their cached data is not guaranteed to be stable while jitting.
  1891. Js::InlineCache *const inlineCache =
  1892. function && Js::ScriptFunctionWithInlineCache::Is(function)
  1893. ? Js::ScriptFunctionWithInlineCache::FromVar(function)->GetInlineCache(i)
  1894. : functionBody->GetInlineCache(i);
  1895. Js::PropertyId propertyId = functionBody->GetPropertyIdFromCacheId(i);
  1896. const auto clone = runtimeData->ClonedInlineCaches()->GetInlineCache(functionBody, i);
  1897. if (clone)
  1898. {
  1899. inlineCache->CopyTo(propertyId, functionBody->GetScriptContext(), clone);
  1900. }
  1901. else
  1902. {
  1903. runtimeData->ClonedInlineCaches()->SetInlineCache(
  1904. recycler,
  1905. functionBody,
  1906. i,
  1907. inlineCache->Clone(propertyId, functionBody->GetScriptContext()));
  1908. }
  1909. }
  1910. }
  1911. }
  1912. }
  1913. // Gather code gen data for inlinees
  1914. if(IsInlinee ? !inliningDecider.InlineIntoInliner(functionBody) : !inliningDecider.InlineIntoTopFunc())
  1915. {
  1916. return;
  1917. }
  1918. class AutoCleanup
  1919. {
  1920. private:
  1921. Js::FunctionBody *const functionBody;
  1922. public:
  1923. AutoCleanup(Js::FunctionBody *const functionBody) : functionBody(functionBody)
  1924. {
  1925. functionBody->OnBeginInlineInto();
  1926. }
  1927. ~AutoCleanup()
  1928. {
  1929. functionBody->OnEndInlineInto();
  1930. }
  1931. } autoCleanup(functionBody);
  1932. const auto profiledCallSiteCount = functionBody->GetProfiledCallSiteCount();
  1933. Assert(profiledCallSiteCount != 0 || functionBody->GetAnyDynamicProfileInfo()->hasLdFldCallSiteInfo());
  1934. if (profiledCallSiteCount && !isJitTimeDataComputed)
  1935. {
  1936. jitTimeData->inlineesBv = BVFixed::New<Recycler>(profiledCallSiteCount, recycler);
  1937. }
  1938. // Iterate through profiled call sites recursively and determine what should be inlined
  1939. for(Js::ProfileId profiledCallSiteId = 0; profiledCallSiteId < profiledCallSiteCount; ++profiledCallSiteId)
  1940. {
  1941. Js::FunctionInfo *const inlinee = inliningDecider.InlineCallSite(functionBody, profiledCallSiteId, recursiveInlineDepth);
  1942. if(!inlinee)
  1943. {
  1944. if (profileData->CallSiteHasProfileData(profiledCallSiteId))
  1945. {
  1946. jitTimeData->inlineesBv->Set(profiledCallSiteId);
  1947. }
  1948. //Try and see if this polymorphic call
  1949. Js::FunctionBody* inlineeFunctionBodyArray[Js::DynamicProfileInfo::maxPolymorphicInliningSize] = {0};
  1950. bool canInlineArray[Js::DynamicProfileInfo::maxPolymorphicInliningSize] = { 0 };
  1951. uint polyInlineeCount = inliningDecider.InlinePolymorphicCallSite(functionBody, profiledCallSiteId, inlineeFunctionBodyArray,
  1952. Js::DynamicProfileInfo::maxPolymorphicInliningSize, canInlineArray);
  1953. //We should be able to inline at least two functions here.
  1954. if (polyInlineeCount >= 2)
  1955. {
  1956. for (uint id = 0; id < polyInlineeCount; id++)
  1957. {
  1958. bool isInlined = canInlineArray[id];
  1959. Js::FunctionCodeGenRuntimeData *inlineeRunTimeData = IsInlinee ? runtimeData->EnsureInlinee(recycler, profiledCallSiteId, inlineeFunctionBodyArray[id]) : functionBody->EnsureInlineeCodeGenRuntimeData(recycler, profiledCallSiteId, inlineeFunctionBodyArray[id]);
  1960. if (!isJitTimeDataComputed)
  1961. {
  1962. Js::FunctionCodeGenJitTimeData *inlineeJitTimeData = jitTimeData->AddInlinee(recycler, profiledCallSiteId, inlineeFunctionBodyArray[id], isInlined);
  1963. if (isInlined)
  1964. {
  1965. GatherCodeGenData<true>(
  1966. recycler,
  1967. topFunctionBody,
  1968. inlineeFunctionBodyArray[id],
  1969. entryPoint,
  1970. inliningDecider,
  1971. objTypeSpecFldInfoList,
  1972. inlineeJitTimeData,
  1973. inlineeRunTimeData
  1974. );
  1975. AddInlineCacheStats(jitTimeData, inlineeJitTimeData);
  1976. }
  1977. }
  1978. }
  1979. }
  1980. }
  1981. else
  1982. {
  1983. jitTimeData->inlineesBv->Set(profiledCallSiteId);
  1984. Js::FunctionBody *const inlineeFunctionBody = inlinee->GetFunctionBody();
  1985. if(!inlineeFunctionBody )
  1986. {
  1987. if (!isJitTimeDataComputed)
  1988. {
  1989. jitTimeData->AddInlinee(recycler, profiledCallSiteId, inlinee);
  1990. }
  1991. continue;
  1992. }
  1993. // We are at a callsite that can be inlined. Let the callsite be foo().
  1994. // If foo has inline caches on it, we need to be able to get those for cloning.
  1995. // To do this,
  1996. // 1. Retrieve the inline cache associated with the load of "foo",
  1997. // 2. Try to get the fixed function object corresponding to "foo",
  1998. // 3. Pass the fixed function object to GatherCodeGenData which can clone its inline caches.
  1999. uint ldFldInlineCacheIndex = profileData->GetLdFldCacheIndexFromCallSiteInfo(functionBody, profiledCallSiteId);
  2000. Js::InlineCache * inlineCache = nullptr;
  2001. if ((ldFldInlineCacheIndex != Js::Constants::NoInlineCacheIndex) && (ldFldInlineCacheIndex < functionBody->GetInlineCacheCount()))
  2002. {
  2003. if(function && Js::ScriptFunctionWithInlineCache::Is(function))
  2004. {
  2005. inlineCache = Js::ScriptFunctionWithInlineCache::FromVar(function)->GetInlineCache(ldFldInlineCacheIndex);
  2006. }
  2007. else
  2008. {
  2009. inlineCache = functionBody->GetInlineCache(ldFldInlineCacheIndex);
  2010. }
  2011. }
  2012. Js::JavascriptFunction* fixedFunctionObject = nullptr;
  2013. if (inlineCache && (inlineCache->IsLocal() || inlineCache->IsProto()))
  2014. {
  2015. inlineCache->TryGetFixedMethodFromCache(functionBody, ldFldInlineCacheIndex, &fixedFunctionObject);
  2016. }
  2017. if (fixedFunctionObject && !fixedFunctionObject->GetFunctionInfo()->IsDeferred() && fixedFunctionObject->GetFunctionBody() != inlineeFunctionBody)
  2018. {
  2019. fixedFunctionObject = nullptr;
  2020. }
  2021. if (!PHASE_OFF(Js::InlineRecursivePhase, functionBody))
  2022. {
  2023. if (!isJitTimeDataComputed)
  2024. {
  2025. Js::FunctionCodeGenRuntimeData *inlineeRuntimeData = IsInlinee ? runtimeData->EnsureInlinee(recycler, profiledCallSiteId, inlineeFunctionBody) : functionBody->EnsureInlineeCodeGenRuntimeData(recycler, profiledCallSiteId, inlineeFunctionBody);
  2026. Js::FunctionCodeGenJitTimeData *inlineeJitTimeData = nullptr;
  2027. bool doShareJitTimeData = false;
  2028. // Share the jitTime data if i) it is a recursive call, ii) jitTimeData is not from a polymorphic chain, and iii) all the call sites are recursive
  2029. if (functionBody == inlineeFunctionBody // recursive call
  2030. && jitTimeData->GetNext() == nullptr // not from a polymorphic call site
  2031. && profiledCallSiteCount == functionBody->GetNumberOfRecursiveCallSites()) // all the callsites are recursive
  2032. {
  2033. jitTimeData->SetupRecursiveInlineeChain(recycler, profiledCallSiteId);
  2034. inlineeJitTimeData = jitTimeData;
  2035. doShareJitTimeData = true;
  2036. // If a recursive inliner has multiple recursive inlinees and if they hit the InlineCountMax
  2037. // threshold, then runtimeData for the inlinees may not be available (bug 2269097) for the inlinees
  2038. // as InlineCountMax threshold heuristics has higher priority than recursive inline heuristics. Since
  2039. // we share runtime data between recursive inliner and recursive inlinees, and all the call sites
  2040. // are recursive (we only do recursive inlining for functions where all the callsites are recursive),
  2041. // we can iterate over all the callsites of the inliner and setup the runtime data recursive inlinee chain
  2042. for (Js::ProfileId id = 0; id < profiledCallSiteCount; id++)
  2043. {
  2044. inlineeRuntimeData->SetupRecursiveInlineeChain(recycler, id, inlineeFunctionBody);
  2045. }
  2046. }
  2047. else
  2048. {
  2049. inlineeJitTimeData = jitTimeData->AddInlinee(recycler, profiledCallSiteId, inlinee);
  2050. }
  2051. GatherCodeGenData<true>(
  2052. recycler,
  2053. topFunctionBody,
  2054. inlineeFunctionBody,
  2055. entryPoint,
  2056. inliningDecider,
  2057. objTypeSpecFldInfoList,
  2058. inlineeJitTimeData,
  2059. inlineeRuntimeData,
  2060. fixedFunctionObject,
  2061. doShareJitTimeData,
  2062. functionBody == inlineeFunctionBody ? recursiveInlineDepth + 1 : 0);
  2063. if (jitTimeData != inlineeJitTimeData)
  2064. {
  2065. AddInlineCacheStats(jitTimeData, inlineeJitTimeData);
  2066. }
  2067. }
  2068. }
  2069. else
  2070. {
  2071. Js::FunctionCodeGenJitTimeData *const inlineeJitTimeData = jitTimeData->AddInlinee(recycler, profiledCallSiteId, inlinee);
  2072. GatherCodeGenData<true>(
  2073. recycler,
  2074. topFunctionBody,
  2075. inlineeFunctionBody,
  2076. entryPoint,
  2077. inliningDecider,
  2078. objTypeSpecFldInfoList,
  2079. inlineeJitTimeData,
  2080. IsInlinee
  2081. ? runtimeData->EnsureInlinee(recycler, profiledCallSiteId, inlineeFunctionBody)
  2082. : functionBody->EnsureInlineeCodeGenRuntimeData(recycler, profiledCallSiteId, inlineeFunctionBody),
  2083. fixedFunctionObject);
  2084. AddInlineCacheStats(jitTimeData, inlineeJitTimeData);
  2085. }
  2086. }
  2087. }
  2088. // Iterate through inlineCache getter setter and apply call sites recursively and determine what should be inlined
  2089. if (inlineGetterSetter || inlineApplyTarget || inlineCallTarget)
  2090. {
  2091. for(uint inlineCacheIndex = 0; inlineCacheIndex < functionBody->GetInlineCacheCount(); ++inlineCacheIndex)
  2092. {
  2093. const auto cacheType = profileData->GetFldInfo(functionBody, inlineCacheIndex)->flags;
  2094. if(cacheType == Js::FldInfo_NoInfo)
  2095. {
  2096. continue;
  2097. }
  2098. bool getSetInlineCandidate = inlineGetterSetter && ((cacheType & Js::FldInfo_FromAccessor) != 0);
  2099. bool callApplyInlineCandidate = (inlineCallTarget || inlineApplyTarget) && ((cacheType & Js::FldInfo_FromAccessor) == 0);
  2100. // 1. Do not inline if the x in a.x is both a getter/setter and is followed by a .apply
  2101. // 2. If we were optimistic earlier in assuming that the inline caches on the function object would be monomorphic and asserted that we may possibly inline apply target,
  2102. // then even if the field info flags say that the field access may be polymorphic, carry that optimism forward and try to inline apply target.
  2103. if (getSetInlineCandidate ^ callApplyInlineCandidate)
  2104. {
  2105. Js::ObjTypeSpecFldInfo* info = jitTimeData->GetObjTypeSpecFldInfoArray()->GetInfo(functionBody, inlineCacheIndex);
  2106. if (info == nullptr)
  2107. {
  2108. continue;
  2109. }
  2110. if (!(getSetInlineCandidate && info->UsesAccessor()) && !(callApplyInlineCandidate && !info->IsPoly()))
  2111. {
  2112. continue;
  2113. }
  2114. Js::JavascriptFunction* inlineeFunction = info->GetFieldValueAsFunctionIfAvailable();
  2115. if (inlineeFunction == nullptr)
  2116. {
  2117. continue;
  2118. }
  2119. Js::FunctionInfo* inlineeFunctionInfo = inlineeFunction->GetFunctionInfo();
  2120. Js::FunctionProxy* inlineeFunctionProxy = inlineeFunctionInfo->GetFunctionProxy();
  2121. if (inlineeFunctionProxy != nullptr && !functionBody->CheckCalleeContextForInlining(inlineeFunctionProxy))
  2122. {
  2123. continue;
  2124. }
  2125. const auto inlinee = inliningDecider.Inline(functionBody, inlineeFunctionInfo, false /*isConstructorCall*/, false /*isPolymorphicCall*/, 0, (uint16)inlineCacheIndex, 0, false);
  2126. if(!inlinee)
  2127. {
  2128. continue;
  2129. }
  2130. const auto inlineeFunctionBody = inlinee->GetFunctionBody();
  2131. if(!inlineeFunctionBody)
  2132. {
  2133. if ((
  2134. #ifdef ENABLE_DOM_FAST_PATH
  2135. inlinee->GetLocalFunctionId() == Js::JavascriptBuiltInFunction::DOMFastPathGetter ||
  2136. inlinee->GetLocalFunctionId() == Js::JavascriptBuiltInFunction::DOMFastPathSetter ||
  2137. #endif
  2138. (inlineeFunctionInfo->GetAttributes() & Js::FunctionInfo::Attributes::BuiltInInlinableAsLdFldInlinee) != 0) &&
  2139. !isJitTimeDataComputed)
  2140. {
  2141. jitTimeData->AddLdFldInlinee(recycler, inlineCacheIndex, inlinee);
  2142. }
  2143. continue;
  2144. }
  2145. Js::FunctionCodeGenRuntimeData *const inlineeRuntimeData = IsInlinee ? runtimeData->EnsureLdFldInlinee(recycler, inlineCacheIndex, inlineeFunctionBody) :
  2146. functionBody->EnsureLdFldInlineeCodeGenRuntimeData(recycler, inlineCacheIndex, inlineeFunctionBody);
  2147. if (inlineeRuntimeData->GetFunctionBody() != inlineeFunctionBody)
  2148. {
  2149. //There are obscure cases where profileData has not yet seen the polymorphic LdFld but the inlineCache has the newer object from which getter is invoked.
  2150. //In this case we don't want to inline that getter. Polymorphic bit will be set later correctly.
  2151. //See WinBlue 54540
  2152. continue;
  2153. }
  2154. if (!isJitTimeDataComputed)
  2155. {
  2156. Js::FunctionCodeGenJitTimeData *inlineeJitTimeData = jitTimeData->AddLdFldInlinee(recycler, inlineCacheIndex, inlinee);
  2157. GatherCodeGenData<true>(
  2158. recycler,
  2159. topFunctionBody,
  2160. inlineeFunctionBody,
  2161. entryPoint,
  2162. inliningDecider,
  2163. objTypeSpecFldInfoList,
  2164. inlineeJitTimeData,
  2165. inlineeRuntimeData,
  2166. nullptr);
  2167. AddInlineCacheStats(jitTimeData, inlineeJitTimeData);
  2168. }
  2169. }
  2170. }
  2171. }
  2172. #ifdef FIELD_ACCESS_STATS
  2173. if (PHASE_VERBOSE_TRACE(Js::ObjTypeSpecPhase, topFunctionBody) || PHASE_VERBOSE_TRACE(Js::EquivObjTypeSpecPhase, topFunctionBody))
  2174. {
  2175. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  2176. if (jitTimeData->inlineCacheStats)
  2177. {
  2178. Output::Print(L"ObTypeSpec: gathered code gen data for function %s (#%u) inlined %s (#%u): inline cache stats:\n",
  2179. topFunctionBody->GetDisplayName(), topFunctionBody->GetFunctionNumber(), functionBody->GetDisplayName(), functionBody->GetFunctionNumber());
  2180. Output::Print(L" overall: total %u, no profile info %u\n",
  2181. jitTimeData->inlineCacheStats->totalInlineCacheCount, jitTimeData->inlineCacheStats->noInfoInlineCacheCount);
  2182. Output::Print(L" mono: total %u, empty %u, cloned %u\n",
  2183. jitTimeData->inlineCacheStats->monoInlineCacheCount, jitTimeData->inlineCacheStats->emptyMonoInlineCacheCount,
  2184. jitTimeData->inlineCacheStats->clonedMonoInlineCacheCount);
  2185. Output::Print(L" poly: total %u (high %u, low %u), empty %u, equivalent %u, cloned %u\n",
  2186. jitTimeData->inlineCacheStats->polyInlineCacheCount, jitTimeData->inlineCacheStats->highUtilPolyInlineCacheCount,
  2187. jitTimeData->inlineCacheStats->lowUtilPolyInlineCacheCount, jitTimeData->inlineCacheStats->emptyPolyInlineCacheCount,
  2188. jitTimeData->inlineCacheStats->equivPolyInlineCacheCount, jitTimeData->inlineCacheStats->clonedPolyInlineCacheCount);
  2189. }
  2190. else
  2191. {
  2192. Output::Print(L"ObTypeSpec: function %s (%s): inline cache stats unavailable\n", topFunctionBody->GetDisplayName(), topFunctionBody->GetDebugNumberSet(debugStringBuffer));
  2193. }
  2194. Output::Flush();
  2195. }
  2196. #endif
  2197. #undef SetInlineCacheCount
  2198. #undef IncInlineCacheCount
  2199. #undef AddInlineCacheStats
  2200. }
// Collects everything the JIT needs from the runtime for one work item —
// inlinee decisions, inline-cache clones, and obj-type-spec field info — into
// recycler-allocated data that is safe to hand to the (background) JIT thread.
// Must run on the script thread (ASSERT_THREAD) because it reads live caches.
Js::CodeGenRecyclableData *
NativeCodeGenerator::GatherCodeGenData(Js::FunctionBody *const topFunctionBody, Js::FunctionBody *const functionBody, Js::EntryPointInfo *const entryPoint, CodeGenWorkItem* workItem, void* function)
{
    ASSERT_THREAD();
    Assert(functionBody);

#ifdef PROFILE_EXEC
    // RAII helper: brackets the whole gather with the Delay and
    // GatherCodeGenData profile phases (ended in reverse order on exit).
    class AutoProfile
    {
    private:
        Js::ScriptContextProfiler *const codeGenProfiler;

    public:
        AutoProfile(Js::ScriptContextProfiler *const codeGenProfiler) : codeGenProfiler(codeGenProfiler)
        {
            ProfileBegin(codeGenProfiler, Js::DelayPhase);
            ProfileBegin(codeGenProfiler, Js::GatherCodeGenDataPhase);
        }

        ~AutoProfile()
        {
            ProfileEnd(codeGenProfiler, Js::GatherCodeGenDataPhase);
            ProfileEnd(codeGenProfiler, Js::DelayPhase);
        }
    } autoProfile(foregroundCodeGenProfiler);
#endif

    const auto recycler = scriptContext->GetRecycler();
    {
        const auto jitTimeData = RecyclerNew(recycler, Js::FunctionCodeGenJitTimeData, functionBody, entryPoint);
        InliningDecider inliningDecider(functionBody, workItem->Type() == JsLoopBodyWorkItemType, this->IsInDebugMode(), workItem->GetJitMode());

        // Temp allocator scoped to this gather; holds the obj-type-spec field
        // info list until it is copied into the recycler-allocated array below.
        BEGIN_TEMP_ALLOCATOR(gatherCodeGenDataAllocator, scriptContext, L"GatherCodeGenData");

        ObjTypeSpecFldInfoList* objTypeSpecFldInfoList = JitAnew(gatherCodeGenDataAllocator, ObjTypeSpecFldInfoList, gatherCodeGenDataAllocator);

#if ENABLE_DEBUG_CONFIG_OPTIONS
        wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        wchar_t debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        if (PHASE_TRACE(Js::ObjTypeSpecPhase, topFunctionBody) || PHASE_TRACE(Js::EquivObjTypeSpecPhase, topFunctionBody))
        {
            Output::Print(L"ObjTypeSpec: top function %s (%s), function %s (%s): GatherCodeGenData(): \n",
                topFunctionBody->GetDisplayName(), topFunctionBody->GetDebugNumberSet(debugStringBuffer), functionBody->GetDisplayName(), functionBody->GetDebugNumberSet(debugStringBuffer2));
        }
#endif

        // Recursive gather over the inlinee tree rooted at functionBody.
        // `function`, when provided, is the live function object whose inline
        // caches may be cloned in preference to the function body's.
        GatherCodeGenData<false>(recycler, topFunctionBody, functionBody, entryPoint, inliningDecider, objTypeSpecFldInfoList, jitTimeData, nullptr, function ? Js::JavascriptFunction::FromVar(function) : nullptr, 0);

#ifdef FIELD_ACCESS_STATS
        Js::FieldAccessStats* fieldAccessStats = entryPoint->EnsureFieldAccessStats(recycler);
        fieldAccessStats->Add(jitTimeData->inlineCacheStats);
        entryPoint->GetScriptContext()->RecordFieldAccessStats(topFunctionBody, fieldAccessStats);
#endif

#ifdef FIELD_ACCESS_STATS
        if (PHASE_TRACE(Js::ObjTypeSpecPhase, topFunctionBody) || PHASE_TRACE(Js::EquivObjTypeSpecPhase, topFunctionBody))
        {
            auto stats = jitTimeData->inlineCacheStats;
            Output::Print(L"ObjTypeSpec: gathered code gen data for function %s (%s): inline cache stats:\n", topFunctionBody->GetDisplayName(), topFunctionBody->GetDebugNumberSet(debugStringBuffer));
            Output::Print(L"    overall: total %u, no profile info %u\n", stats->totalInlineCacheCount, stats->noInfoInlineCacheCount);
            Output::Print(L"    mono: total %u, empty %u, cloned %u\n",
                stats->monoInlineCacheCount, stats->emptyMonoInlineCacheCount, stats->clonedMonoInlineCacheCount);
            Output::Print(L"    poly: total %u (high %u, low %u), null %u, empty %u, ignored %u, disabled %u, equivalent %u, non-equivalent %u, cloned %u\n",
                stats->polyInlineCacheCount, stats->highUtilPolyInlineCacheCount, stats->lowUtilPolyInlineCacheCount,
                stats->nullPolyInlineCacheCount, stats->emptyPolyInlineCacheCount, stats->ignoredPolyInlineCacheCount, stats->disabledPolyInlineCacheCount,
                stats->equivPolyInlineCacheCount, stats->nonEquivPolyInlineCacheCount, stats->clonedPolyInlineCacheCount);
        }
#endif

        uint objTypeSpecFldInfoCount = objTypeSpecFldInfoList->Count();
        jitTimeData->SetGlobalObjTypeSpecFldInfoArray(RecyclerNewArray(recycler, Js::ObjTypeSpecFldInfo*, objTypeSpecFldInfoCount), objTypeSpecFldInfoCount);

        // Array slots are assigned in descending order as the list is walked
        // (propertyInfoId counts down from count - 1).
        uint propertyInfoId = objTypeSpecFldInfoCount - 1;
        FOREACH_SLISTCOUNTED_ENTRY(Js::ObjTypeSpecFldInfo*, info, objTypeSpecFldInfoList)
        {
            // Clear field values we don't need so we don't unnecessarily pin them while JIT-ing.
            if (!info->GetKeepFieldValue() && !(info->IsPoly() && info->DoesntHaveEquivalence()))
            {
                info->SetFieldValue(nullptr);
            }
            jitTimeData->SetGlobalObjTypeSpecFldInfo(propertyInfoId--, info);
        }
        NEXT_SLISTCOUNTED_ENTRY;

        END_TEMP_ALLOCATOR(gatherCodeGenDataAllocator, scriptContext);

        return RecyclerNew(recycler, Js::CodeGenRecyclableData, jitTimeData);
    }
}
  2276. bool
  2277. NativeCodeGenerator::IsBackgroundJIT() const
  2278. {
  2279. return Processor()->ProcessesInBackground();
  2280. }
// Called as script execution starts. For small scripts early in their life,
// optionally blocks the foreground for a short window so the background JIT
// can finish pending work items before execution proceeds. Each `return`
// below is a heuristic deciding the delay is not worthwhile.
void
NativeCodeGenerator::EnterScriptStart()
{
    // We should be in execution
    Assert(scriptContext->GetThreadContext()->IsScriptActive());
    Assert(scriptContext->GetThreadContext()->IsInScript());

    // Delay disabled, execution-mode limits enforced, or we are nested deep
    // in re-entrant script — don't stall the foreground.
    if(CONFIG_FLAG(BgJitDelay) == 0 ||
        Js::Configuration::Global.flags.EnforceExecutionModeLimits ||
        scriptContext->GetThreadContext()->GetCallRootLevel() > 2)
    {
        return;
    }

    if (pendingCodeGenWorkItems == 0 || pendingCodeGenWorkItems > (uint)CONFIG_FLAG(BgJitPendingFuncCap))
    {
        // We have already finish code gen for this script context
        // Only wait if the script is small and we can easily pre-JIT all of it.
        return;
    }

    if (this->IsClosed())
    {
        return;
    }

    // Don't need to do anything if we're in debug mode
    if (this->IsInDebugMode() && !Js::Configuration::Global.EnableJitInDebugMode())
    {
        return;
    }

    // We've already done a few calls to this scriptContext, don't bother waiting.
    if (scriptContext->callCount >= 3)
    {
        return;
    }

    scriptContext->callCount++;

    if (scriptContext->GetDeferredBody())
    {
        OUTPUT_TRACE(Js::DelayPhase, L"No delay because the script has a deferred body\n");
        return;
    }

    if(CONFIG_FLAG(BgJitDelayFgBuffer) >= CONFIG_FLAG(BgJitDelay))
    {
        return;
    }

    // RAII helper: brackets the wait with ETW delay events and (when profiling
    // is compiled in) the Delay/Speculation profile phases.
    class AutoCleanup
    {
    private:
        Js::ScriptContextProfiler *const codeGenProfiler;

    public:
        AutoCleanup(Js::ScriptContextProfiler *const codeGenProfiler) : codeGenProfiler(codeGenProfiler)
        {
            JS_ETW(EventWriteJSCRIPT_NATIVECODEGEN_DELAY_START(this, 0));
#ifdef PROFILE_EXEC
            ProfileBegin(codeGenProfiler, Js::DelayPhase);
            ProfileBegin(codeGenProfiler, Js::SpeculationPhase);
#endif
        }

        ~AutoCleanup()
        {
#ifdef PROFILE_EXEC
            ProfileEnd(codeGenProfiler, Js::SpeculationPhase);
            ProfileEnd(codeGenProfiler, Js::DelayPhase);
#endif
            JS_ETW(EventWriteJSCRIPT_NATIVECODEGEN_DELAY_STOP(this, 0));
        }
    } autoCleanup(
#ifdef PROFILE_EXEC
        this->foregroundCodeGenProfiler
#else
        nullptr
#endif
        );

    // Block until the background processor drains this manager's queue or the
    // delay budget (BgJitDelay minus the foreground buffer) expires.
    Processor()->PrioritizeManagerAndWait(this, CONFIG_FLAG(BgJitDelay) - CONFIG_FLAG(BgJitDelayFgBuffer));
}
  2353. // Is the given address within one of our JIT'd frame?
  2354. bool
  2355. IsNativeFunctionAddr(Js::ScriptContext *scriptContext, void * address)
  2356. {
  2357. if (!scriptContext->GetNativeCodeGenerator())
  2358. {
  2359. return false;
  2360. }
  2361. return scriptContext->GetNativeCodeGenerator()->IsNativeFunctionAddr(address);
  2362. }
  2363. void
  2364. FreeNativeCodeGenAllocation(Js::ScriptContext *scriptContext, void * address)
  2365. {
  2366. if (!scriptContext->GetNativeCodeGenerator())
  2367. {
  2368. return;
  2369. }
  2370. scriptContext->GetNativeCodeGenerator()->QueueFreeNativeCodeGenAllocation(address);
  2371. }
  2372. bool TryReleaseNonHiPriWorkItem(Js::ScriptContext* scriptContext, CodeGenWorkItem* workItem)
  2373. {
  2374. if (!scriptContext->GetNativeCodeGenerator())
  2375. {
  2376. return false;
  2377. }
  2378. return scriptContext->GetNativeCodeGenerator()->TryReleaseNonHiPriWorkItem(workItem);
  2379. }
  2380. // Called from within the lock
  2381. // The work item cannot be used after this point if it returns true
  2382. bool NativeCodeGenerator::TryReleaseNonHiPriWorkItem(CodeGenWorkItem* workItem)
  2383. {
  2384. // If its the highest priority, don't release it, let the job continue
  2385. if (workItem->IsInJitQueue())
  2386. {
  2387. return false;
  2388. }
  2389. workItems.Unlink(workItem);
  2390. Assert(!workItem->RecyclableData());
  2391. workItem->Delete();
  2392. return true;
  2393. }
  2394. // Called on the same thread that did the allocation
  2395. bool
  2396. NativeCodeGenerator::IsNativeFunctionAddr(void * address)
  2397. {
  2398. return
  2399. (this->backgroundAllocators && this->backgroundAllocators->emitBufferManager.IsInRange(address)) ||
  2400. (this->foregroundAllocators && this->foregroundAllocators->emitBufferManager.IsInRange(address));
  2401. }
  2402. void
  2403. NativeCodeGenerator::FreeNativeCodeGenAllocation(void* address)
  2404. {
  2405. if(this->backgroundAllocators)
  2406. {
  2407. this->backgroundAllocators->emitBufferManager.FreeAllocation(address);
  2408. }
  2409. }
// Queues a native code allocation for release. The address is first
// deregistered as a valid CFG call target; then the foreground allocators are
// tried (freed synchronously), and only if they don't own the address is a
// background free-loop-body job queued so the owning thread frees it.
void
NativeCodeGenerator::QueueFreeNativeCodeGenAllocation(void* address)
{
    ASSERT_THREAD();

    if(IsClosed())
    {
        return;
    }

    //DeRegister Entry Point for CFG
    ThreadContext::GetContextForCurrentThread()->SetValidCallTargetForCFG(address, false);

    // The foreground allocators may have been used
    if(this->foregroundAllocators && this->foregroundAllocators->emitBufferManager.FreeAllocation(address))
    {
        return;
    }

    // The background allocators were used. Queue a job to free the allocation from the background thread.
    this->freeLoopBodyManager.QueueFreeLoopBodyJob(address);
}
// Queues a job to free loop-body code at codeAddress. Normally the job is
// heap-allocated and handed to the processor; on allocation failure a
// stack-allocated job is used instead, which forces this thread to block
// until the processor runs it (the job must not outlive this frame).
void NativeCodeGenerator::FreeLoopBodyJobManager::QueueFreeLoopBodyJob(void* codeAddress)
{
    Assert(!this->isClosed);

    FreeLoopBodyJob* job = HeapNewNoThrow(FreeLoopBodyJob, this, codeAddress);

    if (job == nullptr)
    {
        // OOM fallback: stack job, added under the processor's lock, then
        // prioritized and waited on so it completes before we return.
        FreeLoopBodyJob stackJob(this, codeAddress, false /* heapAllocated */);
        {
            AutoOptionalCriticalSection lock(Processor()->GetCriticalSection());
            Processor()->AddJob(&stackJob);
        }
        Processor()->PrioritizeJobAndWait(this, &stackJob);
    }
    else
    {
        AutoOptionalCriticalSection lock(Processor()->GetCriticalSection());
        if (Processor()->HasManager(this))
        {
            // Processor still tracks this manager: hand over the job (the
            // processor owns the heap allocation from here on).
            Processor()->AddJobAndProcessProactively<FreeLoopBodyJobManager, FreeLoopBodyJob*>(this, job);
        }
        else
        {
            // Manager already removed from the processor — nobody would ever
            // run the job, so release it here.
            HeapDelete(job);
        }
    }
}
  2454. #ifdef PROFILE_EXEC
  2455. void
  2456. NativeCodeGenerator::CreateProfiler(Js::ScriptContextProfiler * profiler)
  2457. {
  2458. Assert(this->foregroundCodeGenProfiler == nullptr);
  2459. this->foregroundCodeGenProfiler = profiler;
  2460. profiler->AddRef();
  2461. }
  2462. Js::ScriptContextProfiler *
  2463. NativeCodeGenerator::EnsureForegroundCodeGenProfiler()
  2464. {
  2465. if (Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag))
  2466. {
  2467. Assert(this->foregroundCodeGenProfiler != nullptr);
  2468. Assert(this->foregroundCodeGenProfiler->IsInitialized());
  2469. }
  2470. return this->foregroundCodeGenProfiler;
  2471. }
  2472. void
  2473. NativeCodeGenerator::SetProfilerFromNativeCodeGen(NativeCodeGenerator * nativeCodeGen)
  2474. {
  2475. Assert(Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag));
  2476. Assert(this->foregroundCodeGenProfiler != nullptr);
  2477. Assert(this->foregroundCodeGenProfiler->IsInitialized());
  2478. Assert(nativeCodeGen->foregroundCodeGenProfiler != nullptr);
  2479. Assert(nativeCodeGen->foregroundCodeGenProfiler->IsInitialized());
  2480. this->foregroundCodeGenProfiler->Release();
  2481. this->foregroundCodeGenProfiler = nativeCodeGen->foregroundCodeGenProfiler;
  2482. this->foregroundCodeGenProfiler->AddRef();
  2483. }
  2484. void
  2485. NativeCodeGenerator::ProfilePrint()
  2486. {
  2487. Js::ScriptContextProfiler *codegenProfiler = this->backgroundCodeGenProfiler;
  2488. if (Js::Configuration::Global.flags.Verbose)
  2489. {
  2490. //Print individual CodegenProfiler information in verbose mode
  2491. while (codegenProfiler)
  2492. {
  2493. codegenProfiler->ProfilePrint(Js::Configuration::Global.flags.Profile.GetFirstPhase());
  2494. codegenProfiler = codegenProfiler->next;
  2495. }
  2496. }
  2497. else
  2498. {
  2499. //Merge all the codegenProfiler for single snapshot.
  2500. codegenProfiler = codegenProfiler->next;
  2501. while (codegenProfiler)
  2502. {
  2503. this->backgroundCodeGenProfiler->ProfileMerge(codegenProfiler);
  2504. codegenProfiler = codegenProfiler->next;
  2505. }
  2506. this->backgroundCodeGenProfiler->ProfilePrint(Js::Configuration::Global.flags.Profile.GetFirstPhase());
  2507. }
  2508. }
  2509. void
  2510. NativeCodeGenerator::ProfileBegin(Js::ScriptContextProfiler *const profiler, Js::Phase phase)
  2511. {
  2512. AssertMsg((profiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
  2513. "Profiler tag is supplied but the profiler pointer is NULL");
  2514. if (profiler)
  2515. {
  2516. profiler->ProfileBegin(phase);
  2517. }
  2518. }
  2519. void
  2520. NativeCodeGenerator::ProfileEnd(Js::ScriptContextProfiler *const profiler, Js::Phase phase)
  2521. {
  2522. AssertMsg((profiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
  2523. "Profiler tag is supplied but the profiler pointer is NULL");
  2524. if (profiler)
  2525. {
  2526. profiler->ProfileEnd(phase);
  2527. }
  2528. }
  2529. #endif
// Hands a verified work item to the job processor. Runtime data is gathered
// before the (optional) lock is taken; the recyclable data is registered with
// the thread context so the recycler keeps it alive while the item is queued.
// Full-JIT items are additionally tracked in queuedFullJitWorkItems so stale
// ones can be evicted when the queue grows past JitQueueThreshold.
void NativeCodeGenerator::AddToJitQueue(CodeGenWorkItem *const codeGenWorkItem, bool prioritize, bool lock, void* function)
{
    codeGenWorkItem->VerifyJitMode();

    Js::CodeGenRecyclableData* recyclableData = GatherCodeGenData(codeGenWorkItem->GetFunctionBody(), codeGenWorkItem->GetFunctionBody(), codeGenWorkItem->GetEntryPoint(), codeGenWorkItem, function);
    codeGenWorkItem->SetRecyclableData(recyclableData);

    // Lock is optional: some callers already hold the processor's critical section.
    AutoOptionalCriticalSection autoLock(lock ? Processor()->GetCriticalSection() : nullptr);
    scriptContext->GetThreadContext()->RegisterCodeGenRecyclableData(recyclableData);

    // If we have added a lot of jobs that are still waiting to be jitted, remove the oldest job
    // to ensure we do not spend time jitting stale work items.
    const ExecutionMode jitMode = codeGenWorkItem->GetJitMode();
    if(jitMode == ExecutionMode::FullJit &&
        queuedFullJitWorkItemCount >= (unsigned int)CONFIG_FLAG(JitQueueThreshold))
    {
        CodeGenWorkItem *const workItemRemoved = queuedFullJitWorkItems.Tail()->WorkItem();
        Assert(workItemRemoved->GetJitMode() == ExecutionMode::FullJit);
        // Only drop the tracking entry if the processor actually gave the job
        // up (it may already be executing).
        if(Processor()->RemoveJob(workItemRemoved))
        {
            queuedFullJitWorkItems.UnlinkFromEnd();
            --queuedFullJitWorkItemCount;
            workItemRemoved->OnRemoveFromJitQueue(this);
        }
    }

    Processor()->AddJob(codeGenWorkItem, prioritize);   // This one can throw (really unlikely though), OOM specifically.

    if(jitMode == ExecutionMode::FullJit)
    {
        QueuedFullJitWorkItem *const queuedFullJitWorkItem = codeGenWorkItem->EnsureQueuedFullJitWorkItem();
        if(queuedFullJitWorkItem) // ignore OOM, this work item just won't be removed from the job processor's queue
        {
            if(prioritize)
            {
                queuedFullJitWorkItems.LinkToBeginning(queuedFullJitWorkItem);
            }
            else
            {
                queuedFullJitWorkItems.LinkToEnd(queuedFullJitWorkItem);
            }
            ++queuedFullJitWorkItemCount;
        }
    }

    codeGenWorkItem->OnAddToJitQueue();
}
  2571. void NativeCodeGenerator::AddWorkItem(CodeGenWorkItem* workitem)
  2572. {
  2573. workitem->ResetJitMode();
  2574. workItems.LinkToEnd(workitem);
  2575. }
  2576. Js::ScriptContextProfiler * NativeCodeGenerator::GetBackgroundCodeGenProfiler(PageAllocator *allocator)
  2577. {
  2578. #ifdef PROFILE_EXEC
  2579. if (Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag))
  2580. {
  2581. Js::ScriptContextProfiler *codegenProfiler = this->backgroundCodeGenProfiler;
  2582. while (codegenProfiler)
  2583. {
  2584. if (codegenProfiler->pageAllocator == allocator)
  2585. {
  2586. if (!codegenProfiler->IsInitialized())
  2587. {
  2588. codegenProfiler->Initialize(allocator, nullptr);
  2589. }
  2590. return codegenProfiler;
  2591. }
  2592. codegenProfiler = codegenProfiler->next;
  2593. }
  2594. Assert(false);
  2595. }
  2596. return nullptr;
  2597. #else
  2598. return nullptr;
  2599. #endif
  2600. }
  2601. void NativeCodeGenerator::AllocateBackgroundCodeGenProfiler(PageAllocator *pageAllocator)
  2602. {
  2603. #ifdef PROFILE_EXEC
  2604. if (Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag))
  2605. {
  2606. Js::ScriptContextProfiler *codegenProfiler = NoCheckHeapNew(Js::ScriptContextProfiler);
  2607. codegenProfiler->pageAllocator = pageAllocator;
  2608. codegenProfiler->next = this->backgroundCodeGenProfiler;
  2609. this->backgroundCodeGenProfiler = codegenProfiler;
  2610. }
  2611. #endif
  2612. }
// Recursively checks whether inlineeFunctionBody — and, transitively, every
// function reachable through its profiled call sites — can be fully inlined
// into topFunctionBody. Returns true only when every profiled call site
// resolves to an inlinable target (call sites with no profile info and
// built-ins without a function body are skipped). inlineeCount is incremented
// once per function that would be inlined; recursiveInlineDepth grows by one
// whenever a function recurses into itself so the decider can bound it.
bool NativeCodeGenerator::TryAggressiveInlining(Js::FunctionBody *const topFunctionBody, Js::FunctionBody *const inlineeFunctionBody, InliningDecider &inliningDecider, uint& inlineeCount, uint recursiveInlineDepth)
{
    // Guard against stack exhaustion — this function recurses per call site.
    PROBE_STACK(scriptContext, Js::Constants::MinStackDefault);

    if (!inlineeFunctionBody->GetProfiledCallSiteCount())
    {
        // Nothing to inline. See this as fully inlinable function.
        return true;
    }

    // RAII guard: pairs OnBeginInlineInto/OnEndInlineInto around the whole
    // traversal so the bracket holds on every return path below.
    class AutoCleanup
    {
    private:
        Js::FunctionBody *const functionBody;
    public:
        AutoCleanup(Js::FunctionBody *const functionBody) : functionBody(functionBody)
        {
            functionBody->OnBeginInlineInto();
        }
        ~AutoCleanup()
        {
            functionBody->OnEndInlineInto();
        }
    } autoCleanup(inlineeFunctionBody);

#if defined(DBG_DUMP) || defined(ENABLE_DEBUG_CONFIG_OPTIONS)
    // Debug-only tracer: emits INLINE_TESTTRACE success/failure messages.
    // Done(true) must be called before the success return; the destructor
    // reports failure for any path that exits without calling it.
    class AutoTrace
    {
        Js::FunctionBody *const topFunc;
        Js::FunctionBody *const inlineeFunc;
        uint32& inlineeCount;
        bool done;
        wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
        wchar_t debugStringBuffer2[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
    public:
        AutoTrace(Js::FunctionBody *const topFunctionBody, Js::FunctionBody *const inlineeFunctionBody, uint32& inlineeCount) : topFunc(topFunctionBody),
            inlineeFunc(inlineeFunctionBody), done(false), inlineeCount(inlineeCount)
        {
            // topFunc == inlineeFunc marks the outermost (self) invocation.
            if (topFunc == inlineeFunc)
            {
                INLINE_TESTTRACE(L"INLINING: Recursive tryAggressiveInlining started topFunc: %s (%s)\n", topFunc->GetDisplayName(),
                    topFunc->GetDebugNumberSet(debugStringBuffer))
            }
        }
        void Done(bool success)
        {
            if (success)
            {
                done = true;
                if (topFunc == inlineeFunc)
                {
                    INLINE_TESTTRACE(L"INLINING: Recursive tryAggressiveInlining succeeded topFunc: %s (%s), inlinee count: %d\n", topFunc->GetDisplayName(),
                        topFunc->GetDebugNumberSet(debugStringBuffer), inlineeCount);
                }
                else
                {
                    INLINE_TESTTRACE(L"INLINING: TryAggressiveInlining succeeded topFunc: %s (%s), inlinee: %s (%s) \n", topFunc->GetDisplayName(),
                        topFunc->GetDebugNumberSet(debugStringBuffer),
                        inlineeFunc->GetDisplayName(),
                        inlineeFunc->GetDebugNumberSet(debugStringBuffer2));
                }
            }
            else
            {
                // Done(false) is only legal before any success was recorded.
                Assert(done == false);
            }
        }
        void TraceFailure(const wchar_t *message)
        {
            INLINE_TESTTRACE(L"INLINING: TryAggressiveInlining failed topFunc (%s): %s (%s), inlinee: %s (%s) \n", message, topFunc->GetDisplayName(),
                topFunc->GetDebugNumberSet(debugStringBuffer),
                inlineeFunc->GetDisplayName(),
                inlineeFunc->GetDebugNumberSet(debugStringBuffer2));
        }
        ~AutoTrace()
        {
            // Any exit without Done(true) is reported as a failure.
            if (!done)
            {
                if (topFunc == inlineeFunc)
                {
                    INLINE_TESTTRACE(L"INLINING: Recursive tryAggressiveInlining failed topFunc: %s (%s)\n", topFunc->GetDisplayName(),
                        topFunc->GetDebugNumberSet(debugStringBuffer));
                }
                else
                {
                    INLINE_TESTTRACE(L"INLINING: TryAggressiveInlining failed topFunc: %s (%s), inlinee: %s (%s) \n", topFunc->GetDisplayName(),
                        topFunc->GetDebugNumberSet(debugStringBuffer),
                        inlineeFunc->GetDisplayName(),
                        inlineeFunc->GetDebugNumberSet(debugStringBuffer2));
                }
            }
        }
    };

    AutoTrace trace(topFunctionBody, inlineeFunctionBody, inlineeCount);
#endif

    // Functions containing profiled switch statements are never aggressively
    // inlined.
    if (inlineeFunctionBody->GetProfiledSwitchCount())
    {
#if defined(DBG_DUMP) || defined(ENABLE_DEBUG_CONFIG_OPTIONS)
        trace.TraceFailure(L"Switch statement in inlinee");
#endif
        return false;
    }

    // Ask the decider whether this body may host inlinees at all; the check
    // differs for the top-level function vs. an already-inlined function.
    bool isInlinee = topFunctionBody != inlineeFunctionBody;
    if (isInlinee ? !inliningDecider.InlineIntoInliner(inlineeFunctionBody) : !inliningDecider.InlineIntoTopFunc())
    {
        return false;
    }

    // Every profiled call site must be inlinable for this function to count
    // as fully inlinable.
    const auto profiledCallSiteCount = inlineeFunctionBody->GetProfiledCallSiteCount();
    for (Js::ProfileId profiledCallSiteId = 0; profiledCallSiteId < profiledCallSiteCount; ++profiledCallSiteId)
    {
        bool isConstructorCall = false;
        bool isPolymorphicCall = false;
        if (!inliningDecider.HasCallSiteInfo(inlineeFunctionBody, profiledCallSiteId))
        {
            //There is no callsite information. We should hit bailonnoprofile for these callsites. Ignore.
            continue;
        }
        Js::FunctionInfo *inlinee = inliningDecider.GetCallSiteFuncInfo(inlineeFunctionBody, profiledCallSiteId, &isConstructorCall, &isPolymorphicCall);
        if (!inlinee)
        {
            // No single target — either a polymorphic site we may still be
            // able to inline, or an unknown target (fail).
            if (isPolymorphicCall)
            {
                //Try and see if this polymorphic call
                Js::FunctionBody* inlineeFunctionBodyArray[Js::DynamicProfileInfo::maxPolymorphicInliningSize] = { 0 };
                bool canInlineArray[Js::DynamicProfileInfo::maxPolymorphicInliningSize] = { 0 };
                uint polyInlineeCount = inliningDecider.InlinePolymorphicCallSite(inlineeFunctionBody, profiledCallSiteId, inlineeFunctionBodyArray,
                    Js::DynamicProfileInfo::maxPolymorphicInliningSize, canInlineArray);
                //We should be able to inline everything here.
                // A polymorphic site needs at least two targets, and every
                // target must itself be fully inlinable.
                if (polyInlineeCount >= 2)
                {
                    for (uint i = 0; i < polyInlineeCount; i++)
                    {
                        bool isInlined = canInlineArray[i];
                        if (isInlined)
                        {
                            ++inlineeCount;
                            // Depth bumps only when a target recurses into
                            // the current function; otherwise it resets to 0.
                            if (!TryAggressiveInlining(topFunctionBody, inlineeFunctionBodyArray[i], inliningDecider, inlineeCount, inlineeFunctionBody == inlineeFunctionBodyArray[i] ? recursiveInlineDepth + 1 : 0))
                            {
                                return false;
                            }
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    return false;
                }
            }
            else
            {
                return false;
            }
        }
        else
        {
            // Monomorphic site: let the decider confirm (and record) the
            // inline; a null result means it declined.
            inlinee = inliningDecider.Inline(inlineeFunctionBody, inlinee, isConstructorCall, false, inliningDecider.GetConstantArgInfo(inlineeFunctionBody, profiledCallSiteId), profiledCallSiteId, inlineeFunctionBody == inlinee ? recursiveInlineDepth + 1 : 0, true);
            if (!inlinee)
            {
                return false;
            }
            Js::FunctionBody *const functionBody = inlinee->GetFunctionBody();
            if (!functionBody)
            {
                //Built-in
                continue;
            }
            //Recursive call
            ++inlineeCount;
            if (!TryAggressiveInlining(topFunctionBody, functionBody, inliningDecider, inlineeCount, inlineeFunctionBody == functionBody ? recursiveInlineDepth + 1 : 0 ))
            {
                return false;
            }
        }
    }

#if defined(DBG_DUMP) || defined(ENABLE_DEBUG_CONFIG_OPTIONS)
    // Mark success so AutoTrace's destructor does not log a failure.
    trace.Done(true);
#endif
    return true;
}