// Func.cpp
  1. //-------------------------------------------------------------------------------------------------------
  2. // Copyright (C) Microsoft. All rights reserved.
  3. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
  4. //-------------------------------------------------------------------------------------------------------
  5. #include "BackEnd.h"
  6. #include "Base\EtwTrace.h"
  7. #include "Base\ScriptContextProfiler.h"
// Construct a Func: the per-compilation container for all JIT backend state
// of one function (or one loop body).  A Func is created for the top-level
// unit being jitted and for every inlinee; an inlinee is identified by a
// non-null parentFunc and non-null runtimeData.
//
// alloc        - arena owning all backend allocations for this compilation.
// workItem     - work item describing what to jit (function vs. loop body).
// runtimeData  - inlinee runtime data; non-null exactly when this is an inlinee.
// parentFunc   - the inliner's Func, or nullptr for the top-level Func.
// postCallByteCodeOffset / returnValueRegSlot / callSiteIdInParentFunc -
//                where the inlinee's call site lives in the parent; all carry
//                their "No*" sentinel values for the top-level Func.
Func::Func(JitArenaAllocator *alloc, CodeGenWorkItem* workItem, const Js::FunctionCodeGenRuntimeData *const runtimeData,
    Js::PolymorphicInlineCacheInfo * const polymorphicInlineCacheInfo, CodeGenAllocators *const codeGenAllocators,
    CodeGenNumberAllocator * numberAllocator, Js::ReadOnlyDynamicProfileInfo *const profileInfo,
    Js::ScriptContextProfiler *const codeGenProfiler, const bool isBackgroundJIT, Func * parentFunc,
    uint postCallByteCodeOffset, Js::RegSlot returnValueRegSlot, const bool isInlinedConstructor,
    Js::ProfileId callSiteIdInParentFunc, bool isGetterSetter) :
    // --- Inputs captured verbatim ---
    m_alloc(alloc),
    m_workItem(workItem),
    m_jitTimeData(workItem->RecyclableData()->JitTimeData()),
    m_runtimeData(runtimeData),
    m_polymorphicInlineCacheInfo(polymorphicInlineCacheInfo),
    m_codeGenAllocators(codeGenAllocators),
    // --- Object-type-spec / property-guard state, lazily created later ---
    m_inlineeId(0),
    pinnedTypeRefs(nullptr),
    singleTypeGuards(nullptr),
    equivalentTypeGuards(nullptr),
    propertyGuardsByPropertyId(nullptr),
    ctorCachesByPropertyId(nullptr),
    callSiteToArgumentsOffsetFixupMap(nullptr),
    indexedPropertyGuardCount(0),
    propertiesWrittenTo(nullptr),
    lazyBailoutProperties(alloc),
    anyPropertyMayBeWrittenTo(false),
#ifdef PROFILE_EXEC
    m_codeGenProfiler(codeGenProfiler),
#endif
    m_isBackgroundJIT(isBackgroundJIT),
    // --- Cloning / special syms, filled in during IR build and lowering ---
    m_cloner(nullptr),
    m_cloneMap(nullptr),
    m_loopParamSym(nullptr),
    m_funcObjSym(nullptr),
    m_localClosureSym(nullptr),
    m_localFrameDisplaySym(nullptr),
    m_bailoutReturnValueSym(nullptr),
    m_hasBailedOutSym(nullptr),
    m_inlineeFrameStartSym(nullptr),
    m_regsUsed(0),
    m_fg(nullptr),
    m_labelCount(0),
    m_argSlotsForFunctionsCalled(0),
    // --- Per-function codegen flags, discovered as phases run ---
    m_isLeaf(false),
    m_hasCalls(false),
    m_hasInlineArgsOpt(false),
    m_canDoInlineArgsOpt(true),
    m_doFastPaths(false),
    hasBailout(false),
    hasBailoutInEHRegion(false),
    hasInstrNumber(false),
    maintainByteCodeOffset(true),
    frameSize(0),
    parentFunc(parentFunc),
    argObjSyms(nullptr),
    m_nonTempLocalVars(nullptr),
    hasAnyStackNestedFunc(false),
    hasMarkTempObjects(false),
    postCallByteCodeOffset(postCallByteCodeOffset),
    maxInlineeArgOutCount(0),
    returnValueRegSlot(returnValueRegSlot),
    firstActualStackOffset(-1),
    m_localVarSlotsOffset(Js::Constants::InvalidOffset),
    m_hasLocalVarChangedOffset(Js::Constants::InvalidOffset),
    // (Js::ArgSlot)-1 is the "not yet known" sentinel for the actual count.
    actualCount((Js::ArgSlot) - 1),
    tryCatchNestingLevel(0),
    m_localStackHeight(0),
    tempSymDouble(nullptr),
    hasInlinee(false),
    thisOrParentInlinerHasArguments(false),
    hasStackArgs(false),
    hasArgumentObject(false),
    hasUnoptimizedArgumentsAcccess(false),
    hasApplyTargetInlining(false),
    hasImplicitCalls(false),
    hasTempObjectProducingInstr(false),
    isInlinedConstructor(isInlinedConstructor),
    numberAllocator(numberAllocator),
    profileInfo(profileInfo),
    loopCount(0),
    callSiteIdInParentFunc(callSiteIdInParentFunc),
    isGetterSetter(isGetterSetter),
    frameInfo(nullptr),
    isTJLoopBody(false),
    isFlowGraphValid(false),
#if DBG
    m_callSiteCount(0),
#endif
    stackNestedFunc(false),
    stackClosure(false)
#if defined(_M_ARM32_OR_ARM64)
    , m_ArgumentsOffset(0)
    , m_epilogLabel(nullptr)
#endif
    , m_funcStartLabel(nullptr)
    , m_funcEndLabel(nullptr)
#ifdef _M_X64
    , m_prologEncoder(alloc)
#endif
#if DBG
    , hasCalledSetDoFastPaths(false)
    , allowRemoveBailOutArgInstr(false)
    , currentPhases(alloc)
    , isPostLower(false)
    , isPostRegAlloc(false)
    , isPostPeeps(false)
    , isPostLayout(false)
    , isPostFinalLower(false)
    , vtableMap(nullptr)
#endif
    , m_yieldOffsetResumeLabelList(nullptr)
    , m_bailOutNoSaveLabel(nullptr)
    , constantAddressRegOpnd(alloc)
    , lastConstantAddressRegLoadInstr(nullptr)
    , m_totalJumpTableSizeInBytesForSwitchStatements(0)
    , slotArrayCheckTable(nullptr)
    , frameDisplayCheckTable(nullptr)
{
    // Inlinees (and only inlinees) carry runtime data.
    Assert(this->IsInlined() == !!runtimeData);

    if (this->IsInlined())
    {
        // Assign a unique, dense id by bumping the top func's counter.
        m_inlineeId = ++(GetTopFunc()->m_inlineeId);
    }

    m_jnFunction = m_workItem->GetFunctionBody();

    // Stack-allocated nested functions / closures: the closure optimization
    // implies the nested-func optimization, and is only applied at top level.
    bool doStackNestedFunc = m_jnFunction->DoStackNestedFunc();
    bool doStackClosure = m_jnFunction->DoStackClosure() && !PHASE_OFF(Js::FrameDisplayFastPathPhase, this);
    Assert(!doStackClosure || doStackNestedFunc);
    this->stackClosure = doStackClosure && this->IsTopFunc();
    if (this->stackClosure)
    {
        m_workItem->GetEntryPoint()->SetHasJittedStackClosure();
    }

    if (m_workItem->Type() == JsFunctionType)
    {
        if (m_jnFunction->GetDoBackendArgumentsOptimization() && !m_jnFunction->GetHasTry())
        {
            // doBackendArgumentsOptimization bit is set when there is no eval inside a function
            // as determined by the bytecode generator.
            SetHasStackArgs(true);
        }
        if (doStackNestedFunc && m_jnFunction->GetNestedCount() != 0)
        {
            Assert(!(this->IsJitInDebugMode() && !m_jnFunction->GetUtf8SourceInfo()->GetIsLibraryCode()));
            stackNestedFunc = true;
            this->GetTopFunc()->hasAnyStackNestedFunc = true;
        }
    }
    else
    {
        Assert(m_workItem->Type() == JsLoopBodyWorkItemType);
    }

    // Propagate "uses arguments" down the inline chain.
    // NOTE: && binds tighter than ||, so this reads as
    // GetHasOrParentHasArguments() || (parentFunc && parentFunc->thisOrParentInlinerHasArguments).
    if (m_jnFunction->GetHasOrParentHasArguments() || parentFunc && parentFunc->thisOrParentInlinerHasArguments)
    {
        thisOrParentInlinerHasArguments = true;
    }

    if (parentFunc == nullptr)
    {
        // Top-level Func: owns the sym table; call-site bookkeeping must be
        // at its "no value" sentinels.
        inlineDepth = 0;
        m_symTable = JitAnew(alloc, SymTable);
        m_symTable->Init(this);
        Assert(Js::Constants::NoByteCodeOffset == postCallByteCodeOffset);
        Assert(Js::Constants::NoRegister == returnValueRegSlot);

#if defined(_M_IX86) || defined(_M_X64)
        if (HasArgumentSlot())
        {
            // Pre-allocate the single argument slot we'll reserve for the arguments object.
            // For ARM, the argument slot is not part of the local but part of the register saves
            m_localStackHeight = MachArgsSlotOffset;
        }
#endif
    }
    else
    {
        // Inlinee: one level deeper than the inliner, with a real call site.
        inlineDepth = parentFunc->inlineDepth + 1;
        Assert(Js::Constants::NoByteCodeOffset != postCallByteCodeOffset);
    }

    this->constructorCacheCount = 0;
    this->constructorCaches = AnewArrayZ(this->m_alloc, Js::JitTimeConstructorCache*, this->m_jnFunction->GetProfiledCallSiteCount());

#if DBG_DUMP
    m_codeSize = -1;
#endif

#if defined(_M_X64)
    m_spillSize = -1;
    m_argsSize = -1;
    m_savedRegSize = -1;
#endif

    if (this->IsJitInDebugMode())
    {
        // Bit vector of non-temp locals, used for debugger locals inspection.
        m_nonTempLocalVars = Anew(this->m_alloc, BVSparse<JitArenaAllocator>, this->m_alloc);
    }

    if (this->m_jnFunction->IsGenerator())
    {
        m_yieldOffsetResumeLabelList = YieldOffsetResumeLabelList::New(this->m_alloc);
    }

    canHoistConstantAddressLoad = !PHASE_OFF(Js::HoistConstAddrPhase, this);
}
  201. bool
  202. Func::IsLoopBody() const
  203. {
  204. return this->m_workItem->Type() == JsLoopBodyWorkItemType;
  205. }
  206. bool
  207. Func::IsLoopBodyInTry() const
  208. {
  209. return IsLoopBody() && ((JsLoopBodyCodeGen*)this->m_workItem)->loopHeader->isInTry;
  210. }
  211. ///----------------------------------------------------------------------------
  212. ///
  213. /// Func::Codegen
  214. ///
  215. /// Codegen this function.
  216. ///
  217. ///----------------------------------------------------------------------------
  218. void
  219. Func::Codegen()
  220. {
  221. Assert(!IsJitInDebugMode() || !m_jnFunction->GetHasTry());
  222. Js::ScriptContext* scriptContext = this->GetScriptContext();
  223. {
  224. if(IS_JS_ETW(EventEnabledJSCRIPT_FUNCTION_JIT_START()))
  225. {
  226. WCHAR displayNameBuffer[256];
  227. WCHAR* displayName = displayNameBuffer;
  228. size_t sizeInChars = this->m_workItem->GetDisplayName(displayName, 256);
  229. if(sizeInChars > 256)
  230. {
  231. displayName = new WCHAR[sizeInChars];
  232. this->m_workItem->GetDisplayName(displayName, 256);
  233. }
  234. JS_ETW(EventWriteJSCRIPT_FUNCTION_JIT_START(
  235. this->GetFunctionNumber(),
  236. displayName,
  237. this->GetScriptContext(),
  238. this->m_workItem->GetInterpretedCount(),
  239. (const unsigned int)this->m_jnFunction->LengthInBytes(),
  240. this->m_jnFunction->GetByteCodeCount(),
  241. this->m_jnFunction->GetByteCodeInLoopCount(),
  242. (int)this->m_workItem->GetJitMode()));
  243. if(displayName != displayNameBuffer)
  244. {
  245. delete[] displayName;
  246. }
  247. }
  248. }
  249. #if DBG_DUMP
  250. if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::BackEndPhase))
  251. {
  252. if (this->IsLoopBody())
  253. {
  254. Output::Print(L"---BeginBackEnd: function: %s, loop:%d---\r\n", this->GetJnFunction()->GetDisplayName(),
  255. static_cast<JsLoopBodyCodeGen *>(this->m_workItem)->GetLoopNumber());
  256. }
  257. else
  258. {
  259. Output::Print(L"---BeginBackEnd: function: %s---\r\n", this->GetJnFunction()->GetDisplayName());
  260. }
  261. Output::Flush();
  262. }
  263. #endif
  264. wchar_t debugStringBuffer[MAX_FUNCTION_BODY_DEBUG_STRING_SIZE];
  265. LARGE_INTEGER start_time = { 0 };
  266. if(PHASE_TRACE(Js::BackEndPhase, GetJnFunction()))
  267. {
  268. QueryPerformanceCounter(&start_time);
  269. if (this->IsLoopBody())
  270. {
  271. Output::Print(
  272. L"BeginBackEnd - function: %s (%s, line %u), loop: %u, mode: %S",
  273. GetJnFunction()->GetDisplayName(),
  274. GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  275. GetJnFunction()->GetLineNumber(),
  276. static_cast<JsLoopBodyCodeGen *>(this->m_workItem)->GetLoopNumber(),
  277. ExecutionModeName(m_workItem->GetJitMode()));
  278. if (this->m_jnFunction->GetIsAsmjsMode())
  279. {
  280. Output::Print(L" (Asmjs)\n");
  281. }
  282. else
  283. {
  284. Output::Print(L"\n");
  285. }
  286. }
  287. else
  288. {
  289. Output::Print(
  290. L"BeginBackEnd - function: %s (%s, line %u), mode: %S",
  291. GetJnFunction()->GetDisplayName(),
  292. GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  293. GetJnFunction()->GetLineNumber(),
  294. ExecutionModeName(m_workItem->GetJitMode()));
  295. if (this->m_jnFunction->GetIsAsmjsMode())
  296. {
  297. Output::Print(L" (Asmjs)\n");
  298. }
  299. else
  300. {
  301. Output::Print(L"\n");
  302. }
  303. }
  304. Output::Flush();
  305. }
  306. #ifdef FIELD_ACCESS_STATS
  307. if (PHASE_TRACE(Js::ObjTypeSpecPhase, this->GetJnFunction()) || PHASE_TRACE(Js::EquivObjTypeSpecPhase, this->GetJnFunction()))
  308. {
  309. if (this->m_jitTimeData->inlineCacheStats)
  310. {
  311. auto stats = this->m_jitTimeData->inlineCacheStats;
  312. Output::Print(L"ObjTypeSpec: jitting function %s (#%s): inline cache stats:\n", this->GetJnFunction()->GetDisplayName(), this->GetJnFunction()->GetDebugNumberSet(debugStringBuffer));
  313. Output::Print(L" overall: total %u, no profile info %u\n", stats->totalInlineCacheCount, stats->noInfoInlineCacheCount);
  314. Output::Print(L" mono: total %u, empty %u, cloned %u\n",
  315. stats->monoInlineCacheCount, stats->emptyMonoInlineCacheCount, stats->clonedMonoInlineCacheCount);
  316. Output::Print(L" poly: total %u (high %u, low %u), null %u, empty %u, ignored %u, disabled %u, equivalent %u, non-equivalent %u, cloned %u\n",
  317. stats->polyInlineCacheCount, stats->highUtilPolyInlineCacheCount, stats->lowUtilPolyInlineCacheCount,
  318. stats->nullPolyInlineCacheCount, stats->emptyPolyInlineCacheCount, stats->ignoredPolyInlineCacheCount, stats->disabledPolyInlineCacheCount,
  319. stats->equivPolyInlineCacheCount, stats->nonEquivPolyInlineCacheCount, stats->clonedPolyInlineCacheCount);
  320. }
  321. else
  322. {
  323. Output::Print(L"EquivObjTypeSpec: function %s (%s): inline cache stats unavailable\n", this->GetJnFunction()->GetDisplayName(), this->GetJnFunction()->GetDebugNumberSet(debugStringBuffer));
  324. }
  325. Output::Flush();
  326. }
  327. #endif
  328. BEGIN_CODEGEN_PHASE(this, Js::BackEndPhase);
  329. {
  330. // IRBuilder
  331. BEGIN_CODEGEN_PHASE(this, Js::IRBuilderPhase);
  332. if (m_jnFunction->GetIsAsmjsMode())
  333. {
  334. IRBuilderAsmJs asmIrBuilder(this);
  335. asmIrBuilder.Build();
  336. }
  337. else
  338. {
  339. IRBuilder irBuilder(this);
  340. irBuilder.Build();
  341. }
  342. END_CODEGEN_PHASE(this, Js::IRBuilderPhase);
  343. #ifdef IR_VIEWER
  344. IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::IRBuilderPhase);
  345. #endif /* IR_VIEWER */
  346. BEGIN_CODEGEN_PHASE(this, Js::InlinePhase);
  347. InliningHeuristics heuristics(this->GetJnFunction());
  348. Inline inliner(this, heuristics);
  349. inliner.Optimize();
  350. END_CODEGEN_PHASE(this, Js::InlinePhase);
  351. if (scriptContext->IsClosed())
  352. {
  353. // Should not be jitting something in the foreground when the script context is actually closed
  354. Assert(IsBackgroundJIT() || !scriptContext->IsActuallyClosed());
  355. throw Js::OperationAbortedException();
  356. }
  357. // FlowGraph
  358. {
  359. // Scope for FlowGraph arena
  360. NoRecoverMemoryJitArenaAllocator fgAlloc(L"BE-FlowGraph", m_alloc->GetPageAllocator(), Js::Throw::OutOfMemory);
  361. BEGIN_CODEGEN_PHASE(this, Js::FGBuildPhase);
  362. this->m_fg = FlowGraph::New(this, &fgAlloc);
  363. this->m_fg->Build();
  364. END_CODEGEN_PHASE(this, Js::FGBuildPhase);
  365. // Global Optimization and Type Specialization
  366. BEGIN_CODEGEN_PHASE(this, Js::GlobOptPhase);
  367. GlobOpt globOpt(this);
  368. globOpt.Optimize();
  369. END_CODEGEN_PHASE(this, Js::GlobOptPhase);
  370. // Delete flowGraph now
  371. this->m_fg->Destroy();
  372. this->m_fg = nullptr;
  373. }
  374. #ifdef IR_VIEWER
  375. IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::GlobOptPhase);
  376. #endif /* IR_VIEWER */
  377. ThrowIfScriptClosed();
  378. // Lowering
  379. Lowerer lowerer(this);
  380. BEGIN_CODEGEN_PHASE(this, Js::LowererPhase);
  381. lowerer.Lower();
  382. END_CODEGEN_PHASE(this, Js::LowererPhase);
  383. #ifdef IR_VIEWER
  384. IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::LowererPhase);
  385. #endif /* IR_VIEWER */
  386. // Encode constants
  387. Security security(this);
  388. BEGIN_CODEGEN_PHASE(this, Js::EncodeConstantsPhase)
  389. security.EncodeLargeConstants();
  390. END_CODEGEN_PHASE(this, Js::EncodeConstantsPhase);
  391. if (this->GetScriptContext()->GetThreadContext()->DoInterruptProbe(this->GetJnFunction()))
  392. {
  393. BEGIN_CODEGEN_PHASE(this, Js::InterruptProbePhase)
  394. lowerer.DoInterruptProbes();
  395. END_CODEGEN_PHASE(this, Js::InterruptProbePhase)
  396. }
  397. // Register Allocation
  398. BEGIN_CODEGEN_PHASE(this, Js::RegAllocPhase);
  399. LinearScan linearScan(this);
  400. linearScan.RegAlloc();
  401. END_CODEGEN_PHASE(this, Js::RegAllocPhase);
  402. #ifdef IR_VIEWER
  403. IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::RegAllocPhase);
  404. #endif /* IR_VIEWER */
  405. ThrowIfScriptClosed();
  406. // Peephole optimizations
  407. BEGIN_CODEGEN_PHASE(this, Js::PeepsPhase);
  408. Peeps peeps(this);
  409. peeps.PeepFunc();
  410. END_CODEGEN_PHASE(this, Js::PeepsPhase);
  411. // Layout
  412. BEGIN_CODEGEN_PHASE(this, Js::LayoutPhase);
  413. SimpleLayout layout(this);
  414. layout.Layout();
  415. END_CODEGEN_PHASE(this, Js::LayoutPhase);
  416. if (this->HasTry() && this->hasBailoutInEHRegion)
  417. {
  418. BEGIN_CODEGEN_PHASE(this, Js::EHBailoutPatchUpPhase);
  419. lowerer.EHBailoutPatchUp();
  420. END_CODEGEN_PHASE(this, Js::EHBailoutPatchUpPhase);
  421. }
  422. // Insert NOPs (moving this before prolog/epilog for AMD64 and possibly ARM).
  423. BEGIN_CODEGEN_PHASE(this, Js::InsertNOPsPhase);
  424. security.InsertNOPs();
  425. END_CODEGEN_PHASE(this, Js::InsertNOPsPhase);
  426. // Prolog/Epilog
  427. BEGIN_CODEGEN_PHASE(this, Js::PrologEpilogPhase);
  428. if (m_jnFunction->GetIsAsmjsMode())
  429. {
  430. lowerer.LowerPrologEpilogAsmJs();
  431. }
  432. else
  433. {
  434. lowerer.LowerPrologEpilog();
  435. }
  436. END_CODEGEN_PHASE(this, Js::PrologEpilogPhase);
  437. BEGIN_CODEGEN_PHASE(this, Js::FinalLowerPhase);
  438. lowerer.FinalLower();
  439. END_CODEGEN_PHASE(this, Js::FinalLowerPhase);
  440. // Encoder
  441. BEGIN_CODEGEN_PHASE(this, Js::EncoderPhase);
  442. Encoder encoder(this);
  443. encoder.Encode();
  444. END_CODEGEN_PHASE_NO_DUMP(this, Js::EncoderPhase);
  445. #ifdef IR_VIEWER
  446. IRtoJSObjectBuilder::DumpIRtoGlobalObject(this, Js::EncoderPhase);
  447. #endif /* IR_VIEWER */
  448. }
  449. END_CODEGEN_PHASE(this, Js::BackEndPhase);
  450. #if DBG_DUMP
  451. if (Js::Configuration::Global.flags.TestTrace.IsEnabled(Js::BackEndPhase))
  452. {
  453. Output::Print(L"---EndBackEnd---\r\n");
  454. Output::Flush();
  455. }
  456. #endif
  457. #ifdef PROFILE_BAILOUT_RECORD_MEMORY
  458. if (Js::Configuration::Global.flags.ProfileBailOutRecordMemory)
  459. {
  460. scriptContext->codeSize += this->m_codeSize;
  461. }
  462. #endif
  463. if (PHASE_TRACE(Js::BackEndPhase, GetJnFunction()))
  464. {
  465. LARGE_INTEGER freq;
  466. LARGE_INTEGER end_time;
  467. QueryPerformanceCounter(&end_time);
  468. QueryPerformanceFrequency(&freq);
  469. if (this->IsLoopBody())
  470. {
  471. Output::Print(
  472. L"EndBackEnd - function: %s (%s, line %u), loop: %u, mode: %S, time:%8.6f mSec",
  473. GetJnFunction()->GetDisplayName(),
  474. GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  475. GetJnFunction()->GetLineNumber(),
  476. static_cast<JsLoopBodyCodeGen *>(this->m_workItem)->GetLoopNumber(),
  477. ExecutionModeName(m_workItem->GetJitMode()),
  478. (((double)((end_time.QuadPart - start_time.QuadPart)* (double)1000.0 / (double)freq.QuadPart))) / (1));
  479. if (this->m_jnFunction->GetIsAsmjsMode())
  480. {
  481. Output::Print(L" (Asmjs)\n");
  482. }
  483. else
  484. {
  485. Output::Print(L"\n");
  486. }
  487. }
  488. else
  489. {
  490. Output::Print(
  491. L"EndBackEnd - function: %s (%s, line %u), mode: %S time:%8.6f mSec",
  492. GetJnFunction()->GetDisplayName(),
  493. GetJnFunction()->GetDebugNumberSet(debugStringBuffer),
  494. GetJnFunction()->GetLineNumber(),
  495. ExecutionModeName(m_workItem->GetJitMode()),
  496. (((double)((end_time.QuadPart - start_time.QuadPart)* (double)1000.0 / (double)freq.QuadPart))) / (1));
  497. if (this->m_jnFunction->GetIsAsmjsMode())
  498. {
  499. Output::Print(L" (Asmjs)\n");
  500. }
  501. else
  502. {
  503. Output::Print(L"\n");
  504. }
  505. }
  506. Output::Flush();
  507. }
  508. {
  509. if(IS_JS_ETW(EventEnabledJSCRIPT_FUNCTION_JIT_STOP()))
  510. {
  511. WCHAR displayNameBuffer[256];
  512. WCHAR* displayName = displayNameBuffer;
  513. size_t sizeInChars = this->m_workItem->GetDisplayName(displayName, 256);
  514. if(sizeInChars > 256)
  515. {
  516. displayName = new WCHAR[sizeInChars];
  517. this->m_workItem->GetDisplayName(displayName, 256);
  518. }
  519. void* entryPoint;
  520. ptrdiff_t codeSize;
  521. this->m_workItem->GetEntryPointAddress(&entryPoint, &codeSize);
  522. JS_ETW(EventWriteJSCRIPT_FUNCTION_JIT_STOP(
  523. this->GetFunctionNumber(),
  524. displayName,
  525. scriptContext,
  526. this->m_workItem->GetInterpretedCount(),
  527. entryPoint,
  528. codeSize));
  529. if(displayName != displayNameBuffer)
  530. {
  531. delete[] displayName;
  532. }
  533. }
  534. }
  535. #if DBG_DUMP
  536. if (Js::Configuration::Global.flags.IsEnabled(Js::AsmDumpModeFlag))
  537. {
  538. FILE * oldFile = 0;
  539. FILE * asmFile = scriptContext->GetNativeCodeGenerator()->asmFile;
  540. if (asmFile)
  541. {
  542. oldFile = Output::SetFile(asmFile);
  543. }
  544. this->Dump(IRDumpFlags_AsmDumpMode);
  545. Output::Flush();
  546. if (asmFile)
  547. {
  548. FILE *openedFile = Output::SetFile(oldFile);
  549. Assert(openedFile == asmFile);
  550. }
  551. }
  552. #endif
  553. }
  554. ///----------------------------------------------------------------------------
  555. /// Func::StackAllocate
  556. /// Allocate stack space of given size.
  557. ///----------------------------------------------------------------------------
// Carve `size` bytes out of this function's local stack area and return the
// frame-relative offset of the new slot.  Only valid on the top-level Func,
// which owns the frame layout.  Slots are aligned to min(size,
// MachStackAlignment) so sub-alignment-sized slots can pack together.
int32
Func::StackAllocate(int size)
{
    Assert(this->IsTopFunc());

    int32 offset;

#ifdef MD_GROW_LOCALS_AREA_UP
    // Locals have positive offsets and are allocated from bottom to top:
    // align first, take the current height as the offset, then grow.
    m_localStackHeight = Math::Align(m_localStackHeight, min(size, MachStackAlignment));
    offset = m_localStackHeight;
    m_localStackHeight += size;
#else
    // Locals have negative offsets and are allocated from top to bottom:
    // grow first, align, and the (negated) new height is the offset.
    m_localStackHeight += size;
    m_localStackHeight = Math::Align(m_localStackHeight, min(size, MachStackAlignment));
    offset = -m_localStackHeight;
#endif

    return offset;
}
  576. ///----------------------------------------------------------------------------
  577. ///
  578. /// Func::StackAllocate
  579. ///
  580. /// Allocate stack space for this symbol.
  581. ///
  582. ///----------------------------------------------------------------------------
  583. int32
  584. Func::StackAllocate(StackSym *stackSym, int size)
  585. {
  586. Assert(size > 0);
  587. if (stackSym->IsArgSlotSym() || stackSym->IsParamSlotSym() || stackSym->IsAllocated())
  588. {
  589. return stackSym->m_offset;
  590. }
  591. Assert(stackSym->m_offset == 0);
  592. stackSym->m_allocated = true;
  593. stackSym->m_offset = StackAllocate(size);
  594. return stackSym->m_offset;
  595. }
  596. void
  597. Func::SetArgOffset(StackSym *stackSym, int32 offset)
  598. {
  599. AssertMsg(offset >= 0, "Why is the offset, negative?");
  600. stackSym->m_offset = offset;
  601. stackSym->m_allocated = true;
  602. }
///
/// Ensures that local var slots are created, if the function has locals.
/// Allocates stack space for locals used for debugging (for local non-temp
/// vars we write through to memory so that locals inspection can make use of it).
/// On the stack, after the local slots, we allocate space for metadata (in
/// particular, whether any of the locals was changed in the debugger).
///
  609. void
  610. Func::EnsureLocalVarSlots()
  611. {
  612. Assert(IsJitInDebugMode());
  613. if (!this->HasLocalVarSlotCreated())
  614. {
  615. Assert(this->m_jnFunction != nullptr);
  616. uint32 localSlotCount = this->m_jnFunction->GetNonTempLocalVarCount();
  617. if (localSlotCount && m_localVarSlotsOffset == Js::Constants::InvalidOffset)
  618. {
  619. // Allocate the slots.
  620. int32 size = localSlotCount * GetDiagLocalSlotSize();
  621. m_localVarSlotsOffset = StackAllocate(size);
  622. m_hasLocalVarChangedOffset = StackAllocate(max(1, MachStackAlignment)); // Can't alloc less than StackAlignment bytes.
  623. Assert(this->m_workItem->Type() == JsFunctionType);
  624. // Store in the entry point info, so that it will later be used when we do the variable inspection.
  625. Js::FunctionEntryPointInfo * entryPointInfo = static_cast<Js::FunctionEntryPointInfo*>(this->m_workItem->GetEntryPoint());
  626. Assert(entryPointInfo != nullptr);
  627. entryPointInfo->localVarSlotsOffset = AdjustOffsetValue(m_localVarSlotsOffset);
  628. entryPointInfo->localVarChangedOffset = AdjustOffsetValue(m_hasLocalVarChangedOffset);
  629. }
  630. }
  631. }
// Compute and record the stack offset of the first actual argument of this
// inlinee, derived from where the ArgOut syms were placed.  Only meaningful
// on an inlinee (never the top func); inlineeStart must belong to this Func.
void Func::SetFirstArgOffset(IR::Instr* inlineeStart)
{
    Assert(inlineeStart->m_func == this);
    Assert(!IsTopFunc());
    int32 lastOffset;

    // Start from the first ArgOut after the inlinee-start instruction.
    IR::Instr* arg = inlineeStart->GetNextArg();
    const auto lastArgOutStackSym = arg->GetDst()->AsSymOpnd()->m_sym->AsStackSym();
    lastOffset = lastArgOutStackSym->m_offset;
    Assert(lastArgOutStackSym->m_isSingleDef);

    // Also look at the ArgOut linked via src2 of the defining instruction and
    // take the larger of the two offsets as the end of the arg area.
    const auto secondLastArgOutOpnd = lastArgOutStackSym->m_instrDef->GetSrc2();
    if (secondLastArgOutOpnd->IsSymOpnd())
    {
        const auto secondLastOffset = secondLastArgOutOpnd->AsSymOpnd()->m_sym->AsStackSym()->m_offset;
        if (secondLastOffset > lastOffset)
        {
            lastOffset = secondLastOffset;
        }
    }

    // One slot past the highest ArgOut, minus (actuals + inlinee meta-args)
    // slots, gives the offset of the first actual.
    lastOffset += MachPtr;
    int32 firstActualStackOffset = lastOffset - ((this->actualCount + Js::Constants::InlineeMetaArgCount) * MachPtr);
    // -1 is the "not yet computed" sentinel set by the constructor; once set,
    // recomputation must agree.
    Assert((this->firstActualStackOffset == -1) || (this->firstActualStackOffset == firstActualStackOffset));
    this->firstActualStackOffset = firstActualStackOffset;
}
  655. int32
  656. Func::GetLocalVarSlotOffset(int32 slotId)
  657. {
  658. this->EnsureLocalVarSlots();
  659. Assert(m_localVarSlotsOffset != Js::Constants::InvalidOffset);
  660. int32 slotOffset = slotId * GetDiagLocalSlotSize();
  661. return m_localVarSlotsOffset + slotOffset;
  662. }
  663. void Func::OnAddSym(Sym* sym)
  664. {
  665. Assert(sym);
  666. if (this->IsJitInDebugMode() && this->IsNonTempLocalVar(sym->m_id))
  667. {
  668. Assert(m_nonTempLocalVars);
  669. m_nonTempLocalVars->Set(sym->m_id);
  670. }
  671. }
///
/// Returns offset of the flag (1 byte) whether any local was changed (in debugger).
/// If the function does not have any locals, returns -1.
///
int32
Func::GetHasLocalVarChangedOffset()
{
    // Lazily reserve the locals-inspection slots before reading the offset.
    this->EnsureLocalVarSlots();
    return m_hasLocalVarChangedOffset;
}
  682. bool
  683. Func::IsJitInDebugMode()
  684. {
  685. return
  686. Js::Configuration::Global.EnableJitInDebugMode() &&
  687. this->m_workItem->IsJitInDebugMode();
  688. }
  689. bool
  690. Func::IsNonTempLocalVar(uint32 slotIndex)
  691. {
  692. Assert(this->m_jnFunction != nullptr);
  693. return this->m_jnFunction->IsNonTempLocalVar(slotIndex);
  694. }
// Convert a raw locals-area offset into the frame-relative form that is
// stored on the entry point info (used when inspecting locals in a stopped
// frame). The adjustment depends on the direction the locals area grows.
int32
Func::AdjustOffsetValue(int32 offset)
{
#ifdef MD_GROW_LOCALS_AREA_UP
    // Locals grow upward: apply the bail-out stack sym bias and negate.
    return -(offset + BailOutInfo::StackSymBias);
#else
    // Stack offset are negative, includes the PUSH EBP and return address
    return offset - (2 * MachPtr);
#endif
}
  705. #ifdef MD_GROW_LOCALS_AREA_UP
  706. // Note: this is called during jit-compile when we finalize bail out record.
  707. void
  708. Func::AjustLocalVarSlotOffset()
  709. {
  710. if (m_jnFunction->GetNonTempLocalVarCount())
  711. {
  712. // Turn positive SP-relative base locals offset into negative frame-pointer-relative offset
  713. // This is changing value for restoring the locals when read due to locals inspection.
  714. int localsOffset = m_localVarSlotsOffset - (m_localStackHeight + m_ArgumentsOffset);
  715. int valueChangeOffset = m_hasLocalVarChangedOffset - (m_localStackHeight + m_ArgumentsOffset);
  716. Js::FunctionEntryPointInfo * entryPointInfo = static_cast<Js::FunctionEntryPointInfo*>(this->m_workItem->GetEntryPoint());
  717. Assert(entryPointInfo != nullptr);
  718. entryPointInfo->localVarSlotsOffset = localsOffset;
  719. entryPointInfo->localVarChangedOffset = valueChangeOffset;
  720. }
  721. }
  722. #endif
  723. bool
  724. Func::DoGlobOptsForGeneratorFunc()
  725. {
  726. // Disable GlobOpt optimizations for generators initially. Will visit and enable each one by one.
  727. return !m_jnFunction->IsGenerator();
  728. }
  729. void
  730. Func::SetDoFastPaths()
  731. {
  732. // Make sure we only call this once!
  733. Assert(!this->hasCalledSetDoFastPaths);
  734. bool isLeaf = this->m_isLeaf && !PHASE_OFF(Js::LeafFastPathPhase, this);
  735. bool doFastPaths = false;
  736. if(!PHASE_OFF(Js::FastPathPhase, this) && (!IsSimpleJit() || Js::FunctionBody::IsNewSimpleJit()))
  737. {
  738. if (isLeaf || this->GetScriptContext()->GetThreadContext()->GetSourceSize() < (size_t)CONFIG_FLAG(FastPathCap) || CONFIG_FLAG(ForceFastPath))
  739. {
  740. doFastPaths = true;
  741. }
  742. }
  743. this->m_doFastPaths = doFastPaths;
  744. #ifdef DBG
  745. this->hasCalledSetDoFastPaths = true;
  746. #endif
  747. }
  748. #ifdef _M_ARM
  749. RegNum
  750. Func::GetLocalsPointer() const
  751. {
  752. #ifdef DBG
  753. if (Js::Configuration::Global.flags.IsEnabled(Js::ForceLocalsPtrFlag))
  754. {
  755. return ALT_LOCALS_PTR;
  756. }
  757. #endif
  758. if (this->m_jnFunction->GetHasTry())
  759. {
  760. return ALT_LOCALS_PTR;
  761. }
  762. return RegSP;
  763. }
  764. #endif
// Record that a slot-array access through 'fieldOpnd' needs a range check,
// keeping the largest slot id seen per slot-array sym so a single check
// covers all accesses. No-op when the ClosureRangeCheck phase is off.
void Func::AddSlotArrayCheck(IR::SymOpnd *fieldOpnd)
{
    if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
    {
        return;
    }

    Assert(IsTopFunc());
    if (this->slotArrayCheckTable == nullptr)
    {
        this->slotArrayCheckTable = SlotArrayCheckTable::New(m_alloc, 4);
    }

    PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
    // For a slot access the property id is the slot index being read/written.
    uint32 slot = propertySym->m_propertyId;
    // FindOrInsert(value, key): returns the existing entry for this stack sym,
    // or nullptr if 'slot' was just inserted as the new entry.
    uint32 *pSlotId = this->slotArrayCheckTable->FindOrInsert(slot, propertySym->m_stackSym->m_id);
    // Grow the recorded slot id if this access reaches further (or if the
    // entry holds the (uint32)-1 sentinel).
    if (pSlotId && (*pSlotId == (uint32)-1 || *pSlotId < slot))
    {
        *pSlotId = propertySym->m_propertyId;
    }
}
// Record that a frame-display access through 'fieldOpnd' needs a range check.
// Tracks, per frame-display sym, the deepest scope slot accessed and (when
// 'slotId' is given) the largest slot index accessed within each scope.
// No-op when the ClosureRangeCheck phase is off.
void Func::AddFrameDisplayCheck(IR::SymOpnd *fieldOpnd, uint32 slotId)
{
    if (PHASE_OFF(Js::ClosureRangeCheckPhase, this))
    {
        return;
    }

    Assert(IsTopFunc());
    if (this->frameDisplayCheckTable == nullptr)
    {
        this->frameDisplayCheckTable = FrameDisplayCheckTable::New(m_alloc, 4);
    }

    PropertySym *propertySym = fieldOpnd->m_sym->AsPropertySym();
    // One check record per frame-display stack sym; create on first use.
    FrameDisplayCheckRecord **record = this->frameDisplayCheckTable->FindOrInsertNew(propertySym->m_stackSym->m_id);
    if (*record == nullptr)
    {
        *record = JitAnew(m_alloc, FrameDisplayCheckRecord);
    }

    // The property id is the index into the frame display (which scope).
    uint32 frameDisplaySlot = propertySym->m_propertyId;
    if ((*record)->table == nullptr || (*record)->slotId < frameDisplaySlot)
    {
        (*record)->slotId = frameDisplaySlot;
    }

    // When a slot within the scope is also accessed, track its maximum too.
    if (slotId != (uint32)-1)
    {
        if ((*record)->table == nullptr)
        {
            (*record)->table = SlotArrayCheckTable::New(m_alloc, 4);
        }
        uint32 *pSlotId = (*record)->table->FindOrInsert(slotId, frameDisplaySlot);
        if (pSlotId && *pSlotId < slotId)
        {
            *pSlotId = slotId;
        }
    }
}
  819. void Func::InitLocalClosureSyms()
  820. {
  821. Assert(this->m_localClosureSym == nullptr);
  822. // Allocate stack space for closure pointers. Do this only if we're jitting for stack closures, and
  823. // tell bailout that these are not byte code symbols so that we don't try to encode them in the bailout record,
  824. // as they don't have normal lifetimes.
  825. Js::RegSlot regSlot = this->GetJnFunction()->GetLocalClosureReg();
  826. if (regSlot != Js::Constants::NoRegister)
  827. {
  828. this->m_localClosureSym =
  829. StackSym::FindOrCreate(static_cast<SymID>(regSlot),
  830. this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
  831. this);
  832. }
  833. regSlot = this->GetJnFunction()->GetLocalFrameDisplayReg();
  834. if (regSlot != Js::Constants::NoRegister)
  835. {
  836. this->m_localFrameDisplaySym =
  837. StackSym::FindOrCreate(static_cast<SymID>(regSlot),
  838. this->DoStackFrameDisplay() ? (Js::RegSlot)-1 : regSlot,
  839. this);
  840. }
  841. }
// Whether jitted code for this function may be placed in a pre-reserved heap
// page segment. Only possible on CFG-enabled builds; on x86 it additionally
// requires a full-jit work item and allocator support.
bool Func::CanAllocInPreReservedHeapPageSegment ()
{
#ifdef _CONTROL_FLOW_GUARD
    // Either forced on by flag, or: phase not disabled, not jitting/running in
    // debug mode, and CFG is active on this thread context.
    return PHASE_FORCE1(Js::PreReservedHeapAllocPhase) || (!PHASE_OFF1(Js::PreReservedHeapAllocPhase) &&
        !IsJitInDebugMode() && !GetScriptContext()->IsInDebugMode() && GetScriptContext()->GetThreadContext()->IsCFGEnabled()
#if _M_IX86
        && m_workItem->GetJitMode() == ExecutionMode::FullJit && GetCodeGenAllocators()->canCreatePreReservedSegment);
#elif _M_X64
        && true);
#else
        && false); //Not yet implemented for architectures other than x86 and amd64.
#endif //_M_ARCH
#else
    return false;
#endif//_CONTROL_FLOW_GUARD
}
  858. ///----------------------------------------------------------------------------
  859. ///
  860. /// Func::GetInstrCount
  861. ///
  862. /// Returns the number of instrs.
  863. /// Note: It counts all instrs for now, including labels, etc.
  864. ///
  865. ///----------------------------------------------------------------------------
  866. uint32
  867. Func::GetInstrCount()
  868. {
  869. uint instrCount = 0;
  870. FOREACH_INSTR_IN_FUNC(instr, this)
  871. {
  872. instrCount++;
  873. }NEXT_INSTR_IN_FUNC;
  874. return instrCount;
  875. }
  876. ///----------------------------------------------------------------------------
  877. ///
  878. /// Func::NumberInstrs
  879. ///
  880. /// Number each instruction in order of appearance in the function.
  881. ///
  882. ///----------------------------------------------------------------------------
  883. void
  884. Func::NumberInstrs()
  885. {
  886. #if DBG_DUMP
  887. Assert(this->IsTopFunc());
  888. Assert(!this->hasInstrNumber);
  889. this->hasInstrNumber = true;
  890. #endif
  891. uint instrCount = 1;
  892. FOREACH_INSTR_IN_FUNC(instr, this)
  893. {
  894. instr->SetNumber(instrCount++);
  895. }
  896. NEXT_INSTR_IN_FUNC;
  897. }
///----------------------------------------------------------------------------
///
/// Func::IsInPhase
///
/// Determines whether the function is currently in the provided phase
///
///----------------------------------------------------------------------------
#if DBG
bool
Func::IsInPhase(Js::Phase tag)
{
    // The phase stack lives on the top func (see BeginPhase/EndProfiler).
    return this->GetTopFunc()->currentPhases.Contains(tag);
}
#endif
///----------------------------------------------------------------------------
///
/// Func::BeginPhase
///
/// Takes care of the profiler
///
///----------------------------------------------------------------------------
void
Func::BeginPhase(Js::Phase tag)
{
#ifdef DBG
    // Track the phase nesting on the top func so IsInPhase works for inlinees.
    this->GetTopFunc()->currentPhases.Push(tag);
#endif

#ifdef PROFILE_EXEC
    AssertMsg((this->m_codeGenProfiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
        "Profiler tag is supplied but the profiler pointer is NULL");
    if (this->m_codeGenProfiler)
    {
        this->m_codeGenProfiler->ProfileBegin(tag);
    }
#endif
}
///----------------------------------------------------------------------------
///
/// Func::EndProfiler
///
/// Pops the current phase and stops its profiler timing. Called by EndPhase,
/// which additionally handles IR dumping.
///
///----------------------------------------------------------------------------
void
Func::EndProfiler(Js::Phase tag)
{
#ifdef DBG
    // The tag being ended must match the innermost phase begun.
    Assert(this->GetTopFunc()->currentPhases.Count() > 0);
    Js::Phase popped = this->GetTopFunc()->currentPhases.Pop();
    Assert(tag == popped);
#endif

#ifdef PROFILE_EXEC
    AssertMsg((this->m_codeGenProfiler != nullptr) == Js::Configuration::Global.flags.IsEnabled(Js::ProfileFlag),
        "Profiler tag is supplied but the profiler pointer is NULL");
    if (this->m_codeGenProfiler)
    {
        this->m_codeGenProfiler->ProfileEnd(tag);
    }
#endif
}
// Ends a compilation phase: stops profiling, optionally dumps the IR, and (in
// DBG builds) records phase-completion flags and runs post-lower IR checks.
void
Func::EndPhase(Js::Phase tag, bool dump)
{
    this->EndProfiler(tag);
#if DBG_DUMP
    if(dump && (PHASE_DUMP(tag, this)
        || PHASE_DUMP(Js::BackEndPhase, this)))
    {
        Output::Print(L"-----------------------------------------------------------------------------\n");

        // Loop-body work items get the loop number in the dump banner.
        if (m_workItem->Type() == JsLoopBodyWorkItemType)
        {
            Output::Print(L"************ IR after %s (%S) Loop %d ************\n", Js::PhaseNames[tag], ExecutionModeName(m_workItem->GetJitMode()), ((JsLoopBodyCodeGen*)m_workItem)->GetLoopNumber());
        }
        else
        {
            Output::Print(L"************ IR after %s (%S) ************\n", Js::PhaseNames[tag], ExecutionModeName(m_workItem->GetJitMode()));
        }
        this->Dump(Js::Configuration::Global.flags.AsmDiff? IRDumpFlags_AsmDumpMode : IRDumpFlags_None);
    }
#endif

#if DBG
    // Record phase progress and assert the expected back-end phase ordering:
    // Lowerer -> RegAlloc -> Peeps -> Layout -> FinalLower.
    if (tag == Js::LowererPhase)
    {
        Assert(!this->isPostLower);
        this->isPostLower = true;
    }
    else if (tag == Js::RegAllocPhase)
    {
        Assert(!this->isPostRegAlloc);
        this->isPostRegAlloc = true;
    }
    else if (tag == Js::PeepsPhase)
    {
        Assert(this->isPostLower && !this->isPostLayout);
        this->isPostPeeps = true;
    }
    else if (tag == Js::LayoutPhase)
    {
        Assert(this->isPostPeeps && !this->isPostLayout);
        this->isPostLayout = true;
    }
    else if (tag == Js::FinalLowerPhase)
    {
        Assert(this->isPostLayout && !this->isPostFinalLower);
        this->isPostFinalLower = true;
    }

    // Once lowered, verify the IR after every subsequent phase.
    if (this->isPostLower)
    {
#ifndef _M_ARM    // Need to verify ARM is clean.
        DbCheckPostLower dbCheck(this);
        dbCheck.Check();
#endif
    }
#endif
}
  1013. Func const *
  1014. Func::GetTopFunc() const
  1015. {
  1016. Func const * func = this;
  1017. while (!func->IsTopFunc())
  1018. {
  1019. func = func->parentFunc;
  1020. }
  1021. return func;
  1022. }
  1023. Func *
  1024. Func::GetTopFunc()
  1025. {
  1026. Func * func = this;
  1027. while (!func->IsTopFunc())
  1028. {
  1029. func = func->parentFunc;
  1030. }
  1031. return func;
  1032. }
  1033. StackSym *
  1034. Func::EnsureLoopParamSym()
  1035. {
  1036. if (this->m_loopParamSym == nullptr)
  1037. {
  1038. this->m_loopParamSym = StackSym::New(TyMachPtr, this);
  1039. }
  1040. return this->m_loopParamSym;
  1041. }
  1042. void
  1043. Func::UpdateMaxInlineeArgOutCount(uint inlineeArgOutCount)
  1044. {
  1045. if (maxInlineeArgOutCount < inlineeArgOutCount)
  1046. {
  1047. maxInlineeArgOutCount = inlineeArgOutCount;
  1048. }
  1049. }
  1050. void
  1051. Func::BeginClone(Lowerer * lowerer, JitArenaAllocator *alloc)
  1052. {
  1053. Assert(this->IsTopFunc());
  1054. AssertMsg(m_cloner == nullptr, "Starting new clone while one is in progress");
  1055. m_cloner = JitAnew(alloc, Cloner, lowerer, alloc);
  1056. if (m_cloneMap == nullptr)
  1057. {
  1058. m_cloneMap = JitAnew(alloc, InstrMap, alloc, 7);
  1059. }
  1060. }
  1061. void
  1062. Func::EndClone()
  1063. {
  1064. Assert(this->IsTopFunc());
  1065. if (m_cloner)
  1066. {
  1067. m_cloner->Finish();
  1068. JitAdelete(m_cloner->alloc, m_cloner);
  1069. m_cloner = nullptr;
  1070. }
  1071. }
  1072. IR::SymOpnd *
  1073. Func::GetInlineeOpndAtOffset(int32 offset)
  1074. {
  1075. Assert(IsInlinee());
  1076. StackSym *stackSym = CreateInlineeStackSym();
  1077. this->SetArgOffset(stackSym, stackSym->m_offset + offset);
  1078. Assert(stackSym->m_offset >= 0);
  1079. return IR::SymOpnd::New(stackSym, 0, TyMachReg, this);
  1080. }
  1081. StackSym *
  1082. Func::CreateInlineeStackSym()
  1083. {
  1084. // Make sure this is an inlinee and that GlobOpt has initialized the offset
  1085. // in the inlinee's frame.
  1086. Assert(IsInlinee());
  1087. Assert(m_inlineeFrameStartSym->m_offset != -1);
  1088. StackSym *stackSym = m_symTable->GetArgSlotSym((Js::ArgSlot)-1);
  1089. stackSym->m_isInlinedArgSlot = true;
  1090. stackSym->m_offset = m_inlineeFrameStartSym->m_offset;
  1091. stackSym->m_allocated = true;
  1092. return stackSym;
  1093. }
  1094. uint8 *
  1095. Func::GetCallsCountAddress() const
  1096. {
  1097. Assert(this->m_workItem->Type() == JsFunctionType);
  1098. JsFunctionCodeGen * functionCodeGen = static_cast<JsFunctionCodeGen *>(this->m_workItem);
  1099. return functionCodeGen->GetFunctionBody()->GetCallsCountAddress(functionCodeGen->GetEntryPoint());
  1100. }
  1101. RecyclerWeakReference<Js::FunctionBody> *
  1102. Func::GetWeakFuncRef() const
  1103. {
  1104. if (this->m_jitTimeData == nullptr)
  1105. {
  1106. return nullptr;
  1107. }
  1108. return this->m_jitTimeData->GetWeakFuncRef();
  1109. }
  1110. Js::InlineCache *
  1111. Func::GetRuntimeInlineCache(const uint index) const
  1112. {
  1113. if(this->m_runtimeData)
  1114. {
  1115. const auto inlineCache = this->m_runtimeData->ClonedInlineCaches()->GetInlineCache(this->m_jnFunction, index);
  1116. if(inlineCache)
  1117. {
  1118. return inlineCache;
  1119. }
  1120. }
  1121. return this->m_jnFunction->GetInlineCache(index);
  1122. }
  1123. Js::PolymorphicInlineCache *
  1124. Func::GetRuntimePolymorphicInlineCache(const uint index) const
  1125. {
  1126. if (this->m_polymorphicInlineCacheInfo)
  1127. {
  1128. return this->m_polymorphicInlineCacheInfo->GetPolymorphicInlineCaches()->GetInlineCache(this->m_jnFunction, index);
  1129. }
  1130. return nullptr;
  1131. }
  1132. byte
  1133. Func::GetPolyCacheUtilToInitialize(const uint index) const
  1134. {
  1135. return this->GetRuntimePolymorphicInlineCache(index) ? this->GetPolyCacheUtil(index) : PolymorphicInlineCacheUtilizationMinValue;
  1136. }
// Raw utilization byte recorded for the polymorphic inline cache at 'index'.
// Assumes m_polymorphicInlineCacheInfo is non-null (the caller,
// GetPolyCacheUtilToInitialize, only calls this when a runtime cache exists).
byte
Func::GetPolyCacheUtil(const uint index) const
{
    return this->m_polymorphicInlineCacheInfo->GetUtilArray()->GetUtil(this->m_jnFunction, index);
}
  1142. Js::ObjTypeSpecFldInfo*
  1143. Func::GetObjTypeSpecFldInfo(const uint index) const
  1144. {
  1145. if (this->m_jitTimeData == nullptr)
  1146. {
  1147. return nullptr;
  1148. }
  1149. Assert(this->m_jitTimeData->GetObjTypeSpecFldInfoArray());
  1150. return this->m_jitTimeData->GetObjTypeSpecFldInfoArray()->GetInfo(this->m_jnFunction, index);
  1151. }
// Cross-function (global) obj-type-spec field info for 'propertyInfoId'.
// Requires jit-time data to be present.
Js::ObjTypeSpecFldInfo*
Func::GetGlobalObjTypeSpecFldInfo(uint propertyInfoId) const
{
    Assert(this->m_jitTimeData != nullptr);
    return this->m_jitTimeData->GetGlobalObjTypeSpecFldInfo(propertyInfoId);
}
// Store cross-function (global) obj-type-spec field info for 'propertyInfoId'.
// Requires jit-time data to be present.
void
Func::SetGlobalObjTypeSpecFldInfo(uint propertyInfoId, Js::ObjTypeSpecFldInfo* info)
{
    Assert(this->m_jitTimeData != nullptr);
    this->m_jitTimeData->SetGlobalObjTypeSpecFldInfo(propertyInfoId, info);
}
  1164. void
  1165. Func::EnsurePinnedTypeRefs()
  1166. {
  1167. if (this->pinnedTypeRefs == nullptr)
  1168. {
  1169. this->pinnedTypeRefs = JitAnew(this->m_alloc, TypeRefSet, this->m_alloc);
  1170. }
  1171. }
// Add 'typeRef' to the pinned-type-ref set (created on demand).
void
Func::PinTypeRef(void* typeRef)
{
    EnsurePinnedTypeRefs();
    this->pinnedTypeRefs->AddNew(typeRef);
}
  1178. void
  1179. Func::EnsureSingleTypeGuards()
  1180. {
  1181. if (this->singleTypeGuards == nullptr)
  1182. {
  1183. this->singleTypeGuards = JitAnew(this->m_alloc, TypePropertyGuardDictionary, this->m_alloc);
  1184. }
  1185. }
  1186. Js::JitTypePropertyGuard*
  1187. Func::GetOrCreateSingleTypeGuard(Js::Type* type)
  1188. {
  1189. EnsureSingleTypeGuards();
  1190. Js::JitTypePropertyGuard* guard;
  1191. if (!this->singleTypeGuards->TryGetValue(type, &guard))
  1192. {
  1193. // Property guards are allocated by NativeCodeData::Allocator so that their lifetime extends as long as the EntryPointInfo is alive.
  1194. guard = NativeCodeDataNew(GetNativeCodeDataAllocator(), Js::JitTypePropertyGuard, type, this->indexedPropertyGuardCount++);
  1195. this->singleTypeGuards->Add(type, guard);
  1196. }
  1197. else
  1198. {
  1199. Assert(guard->GetType() == type);
  1200. }
  1201. return guard;
  1202. }
  1203. void
  1204. Func::EnsureEquivalentTypeGuards()
  1205. {
  1206. if (this->equivalentTypeGuards == nullptr)
  1207. {
  1208. this->equivalentTypeGuards = JitAnew(this->m_alloc, EquivalentTypeGuardList, this->m_alloc);
  1209. }
  1210. }
// Create and register a new equivalent-type guard for 'type', wired to a
// fresh equivalent-type cache.
Js::JitEquivalentTypeGuard*
Func::CreateEquivalentTypeGuard(Js::Type* type, uint32 objTypeSpecFldId)
{
    EnsureEquivalentTypeGuards();

    // Guards live in native code data so they survive as long as the EntryPointInfo.
    Js::JitEquivalentTypeGuard* guard = NativeCodeDataNew(GetNativeCodeDataAllocator(), Js::JitEquivalentTypeGuard, type, this->indexedPropertyGuardCount++, objTypeSpecFldId);

    // If we want to hard code the address of the cache, we will need to go back to allocating it from the native code data allocator.
    // We would then need to maintain consistency (double write) to both the recycler allocated cache and the one on the heap.
    Js::EquivalentTypeCache* cache = NativeCodeDataNewZ(GetTransferDataAllocator(), Js::EquivalentTypeCache);
    guard->SetCache(cache);

    // Give the cache a back-pointer to the guard so that the guard can be cleared at runtime if necessary.
    cache->SetGuard(guard);
    this->equivalentTypeGuards->Prepend(guard);

    return guard;
}
  1225. void
  1226. Func::EnsurePropertyGuardsByPropertyId()
  1227. {
  1228. if (this->propertyGuardsByPropertyId == nullptr)
  1229. {
  1230. this->propertyGuardsByPropertyId = JitAnew(this->m_alloc, PropertyGuardByPropertyIdMap, this->m_alloc);
  1231. }
  1232. }
  1233. void
  1234. Func::EnsureCtorCachesByPropertyId()
  1235. {
  1236. if (this->ctorCachesByPropertyId == nullptr)
  1237. {
  1238. this->ctorCachesByPropertyId = JitAnew(this->m_alloc, CtorCachesByPropertyIdMap, this->m_alloc);
  1239. }
  1240. }
  1241. void
  1242. Func::LinkGuardToPropertyId(Js::PropertyId propertyId, Js::JitIndexedPropertyGuard* guard)
  1243. {
  1244. Assert(guard != nullptr);
  1245. Assert(guard->GetValue() != NULL);
  1246. Assert(this->propertyGuardsByPropertyId != nullptr);
  1247. IndexedPropertyGuardSet* set;
  1248. if (!this->propertyGuardsByPropertyId->TryGetValue(propertyId, &set))
  1249. {
  1250. set = JitAnew(this->m_alloc, IndexedPropertyGuardSet, this->m_alloc);
  1251. this->propertyGuardsByPropertyId->Add(propertyId, set);
  1252. }
  1253. set->Item(guard);
  1254. }
  1255. void
  1256. Func::LinkCtorCacheToPropertyId(Js::PropertyId propertyId, Js::JitTimeConstructorCache* cache)
  1257. {
  1258. Assert(cache != nullptr);
  1259. Assert(this->ctorCachesByPropertyId != nullptr);
  1260. CtorCacheSet* set;
  1261. if (!this->ctorCachesByPropertyId->TryGetValue(propertyId, &set))
  1262. {
  1263. set = JitAnew(this->m_alloc, CtorCacheSet, this->m_alloc);
  1264. this->ctorCachesByPropertyId->Add(propertyId, set);
  1265. }
  1266. set->Item(cache->runtimeCache);
  1267. }
  1268. Js::JitTimeConstructorCache* Func::GetConstructorCache(const Js::ProfileId profiledCallSiteId)
  1269. {
  1270. Assert(GetJnFunction() != nullptr);
  1271. Assert(profiledCallSiteId < GetJnFunction()->GetProfiledCallSiteCount());
  1272. Assert(this->constructorCaches != nullptr);
  1273. return this->constructorCaches[profiledCallSiteId];
  1274. }
  1275. void Func::SetConstructorCache(const Js::ProfileId profiledCallSiteId, Js::JitTimeConstructorCache* constructorCache)
  1276. {
  1277. const auto functionBody = this->GetJnFunction();
  1278. Assert(functionBody != nullptr);
  1279. Assert(profiledCallSiteId < functionBody->GetProfiledCallSiteCount());
  1280. Assert(constructorCache != nullptr);
  1281. Assert(this->constructorCaches != nullptr);
  1282. Assert(this->constructorCaches[profiledCallSiteId] == nullptr);
  1283. this->constructorCacheCount++;
  1284. this->constructorCaches[profiledCallSiteId] = constructorCache;
  1285. }
  1286. void Func::EnsurePropertiesWrittenTo()
  1287. {
  1288. if (this->propertiesWrittenTo == nullptr)
  1289. {
  1290. this->propertiesWrittenTo = JitAnew(this->m_alloc, PropertyIdSet, this->m_alloc);
  1291. }
  1292. }
  1293. void Func::EnsureCallSiteToArgumentsOffsetFixupMap()
  1294. {
  1295. if (this->callSiteToArgumentsOffsetFixupMap == nullptr)
  1296. {
  1297. this->callSiteToArgumentsOffsetFixupMap = JitAnew(this->m_alloc, CallSiteToArgumentsOffsetFixupMap, this->m_alloc);
  1298. }
  1299. }
// Returns the cached function-start label, or nullptr if none was created
// (see EnsureFuncStartLabel).
IR::LabelInstr *
Func::GetFuncStartLabel()
{
    return m_funcStartLabel;
}
  1305. IR::LabelInstr *
  1306. Func::EnsureFuncStartLabel()
  1307. {
  1308. if(m_funcStartLabel == nullptr)
  1309. {
  1310. m_funcStartLabel = IR::LabelInstr::New( Js::OpCode::Label, this );
  1311. }
  1312. return m_funcStartLabel;
  1313. }
// Returns the cached function-end label, or nullptr if none was created
// (see EnsureFuncEndLabel).
IR::LabelInstr *
Func::GetFuncEndLabel()
{
    return m_funcEndLabel;
}
  1319. IR::LabelInstr *
  1320. Func::EnsureFuncEndLabel()
  1321. {
  1322. if(m_funcEndLabel == nullptr)
  1323. {
  1324. m_funcEndLabel = IR::LabelInstr::New( Js::OpCode::Label, this );
  1325. }
  1326. return m_funcEndLabel;
  1327. }
  1328. void
  1329. Cloner::AddInstr(IR::Instr * instrOrig, IR::Instr * instrClone)
  1330. {
  1331. if (!this->instrFirst)
  1332. {
  1333. this->instrFirst = instrClone;
  1334. }
  1335. this->instrLast = instrClone;
  1336. }
  1337. void
  1338. Cloner::Finish()
  1339. {
  1340. this->RetargetClonedBranches();
  1341. if (this->lowerer)
  1342. {
  1343. lowerer->LowerRange(this->instrFirst, this->instrLast, false, false);
  1344. }
  1345. }
// Retarget every branch in the cloned range, if branch retargeting was
// requested for this clone session.
void
Cloner::RetargetClonedBranches()
{
    if (!this->fRetargetClonedBranch)
    {
        return;
    }

    FOREACH_INSTR_IN_RANGE(instr, this->instrFirst, this->instrLast)
    {
        if (instr->IsBranchInstr())
        {
            instr->AsBranchInstr()->RetargetClonedBranch();
        }
    }
    NEXT_INSTR_IN_RANGE;
}
  1362. void Func::ThrowIfScriptClosed()
  1363. {
  1364. Js::ScriptContext* scriptContext = this->GetScriptContext();
  1365. if(scriptContext->IsClosed())
  1366. {
  1367. // Should not be jitting something in the foreground when the script context is actually closed
  1368. Assert(IsBackgroundJIT() || !scriptContext->IsActuallyClosed());
  1369. throw Js::OperationAbortedException();
  1370. }
  1371. }
// Build an indir operand for a constant address, hoisting the base-address
// load to the function entry. If a previously hoisted base is within a DWORD
// displacement of 'address', it is reused with an offset; otherwise a new
// base-address load is emitted at the entry insertion point.
// Returns nullptr when hoisting is no longer allowed (post-lower).
IR::IndirOpnd * Func::GetConstantAddressIndirOpnd(void * address, IR::AddrOpndKind kind, IRType type, Js::OpCode loadOpCode)
{
    Assert(this->GetTopFunc() == this);
    if (!canHoistConstantAddressLoad)
    {
        // We can't hoist constant address load after lower, as we can't mark the sym as
        // live on back edge
        return nullptr;
    }

    // Search the hoisted base addresses for one close enough to reach with a
    // 32-bit displacement; 'offset' receives the displacement on success.
    int offset = 0;
    IR::RegOpnd ** foundRegOpnd = this->constantAddressRegOpnd.Find([address, &offset](IR::RegOpnd * regOpnd)
    {
        Assert(regOpnd->m_sym->IsSingleDef());
        void * curr = regOpnd->m_sym->m_instrDef->GetSrc1()->AsAddrOpnd()->m_address;
        ptrdiff_t diff = (intptr_t)address - (intptr_t)curr;
        if (!Math::FitsInDWord(diff))
        {
            return false;
        }

        offset = (int)diff;
        return true;
    });

    IR::RegOpnd * addressRegOpnd;
    if (foundRegOpnd != nullptr)
    {
        addressRegOpnd = *foundRegOpnd;
    }
    else
    {
        // No reusable base: load 'address' into a fresh machine-pointer reg
        // at the function entry and remember it for later reuse.
        Assert(offset == 0);
        addressRegOpnd = IR::RegOpnd::New(TyMachPtr, this);
        IR::Instr *const newInstr =
            IR::Instr::New(
                loadOpCode,
                addressRegOpnd,
                IR::AddrOpnd::New(address, kind, this, true),
                this);
        this->constantAddressRegOpnd.Prepend(addressRegOpnd);

        // Keep the hoisted loads grouped: insert before the previous hoisted
        // load, or at the function entry when this is the first one.
        IR::Instr * insertBeforeInstr = this->lastConstantAddressRegLoadInstr;
        if (insertBeforeInstr == nullptr)
        {
            insertBeforeInstr = this->GetFunctionEntryInsertionPoint();
            this->lastConstantAddressRegLoadInstr = newInstr;
        }
        insertBeforeInstr->InsertBefore(newInstr);
    }

    IR::IndirOpnd * indirOpnd = IR::IndirOpnd::New(addressRegOpnd, offset, type, this, true);
#if DBG_DUMP
    indirOpnd->SetAddrKind(kind, address);
#endif
    return indirOpnd;
}
  1424. void Func::MarkConstantAddressSyms(BVSparse<JitArenaAllocator> * bv)
  1425. {
  1426. Assert(this->GetTopFunc() == this);
  1427. this->constantAddressRegOpnd.Iterate([bv](IR::RegOpnd * regOpnd)
  1428. {
  1429. bv->Set(regOpnd->m_sym->m_id);
  1430. });
  1431. }
  1432. IR::Instr *
  1433. Func::GetFunctionEntryInsertionPoint()
  1434. {
  1435. Assert(this->GetTopFunc() == this);
  1436. IR::Instr * insertInsert = this->lastConstantAddressRegLoadInstr;
  1437. if (insertInsert != nullptr)
  1438. {
  1439. return insertInsert->m_next;
  1440. }
  1441. insertInsert = this->m_headInstr;
  1442. if (this->HasTry())
  1443. {
  1444. // Insert it inside the root region
  1445. insertInsert = insertInsert->m_next;
  1446. Assert(insertInsert->IsLabelInstr() && insertInsert->AsLabelInstr()->GetRegion()->GetType() == RegionTypeRoot);
  1447. }
  1448. return insertInsert->m_next;
  1449. }
  1450. #if DBG_DUMP
  1451. ///----------------------------------------------------------------------------
  1452. ///
  1453. /// Func::DumpHeader
  1454. ///
  1455. ///----------------------------------------------------------------------------
  1456. void
  1457. Func::DumpHeader()
  1458. {
  1459. Output::Print(L"-----------------------------------------------------------------------------\n");
  1460. this->m_jnFunction->DumpFullFunctionName();
  1461. Output::SkipToColumn(50);
  1462. Output::Print(L"Instr Count:%d", GetInstrCount());
  1463. if(m_codeSize > 0)
  1464. {
  1465. Output::Print(L"\t\tSize:%d\n\n", m_codeSize);
  1466. }
  1467. else
  1468. {
  1469. Output::Print(L"\n\n");
  1470. }
  1471. }
  1472. ///----------------------------------------------------------------------------
  1473. ///
  1474. /// Func::Dump
  1475. ///
  1476. ///----------------------------------------------------------------------------
  1477. void
  1478. Func::Dump(IRDumpFlags flags)
  1479. {
  1480. this->DumpHeader();
  1481. FOREACH_INSTR_IN_FUNC(instr, this)
  1482. {
  1483. instr->DumpGlobOptInstrString();
  1484. instr->Dump(flags);
  1485. }NEXT_INSTR_IN_FUNC;
  1486. Output::Flush();
  1487. }
// Dump the function's IR with default flags.
void
Func::Dump()
{
    this->Dump(IRDumpFlags_None);
}
  1493. #endif
  1494. #if DBG_DUMP || defined(ENABLE_IR_VIEWER)
// Look up a human-readable name for the vtable at 'address' (debug builds
// only; returns "" otherwise). Strips a leading "class " prefix when present.
// May return nullptr when the address is not a registered vtable.
LPCSTR
Func::GetVtableName(INT_PTR address)
{
#if DBG
    // Build the vtable hash map lazily on first use.
    if (vtableMap == nullptr)
    {
        vtableMap = VirtualTableRegistry::CreateVtableHashMap(this->m_alloc);
    };
    LPCSTR name = vtableMap->Lookup(address, nullptr);
    if (name)
    {
        // Drop the "class " prefix MSVC puts in decorated type names.
        if (strncmp(name, "class ", _countof("class ") - 1) == 0)
        {
            name += _countof("class ") - 1;
        }
    }
    return name;
#else
    return "";
#endif
}
  1516. #endif
  1517. #if DBG_DUMP | defined(VTUNE_PROFILING)
  1518. bool Func::DoRecordNativeMap() const
  1519. {
  1520. #if defined(VTUNE_PROFILING)
  1521. if (EtwTrace::isJitProfilingActive)
  1522. {
  1523. return true;
  1524. }
  1525. #endif
  1526. #if DBG_DUMP
  1527. return PHASE_DUMP(Js::EncoderPhase, this) && Js::Configuration::Global.flags.Verbose;
  1528. #else
  1529. return false;
  1530. #endif
  1531. }
  1532. #endif